From 6379ccfd4da7dd0ff6567d622d313439b2fe34b5 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 17 Dec 2025 23:22:48 +0100 Subject: [PATCH 01/15] feat(profiler): AsyncContextFrame used by profiler with Node.js 22+, on by default (#7119) * Update to pprof 5.13.1 * Profiler can now use AsyncContextFrame starting with Node.js 22.9.0 * Use AsyncContextFrame-based profiling sample context tracking by default, where available. * Add tests * Do not use options.useAsyncContextFrame --- integration-tests/profiler/profiler.spec.js | 6 +-- package.json | 2 +- packages/dd-trace/src/profiling/config.js | 28 +++++----- .../dd-trace/test/profiling/config.spec.js | 54 +++++++++++++++++++ yarn.lock | 8 +-- 5 files changed, 78 insertions(+), 20 deletions(-) diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js index 99844d65c10..57ef866d247 100644 --- a/integration-tests/profiler/profiler.spec.js +++ b/integration-tests/profiler/profiler.spec.js @@ -355,12 +355,12 @@ describe('profiler', () => { BUSY_CYCLE_TIME: (busyCycleTimeNs | 0).toString(), DD_TRACE_AGENT_PORT: agent.port } - // With Node 23 or later, test the profiler with async context frame use. + // With Node 22.9.0 or later, test the profiler with async context frame use. const execArgv = [] - if (satisfies(process.versions.node, '>=23.0.0')) { + if (satisfies(process.versions.node, '>=22.9.0')) { env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = 1 if (!satisfies(process.versions.node, '>=24.0.0')) { - // For Node 23, use the experimental command line flag for Node to enable + // For Node 22.9.0+, use the experimental command line flag for Node to enable // async context frame. Node 24 has it enabled by default. 
execArgv.push('--experimental-async-context-frame') } diff --git a/package.json b/package.json index 51c25cbe45b..9147d198bd4 100644 --- a/package.json +++ b/package.json @@ -135,7 +135,7 @@ "@datadog/native-iast-taint-tracking": "4.1.0", "@datadog/native-metrics": "3.1.1", "@datadog/openfeature-node-server": "^0.2.0", - "@datadog/pprof": "5.12.0", + "@datadog/pprof": "5.13.1", "@datadog/wasm-js-rewriter": "5.0.1", "@opentelemetry/api": ">=1.0.0 <1.10.0", "@opentelemetry/api-logs": "<1.0.0" diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 8d32fc15a7f..cc05944853b 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -221,20 +221,24 @@ class Config { const hasExecArg = (arg) => process.execArgv.includes(arg) || String(NODE_OPTIONS).includes(arg) - this.asyncContextFrameEnabled = isTrue(options.useAsyncContextFrame ?? DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED) - if (this.asyncContextFrameEnabled) { + let canUseAsyncContextFrame = false + if (samplingContextsAvailable) { if (satisfies(process.versions.node, '>=24.0.0')) { - if (hasExecArg('--no-async-context-frame')) { - turnOffAsyncContextFrame('with --no-async-context-frame') - } - } else if (satisfies(process.versions.node, '>=23.0.0')) { - if (!hasExecArg('--experimental-async-context-frame')) { - turnOffAsyncContextFrame('without --experimental-async-context-frame') - } + canUseAsyncContextFrame = !hasExecArg('--no-async-context-frame') + } else if (satisfies(process.versions.node, '>=22.9.0')) { + canUseAsyncContextFrame = hasExecArg('--experimental-async-context-frame') + } + } + this.asyncContextFrameEnabled = isTrue(DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? 
canUseAsyncContextFrame) + if (this.asyncContextFrameEnabled && !canUseAsyncContextFrame) { + if (!samplingContextsAvailable) { + turnOffAsyncContextFrame(`on ${process.platform}`) + } else if (satisfies(process.versions.node, '>=24.0.0')) { + turnOffAsyncContextFrame('with --no-async-context-frame') + } else if (satisfies(process.versions.node, '>=22.9.0')) { + turnOffAsyncContextFrame('without --experimental-async-context-frame') } else { - // NOTE: technically, this should work starting with 22.7.0 which is when - // AsyncContextFrame debuted, but it would require a change in pprof-nodejs too. - turnOffAsyncContextFrame('but it requires at least Node.js 23') + turnOffAsyncContextFrame('but it requires at least Node.js 22.9.0') } } diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 98d75126013..c1fcac6253a 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -6,6 +6,7 @@ const { assertObjectContains } = require('../../../../integration-tests/helpers' const { describe, it, beforeEach, afterEach } = require('tap').mocha const os = require('node:os') const path = require('node:path') +const satisfies = require('semifies') require('../setup/core') @@ -508,6 +509,59 @@ describe('config', () => { }) } + describe('async context', () => { + const isSupported = samplingContextsAvailable && satisfies(process.versions.node, '>=24.0.0') + describe('where supported', () => { + it('should be on by default', function () { + if (!isSupported) { + this.skip() + } else { + const config = new Config({}) + assert.strictEqual(config.asyncContextFrameEnabled, true) + } + }) + + it('can be turned off by env var', function () { + if (!isSupported) { + this.skip() + } else { + process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '0' + try { + const config = new Config({}) + assert.strictEqual(config.asyncContextFrameEnabled, false) + } finally { + delete 
process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + } + } + }) + }) + + describe('where not supported', function () { + it('should be off by default', function () { + if (isSupported) { + this.skip() + } else { + const config = new Config({}) + assert.strictEqual(config.asyncContextFrameEnabled, false) + } + }) + + it('can not be turned on by env var', function () { + if (isSupported) { + this.skip() + } else { + process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '1' + try { + const config = new Config({}) + assert.strictEqual(config.asyncContextFrameEnabled, false) + } finally { + delete process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + } + } + }) + }) + }) + describe('upload compression settings', () => { const expectConfig = (env, method, level, warning) => { process.env = { diff --git a/yarn.lock b/yarn.lock index 031ea964f01..86b7d7f2abb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -246,10 +246,10 @@ "@datadog/flagging-core" "0.2.0" "@openfeature/server-sdk" "~1.18.0" -"@datadog/pprof@5.12.0": - version "5.12.0" - resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-5.12.0.tgz#1f0a592aec5ea3a48ad795d069e8a925d21667db" - integrity sha512-qX32upm9eqObGVGvqHpjQB2bXVPTX0ccXTW3mUqUWXgJrAKyHtTfo9PqfoXhflYs0WD9el9xl9c0bM1RS4vRmQ== +"@datadog/pprof@5.13.1": + version "5.13.1" + resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-5.13.1.tgz#51c540d75cf4471806db65d0686cbe0a96125ce2" + integrity sha512-7yXQco1xOUFFEHN3UsRw/55603lQTctHcGx9N7PgkcgLGL8t/i5qKalF0AhOKBsLBUnbQ9Iv+ecC2YJErJ07PQ== dependencies: delay "^5.0.0" node-gyp-build "<4.0" From 8595e26de47c0e23d21e6f32ee41f7665c5ed2fb Mon Sep 17 00:00:00 2001 From: Kathie Huang <46662481+kathiehuang@users.noreply.github.com> Date: Wed, 17 Dec 2025 17:29:25 -0500 Subject: [PATCH 02/15] [SVLS-8161] Update Azure Function metadata detection logic (#7100) * Check DD_AZURE_RESOURCE_GROUP env var when detecting resource group metadata for Azure Flex Consumption Functions * Add tests, add 
DD_AZURE_RESOURCE_GROUP to supported configurations * Make flex consumption function detection a function --- packages/dd-trace/src/azure_metadata.js | 10 ++++- packages/dd-trace/src/profiling/config.js | 5 ++- packages/dd-trace/src/serverless.js | 5 +++ .../src/supported-configurations.json | 1 + packages/dd-trace/test/azure_metadata.spec.js | 43 ++++++++++++++++++- 5 files changed, 59 insertions(+), 5 deletions(-) diff --git a/packages/dd-trace/src/azure_metadata.js b/packages/dd-trace/src/azure_metadata.js index 57a548eb2ba..e7120721233 100644 --- a/packages/dd-trace/src/azure_metadata.js +++ b/packages/dd-trace/src/azure_metadata.js @@ -3,7 +3,7 @@ // Modeled after https://github.com/DataDog/libdatadog/blob/f3994857a59bb5679a65967138c5a3aec418a65f/ddcommon/src/azure_app_services.rs const os = require('os') -const { getIsAzureFunction } = require('./serverless') +const { getIsAzureFunction, getIsFlexConsumptionAzureFunction } = require('./serverless') const { getEnvironmentVariable, getEnvironmentVariables } = require('../../dd-trace/src/config-helper') function extractSubscriptionID (ownerName) { @@ -38,6 +38,7 @@ function buildMetadata () { const { COMPUTERNAME, DD_AAS_DOTNET_EXTENSION_VERSION, + DD_AZURE_RESOURCE_GROUP, FUNCTIONS_EXTENSION_VERSION, FUNCTIONS_WORKER_RUNTIME, FUNCTIONS_WORKER_RUNTIME_VERSION, @@ -56,7 +57,12 @@ function buildMetadata () { ? ['functionapp', 'function'] : ['app', 'app'] - const resourceGroup = WEBSITE_RESOURCE_GROUP ?? extractResourceGroup(WEBSITE_OWNER_NAME) + // Azure Functions on Flex Consumption plans require the `DD_AZURE_RESOURCE_GROUP` env var. + // If this logic ever changes, update the logic in `libdatadog`, `serverless-components/src/datadog-trace-agent`, + // and the serverless compat layers accordingly. + const resourceGroup = getIsFlexConsumptionAzureFunction() + ? (DD_AZURE_RESOURCE_GROUP ?? WEBSITE_RESOURCE_GROUP ?? extractResourceGroup(WEBSITE_OWNER_NAME)) + : (WEBSITE_RESOURCE_GROUP ?? 
extractResourceGroup(WEBSITE_OWNER_NAME)) return trimObject({ extensionVersion: DD_AAS_DOTNET_EXTENSION_VERSION, diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index cc05944853b..9542c4a78ba 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -12,9 +12,10 @@ const SpaceProfiler = require('./profilers/space') const EventsProfiler = require('./profilers/events') const { oomExportStrategies, snapshotKinds } = require('./constants') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') +const { getIsAzureFunction } = require('../serverless') const { tagger } = require('./tagger') const { isFalse, isTrue } = require('../util') -const { getAzureTagsFromMetadata, getAzureAppMetadata } = require('../azure_metadata') +const { getAzureTagsFromMetadata, getAzureAppMetadata, getAzureFunctionMetadata } = require('../azure_metadata') const { getEnvironmentVariables } = require('../config-helper') const defaults = require('../config_defaults') @@ -73,7 +74,7 @@ class Config { tagger.parse(DD_TAGS), tagger.parse(options.tags), tagger.parse({ env, host, service, version, functionname }), - getAzureTagsFromMetadata(getAzureAppMetadata()) + getAzureTagsFromMetadata(getIsAzureFunction() ? 
getAzureFunctionMetadata() : getAzureAppMetadata()) ) // Add source code integration tags if available diff --git a/packages/dd-trace/src/serverless.js b/packages/dd-trace/src/serverless.js index 9c4039228a0..7e7e8092fa2 100644 --- a/packages/dd-trace/src/serverless.js +++ b/packages/dd-trace/src/serverless.js @@ -21,6 +21,10 @@ function getIsAzureFunction () { return isAzureFunction } +function getIsFlexConsumptionAzureFunction () { + return getIsAzureFunction() && getEnvironmentVariable('WEBSITE_SKU') === 'FlexConsumption' +} + function isInServerlessEnvironment () { const inAWSLambda = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') !== undefined const isGCPFunction = getIsGCPFunction() @@ -32,5 +36,6 @@ function isInServerlessEnvironment () { module.exports = { getIsGCPFunction, getIsAzureFunction, + getIsFlexConsumptionAzureFunction, isInServerlessEnvironment } diff --git a/packages/dd-trace/src/supported-configurations.json b/packages/dd-trace/src/supported-configurations.json index 7a1aae5b923..5f2ae9184cc 100644 --- a/packages/dd-trace/src/supported-configurations.json +++ b/packages/dd-trace/src/supported-configurations.json @@ -37,6 +37,7 @@ "DD_APPSEC_STACK_TRACE_ENABLED": ["A"], "DD_APPSEC_TRACE_RATE_LIMIT": ["A"], "DD_APPSEC_WAF_TIMEOUT": ["A"], + "DD_AZURE_RESOURCE_GROUP": ["A"], "DD_CIVISIBILITY_AGENTLESS_ENABLED": ["A"], "DD_CIVISIBILITY_AGENTLESS_URL": ["A"], "DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER": ["A"], diff --git a/packages/dd-trace/test/azure_metadata.spec.js b/packages/dd-trace/test/azure_metadata.spec.js index c26b6846ac7..922e22bbc27 100644 --- a/packages/dd-trace/test/azure_metadata.spec.js +++ b/packages/dd-trace/test/azure_metadata.spec.js @@ -6,7 +6,7 @@ const os = require('node:os') require('./setup/core') -const { getAzureAppMetadata, getAzureTagsFromMetadata } = require('../src/azure_metadata') +const { getAzureAppMetadata, getAzureTagsFromMetadata, getAzureFunctionMetadata } = require('../src/azure_metadata') 
describe('Azure metadata', () => { describe('for apps is', () => { @@ -107,4 +107,45 @@ describe('Azure metadata', () => { } assert.deepStrictEqual(getAzureTagsFromMetadata(getAzureAppMetadata()), expected) }) + + it('uses DD_AZURE_RESOURCE_GROUP for Flex Consumption Azure Functions', () => { + delete process.env.WEBSITE_RESOURCE_GROUP + delete process.env.WEBSITE_OS + delete process.env.DD_AAS_DOTNET_EXTENSION_VERSION + process.env.COMPUTERNAME = 'flex_function' + process.env.WEBSITE_SITE_NAME = 'flex_function_app' + process.env.WEBSITE_OWNER_NAME = 'subscription_id+flex-regionwebspace' + process.env.WEBSITE_INSTANCE_ID = 'instance_id' + process.env.WEBSITE_SKU = 'FlexConsumption' + process.env.FUNCTIONS_EXTENSION_VERSION = '4' + process.env.FUNCTIONS_WORKER_RUNTIME = 'node' + process.env.DD_AZURE_RESOURCE_GROUP = 'flex_resource_group' + const expected = { + functionRuntimeVersion: '4', + instanceID: 'instance_id', + instanceName: 'flex_function', + operatingSystem: os.platform(), + resourceGroup: 'flex_resource_group', + resourceID: + '/subscriptions/subscription_id/resourcegroups/flex_resource_group' + + '/providers/microsoft.web/sites/flex_function_app', + runtime: 'node', + siteKind: 'functionapp', + siteName: 'flex_function_app', + siteType: 'function', + subscriptionID: 'subscription_id' + } + assert.deepStrictEqual(getAzureFunctionMetadata(), expected) + }) + + it('uses WEBSITE_RESOURCE_GROUP for non-Flex Consumption plans', () => { + process.env.WEBSITE_SITE_NAME = 'regular_function_app' + process.env.WEBSITE_RESOURCE_GROUP = 'regular_resource_group' + process.env.WEBSITE_OWNER_NAME = 'subscription_id+extracted_group-regionwebspace' + process.env.WEBSITE_SKU = 'Consumption' + process.env.FUNCTIONS_EXTENSION_VERSION = '4' + process.env.DD_AZURE_RESOURCE_GROUP = 'should_not_use_this' + const metadata = getAzureFunctionMetadata() + assert.strictEqual(metadata.resourceGroup, 'regular_resource_group') + }) }) From d79b689b931ab1646c6a94c35a099412e7b5ef5f Mon 
Sep 17 00:00:00 2001 From: Thomas Watson Date: Thu, 18 Dec 2025 14:24:40 +0100 Subject: [PATCH 03/15] ci: pass GITHUB_TOKEN to license attribution step (#7140) The dd-license-attribution tool requires a GitHub token to avoid API rate limits. This change explicitly passes the GITHUB_TOKEN as an environment variable to the license regeneration step. --- .github/workflows/update-3rdparty-licenses.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/update-3rdparty-licenses.yml b/.github/workflows/update-3rdparty-licenses.yml index d6875c39fe2..feef0743d7d 100644 --- a/.github/workflows/update-3rdparty-licenses.yml +++ b/.github/workflows/update-3rdparty-licenses.yml @@ -51,6 +51,8 @@ jobs: EOF - name: Regenerate LICENSE-3rdparty.csv + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | dd-license-attribution generate-sbom-csv \ --use-mirrors=mirrors.json \ From c51a6727c6363832f282537dcb75d15a3bed0151 Mon Sep 17 00:00:00 2001 From: Pablo Erhard <104538390+pabloerhard@users.noreply.github.com> Date: Thu, 18 Dec 2025 11:24:16 -0500 Subject: [PATCH 04/15] fix: failing appsec tests after iitm bump to v2.0.0 (#6935) * Added support for iitm v2.0.0 * Update iitm with master package.json and yarn.lock --- integration-tests/appsec/iast.esm.spec.js | 3 +- package.json | 2 +- packages/datadog-esbuild/src/utils.js | 14 ++++++- .../appsec/iast/taint-tracking/rewriter.js | 12 ++---- yarn.lock | 42 ++++++++++++++----- 5 files changed, 52 insertions(+), 21 deletions(-) diff --git a/integration-tests/appsec/iast.esm.spec.js b/integration-tests/appsec/iast.esm.spec.js index 7dbb47d1de1..53375200ca8 100644 --- a/integration-tests/appsec/iast.esm.spec.js +++ b/integration-tests/appsec/iast.esm.spec.js @@ -17,7 +17,8 @@ describe('ESM', () => { const nodeOptionsList = [ '--import dd-trace/initialize.mjs', - '--require dd-trace/init.js --loader dd-trace/loader-hook.mjs' + '--require dd-trace/init.js --loader dd-trace/loader-hook.mjs', + '--import 
dd-trace/register.js --require dd-trace/init' ] nodeOptionsList.forEach(nodeOptions => { diff --git a/package.json b/package.json index 9147d198bd4..beb5c2c6bb5 100644 --- a/package.json +++ b/package.json @@ -127,7 +127,7 @@ ], "dependencies": { "dc-polyfill": "^0.1.10", - "import-in-the-middle": "^1.14.2" + "import-in-the-middle": "^2.0.0" }, "optionalDependencies": { "@datadog/libdatadog": "0.7.0", diff --git a/packages/datadog-esbuild/src/utils.js b/packages/datadog-esbuild/src/utils.js index 4d4c14f40a1..ea7c72c989f 100644 --- a/packages/datadog-esbuild/src/utils.js +++ b/packages/datadog-esbuild/src/utils.js @@ -7,8 +7,20 @@ const path = require('node:path') const { NODE_MAJOR, NODE_MINOR } = require('../../../version.js') const getExportsImporting = (url) => import(url).then(Object.keys) +let getExportsModulePromise + +const loadGetExportsModule = () => { + if (!getExportsModulePromise) { + getExportsModulePromise = import('import-in-the-middle/lib/get-exports.mjs') + } + return getExportsModulePromise +} + const getExports = NODE_MAJOR >= 20 || (NODE_MAJOR === 18 && NODE_MINOR >= 19) - ? require('import-in-the-middle/lib/get-exports.js') + ? 
async (srcUrl, context, getSource) => { + const mod = await loadGetExportsModule() + return mod.getExports(srcUrl, context, getSource) + } : getExportsImporting function isStarExportLine (line) { diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js index d793c2ebaf9..0118a488310 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js @@ -183,14 +183,10 @@ function enableRewriter (telemetryVerbosity) { } function isEsmConfigured () { - const hasLoaderArg = isFlagPresent('--loader') || isFlagPresent('--experimental-loader') - if (hasLoaderArg) return true - - // Fast path for common case when enabled - if (require.cache[`${process.cwd()}/node_modules/import-in-the-middle/hook.js`]) { - return true - } - return Object.keys(require.cache).some(file => file.endsWith('import-in-the-middle/hook.js')) + return (isFlagPresent('--loader') || + isFlagPresent('--experimental-loader') || + isFlagPresent('dd-trace/initialize.mjs')) || + isFlagPresent('dd-trace/register.js') } let enableEsmRewriter = function (telemetryVerbosity) { diff --git a/yarn.lock b/yarn.lock index 86b7d7f2abb..4fc5b01245e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2642,10 +2642,10 @@ import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-in-the-middle@^1.14.2: - version "1.15.0" - resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.15.0.tgz#9e20827a322bbadaeb5e3bac49ea8f6d4685fdd8" - integrity sha512-bpQy+CrsRmYmoPMAE/0G33iwRqwW4ouqdRg8jgbH3aKuCtOc8lxgmYXg2dMM92CRiGP660EtBcymH/eVUpCSaA== +import-in-the-middle@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-2.0.0.tgz#295948cee94d0565314824c6bd75379d13e5b1a5" + integrity sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A== dependencies: 
acorn "^8.14.0" acorn-import-attributes "^1.9.5" @@ -4428,8 +4428,16 @@ streamsearch@^1.1.0: resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: - name string-width-cjs +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -4493,8 +4501,14 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: - name strip-ansi-cjs +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -4971,8 +4985,7 @@ workerpool@^9.2.0: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-9.3.4.tgz#f6c92395b2141afd78e2a889e80cb338fe9fca41" integrity sha512-TmPRQYYSAnnDiEB0P/Ytip7bFGvqnSU6I2BcuSw7Hx+JSg/DsUi5ebYfc8GYaSdpuvOcEs6dXxPurOYpe9QFwg== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: - name wrap-ansi-cjs +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -4990,6 +5003,15 @@ wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" From e3edba9f520ca6d5160c2d3ef26ad82d68df00da Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Thu, 18 Dec 2025 18:55:57 +0100 Subject: [PATCH 05/15] test: use partialDeepStrictEqual instead of individual assertions (#7133) This makes the test output in case of errors easier to understand as well as making the code easier to read. 
--- .../test/index.spec.js | 1 + packages/datadog-plugin-ai/test/index.spec.js | 278 ++++++++---- .../datadog-plugin-apollo/test/index.spec.js | 202 +++++---- .../test/aws-sdk.spec.js | 10 +- .../test/bedrockruntime.spec.js | 16 +- .../test/dynamodb.spec.js | 42 +- .../datadog-plugin-aws-sdk/test/sns.spec.js | 98 ++--- .../test/integration-test/client.spec.js | 90 ++-- .../eventhubs-test/eventhubs.spec.js | 273 +++++++----- .../servicebus-test/servicebus.spec.js | 209 +++++---- .../test/integration-test/client.spec.js | 227 ++++++---- .../datadog-plugin-connect/test/index.spec.js | 187 +++++--- .../test/index.spec.js | 89 ++-- .../datadog-plugin-express/test/index.spec.js | 400 ++++++++++++------ .../test/tracing.spec.js | 171 +++++--- .../datadog-plugin-fetch/test/index.spec.js | 142 ++++--- .../datadog-plugin-grpc/test/client.spec.js | 103 ++--- .../datadog-plugin-grpc/test/server.spec.js | 9 +- .../datadog-plugin-http/test/client.spec.js | 166 ++++---- .../datadog-plugin-ioredis/test/index.spec.js | 1 + .../datadog-plugin-kafkajs/test/index.spec.js | 1 - .../test/index.spec.js | 135 +++--- .../datadog-plugin-mariadb/test/index.spec.js | 116 ++--- .../datadog-plugin-next/test/index.spec.js | 227 ++++++---- .../datadog-plugin-openai/test/index.spec.js | 296 ++++++++----- packages/datadog-plugin-pg/test/index.spec.js | 126 ++++-- .../datadog-plugin-rhea/test/index.spec.js | 15 +- .../test/plugins/util/inferred_proxy.spec.js | 158 ++++--- 28 files changed, 2317 insertions(+), 1471 deletions(-) diff --git a/packages/datadog-plugin-aerospike/test/index.spec.js b/packages/datadog-plugin-aerospike/test/index.spec.js index b7cd3935f1e..cf8f1fdfa07 100644 --- a/packages/datadog-plugin-aerospike/test/index.spec.js +++ b/packages/datadog-plugin-aerospike/test/index.spec.js @@ -9,6 +9,7 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const agent = require('../../dd-trace/test/plugins/agent') const { withNamingSchema, withPeerService, 
withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') + describe('Plugin', () => { let aerospike let config diff --git a/packages/datadog-plugin-ai/test/index.spec.js b/packages/datadog-plugin-ai/test/index.spec.js index 29b19187613..78939e739e2 100644 --- a/packages/datadog-plugin-ai/test/index.spec.js +++ b/packages/datadog-plugin-ai/test/index.spec.js @@ -1,7 +1,7 @@ 'use strict' const agent = require('../../dd-trace/test/plugins/agent') -const { useEnv } = require('../../../integration-tests/helpers') +const { assertObjectContains, useEnv } = require('../../../integration-tests/helpers') const assert = require('node:assert') const semifies = require('semifies') const { withVersions } = require('../../dd-trace/test/setup/mocha') @@ -85,15 +85,23 @@ describe('Plugin', () => { const generateTextSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateTextSpan.name, 'ai.generateText') - assert.strictEqual(generateTextSpan.resource, 'ai.generateText') - assert.strictEqual(generateTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(generateTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateTextSpan, { + name: 'ai.generateText', + resource: 'ai.generateText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const experimentalTelemetry = { isEnabled: true } 
@@ -118,15 +126,23 @@ describe('Plugin', () => { const generateTextSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateTextSpan.name, 'ai.generateText') - assert.strictEqual(generateTextSpan.resource, 'ai.generateText') - assert.strictEqual(generateTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(generateTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateTextSpan, { + name: 'ai.generateText', + resource: 'ai.generateText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const experimentalTelemetry = { tracer: myTracer } @@ -152,15 +168,23 @@ describe('Plugin', () => { const generateTextSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateTextSpan.name, 'ai.generateText') - assert.strictEqual(generateTextSpan.resource, 'ai.generateText') - assert.strictEqual(generateTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(generateTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateTextSpan, { + name: 'ai.generateText', + resource: 'ai.generateText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateText.doGenerate') - 
assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const experimentalTelemetry = { isEnabled: true, tracer: myTracer } @@ -186,15 +210,23 @@ describe('Plugin', () => { const generateTextSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateTextSpan.name, 'ai.generateText') - assert.strictEqual(generateTextSpan.resource, 'ai.generateText') - assert.strictEqual(generateTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(generateTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateTextSpan, { + name: 'ai.generateText', + resource: 'ai.generateText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const result = await ai.generateText({ @@ -215,15 +247,23 @@ describe('Plugin', () => { const generateObjectSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateObjectSpan.name, 'ai.generateObject') - assert.strictEqual(generateObjectSpan.resource, 'ai.generateObject') - assert.strictEqual(generateObjectSpan.meta['ai.request.model'], 'gpt-4o-mini') - 
assert.strictEqual(generateObjectSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateObjectSpan, { + name: 'ai.generateObject', + resource: 'ai.generateObject', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateObject.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateObject.doGenerate') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateObject.doGenerate', + resource: 'ai.generateObject.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const schema = ai.jsonSchema({ @@ -252,15 +292,23 @@ describe('Plugin', () => { const embedSpan = traces[0][0] const doEmbedSpan = traces[0][1] - assert.strictEqual(embedSpan.name, 'ai.embed') - assert.strictEqual(embedSpan.resource, 'ai.embed') - assert.strictEqual(embedSpan.meta['ai.request.model'], 'text-embedding-ada-002') - assert.strictEqual(embedSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(embedSpan, { + name: 'ai.embed', + resource: 'ai.embed', + meta: { + 'ai.request.model': 'text-embedding-ada-002', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doEmbedSpan.name, 'ai.embed.doEmbed') - assert.strictEqual(doEmbedSpan.resource, 'ai.embed.doEmbed') - assert.strictEqual(doEmbedSpan.meta['ai.request.model'], 'text-embedding-ada-002') - assert.strictEqual(doEmbedSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doEmbedSpan, { + name: 'ai.embed.doEmbed', + resource: 'ai.embed.doEmbed', + meta: { + 'ai.request.model': 'text-embedding-ada-002', + 'ai.request.model_provider': 'openai' + } + }) }) const result = await ai.embed({ @@ -278,15 +326,23 @@ describe('Plugin', () => { const 
embedManySpan = traces[0][0] const doEmbedSpan = traces[0][1] - assert.strictEqual(embedManySpan.name, 'ai.embedMany') - assert.strictEqual(embedManySpan.resource, 'ai.embedMany') - assert.strictEqual(embedManySpan.meta['ai.request.model'], 'text-embedding-ada-002') - assert.strictEqual(embedManySpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(embedManySpan, { + name: 'ai.embedMany', + resource: 'ai.embedMany', + meta: { + 'ai.request.model': 'text-embedding-ada-002', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doEmbedSpan.name, 'ai.embedMany.doEmbed') - assert.strictEqual(doEmbedSpan.resource, 'ai.embedMany.doEmbed') - assert.strictEqual(doEmbedSpan.meta['ai.request.model'], 'text-embedding-ada-002') - assert.strictEqual(doEmbedSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doEmbedSpan, { + name: 'ai.embedMany.doEmbed', + resource: 'ai.embedMany.doEmbed', + meta: { + 'ai.request.model': 'text-embedding-ada-002', + 'ai.request.model_provider': 'openai' + } + }) }) const result = await ai.embedMany({ @@ -304,15 +360,23 @@ describe('Plugin', () => { const streamTextSpan = traces[0][0] const doStreamSpan = traces[0][1] - assert.strictEqual(streamTextSpan.name, 'ai.streamText') - assert.strictEqual(streamTextSpan.resource, 'ai.streamText') - assert.strictEqual(streamTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(streamTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(streamTextSpan, { + name: 'ai.streamText', + resource: 'ai.streamText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doStreamSpan.name, 'ai.streamText.doStream') - assert.strictEqual(doStreamSpan.resource, 'ai.streamText.doStream') - assert.strictEqual(doStreamSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doStreamSpan.meta['ai.request.model_provider'], 'openai') + 
assertObjectContains(doStreamSpan, { + name: 'ai.streamText.doStream', + resource: 'ai.streamText.doStream', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const result = await ai.streamText({ @@ -339,15 +403,23 @@ describe('Plugin', () => { const streamObjectSpan = traces[0][0] const doStreamSpan = traces[0][1] - assert.strictEqual(streamObjectSpan.name, 'ai.streamObject') - assert.strictEqual(streamObjectSpan.resource, 'ai.streamObject') - assert.strictEqual(streamObjectSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(streamObjectSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(streamObjectSpan, { + name: 'ai.streamObject', + resource: 'ai.streamObject', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doStreamSpan.name, 'ai.streamObject.doStream') - assert.strictEqual(doStreamSpan.resource, 'ai.streamObject.doStream') - assert.strictEqual(doStreamSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doStreamSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doStreamSpan, { + name: 'ai.streamObject.doStream', + resource: 'ai.streamObject.doStream', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const schema = ai.jsonSchema({ @@ -384,23 +456,35 @@ describe('Plugin', () => { const toolCallSpan2 = traces[0][2] const doGenerateSpan2 = traces[0][3] - assert.strictEqual(toolCallSpan.name, 'ai.generateText') - assert.strictEqual(toolCallSpan.resource, 'ai.generateText') - assert.strictEqual(toolCallSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(toolCallSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(toolCallSpan, { + name: 'ai.generateText', + resource: 'ai.generateText', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - 
assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) assert.strictEqual(toolCallSpan2.name, 'ai.toolCall') assert.strictEqual(toolCallSpan2.resource, 'ai.toolCall') - assert.strictEqual(doGenerateSpan2.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan2.resource, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan2.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(doGenerateSpan2.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan2, { + name: 'ai.generateText.doGenerate', + resource: 'ai.generateText.doGenerate', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) let tools @@ -462,15 +546,23 @@ describe('Plugin', () => { const generateTextSpan = traces[0][0] const doGenerateSpan = traces[0][1] - assert.strictEqual(generateTextSpan.name, 'ai.generateText') - assert.strictEqual(generateTextSpan.resource, 'test') - assert.strictEqual(generateTextSpan.meta['ai.request.model'], 'gpt-4o-mini') - assert.strictEqual(generateTextSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(generateTextSpan, { + name: 'ai.generateText', + resource: 'test', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) - assert.strictEqual(doGenerateSpan.name, 'ai.generateText.doGenerate') - assert.strictEqual(doGenerateSpan.resource, 'test') - assert.strictEqual(doGenerateSpan.meta['ai.request.model'], 'gpt-4o-mini') - 
assert.strictEqual(doGenerateSpan.meta['ai.request.model_provider'], 'openai') + assertObjectContains(doGenerateSpan, { + name: 'ai.generateText.doGenerate', + resource: 'test', + meta: { + 'ai.request.model': 'gpt-4o-mini', + 'ai.request.model_provider': 'openai' + } + }) }) const result = await ai.generateText({ diff --git a/packages/datadog-plugin-apollo/test/index.spec.js b/packages/datadog-plugin-apollo/test/index.spec.js index 7bb8f82a9fe..506fdbc231c 100644 --- a/packages/datadog-plugin-apollo/test/index.spec.js +++ b/packages/datadog-plugin-apollo/test/index.spec.js @@ -9,6 +9,7 @@ const agent = require('../../dd-trace/test/plugins/agent.js') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') const accounts = require('./fixtures.js') const { expectedSchema, rawExpectedSchema } = require('./naming.js') +const { assertObjectContains } = require('../../../integration-tests/helpers') const fixtures = [accounts] const typeDefs = accounts.typeDefs @@ -148,47 +149,63 @@ describe('Plugin', () => { agent .assertSomeTraces((traces) => { // the spans are in order of execution - assert.strictEqual(traces[0][0].name, expectedSchema.server.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][0].resource, 'query MyQuery{hello(name:"")}') - assert.strictEqual(traces[0][0].type, 'web') - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['graphql.operation.name'], operationName) + assertObjectContains(traces[0][0], { + name: expectedSchema.server.opName, + service: expectedSchema.server.serviceName, + resource: 'query MyQuery{hello(name:"")}', + type: 'web', + error: 0, + meta: { + 'graphql.operation.name': operationName, + 'graphql.operation.type': 'query', + component: 'apollo.gateway', + '_dd.integration': 'apollo.gateway' + } + }) assert.ok(!('graphql.source' in traces[0][0].meta)) - 
assert.strictEqual(traces[0][0].meta['graphql.operation.type'], 'query') - assert.strictEqual(traces[0][0].meta.component, 'apollo.gateway') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'apollo.gateway') - assert.strictEqual(traces[0][1].name, 'apollo.gateway.validate') - assert.strictEqual(traces[0][1].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][1].type, 'web') - assert.strictEqual(traces[0][1].error, 0) - assert.strictEqual(traces[0][1].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][1], { + name: 'apollo.gateway.validate', + service: expectedSchema.server.serviceName, + type: 'web', + error: 0, + meta: { component: 'apollo.gateway' } + }) - assert.strictEqual(traces[0][2].name, 'apollo.gateway.plan') - assert.strictEqual(traces[0][2].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][2].type, 'web') - assert.strictEqual(traces[0][2].error, 0) - assert.strictEqual(traces[0][2].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][2], { + name: 'apollo.gateway.plan', + service: expectedSchema.server.serviceName, + type: 'web', + error: 0, + meta: { component: 'apollo.gateway' } + }) - assert.strictEqual(traces[0][3].name, 'apollo.gateway.execute') - assert.strictEqual(traces[0][3].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][3].type, 'web') - assert.strictEqual(traces[0][3].error, 0) - assert.strictEqual(traces[0][3].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][3], { + name: 'apollo.gateway.execute', + service: expectedSchema.server.serviceName, + type: 'web', + error: 0, + meta: { component: 'apollo.gateway' } + }) - assert.strictEqual(traces[0][4].name, 'apollo.gateway.fetch') - assert.strictEqual(traces[0][4].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][4].type, 'web') - assert.strictEqual(traces[0][4].error, 0) - assert.strictEqual(traces[0][4].meta.serviceName, 'accounts') - 
assert.strictEqual(traces[0][4].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][4], { + name: 'apollo.gateway.fetch', + service: expectedSchema.server.serviceName, + type: 'web', + error: 0, + meta: { + serviceName: 'accounts', + component: 'apollo.gateway' + } + }) - assert.strictEqual(traces[0][5].name, 'apollo.gateway.postprocessing') - assert.strictEqual(traces[0][5].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][5].type, 'web') - assert.strictEqual(traces[0][5].error, 0) - assert.strictEqual(traces[0][5].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][5], { + name: 'apollo.gateway.postprocessing', + service: expectedSchema.server.serviceName, + type: 'web', + error: 0, + meta: { component: 'apollo.gateway' } + }) }) .then(done) .catch(done) @@ -209,8 +226,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].type, 'web') assert.strictEqual(traces[0][0].error, 0) assert.ok(!('graphql.source' in traces[0][0].meta)) - assert.strictEqual(traces[0][0].meta['graphql.operation.type'], 'query') - assert.strictEqual(traces[0][0].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][0].meta, { + 'graphql.operation.type': 'query', + component: 'apollo.gateway' + }) }) .then(done) .catch(done) @@ -241,8 +260,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].type, 'web') assert.strictEqual(traces[0][0].error, 0) assert.ok(!('graphql.source' in traces[0][0].meta)) - assert.strictEqual(traces[0][0].meta['graphql.operation.type'], 'query') - assert.strictEqual(traces[0][0].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][0].meta, { + 'graphql.operation.type': 'query', + component: 'apollo.gateway' + }) }) .then(done) .catch(done) @@ -256,10 +277,11 @@ describe('Plugin', () => { it('should instrument mutations', done => { const source = 'mutation { human { name } }' - agent - .assertSomeTraces((traces) => { - 
assert.strictEqual(traces[0][0].meta['graphql.operation.type'], 'mutation') - }) + agent.assertFirstTraceSpan({ + meta: { + 'graphql.operation.type': 'mutation' + } + }) .then(done) .catch(done) @@ -294,21 +316,29 @@ describe('Plugin', () => { agent .assertSomeTraces((traces) => { assert.strictEqual(traces[0].length, 2) - assert.strictEqual(traces[0][0].name, expectedSchema.server.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][0].error, 1) - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][0], { + name: expectedSchema.server.opName, + service: expectedSchema.server.serviceName, + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'apollo.gateway' + } + }) - assert.strictEqual(traces[0][1].name, 'apollo.gateway.validate') - assert.strictEqual(traces[0][1].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][1].error, 1) - assert.strictEqual(traces[0][1].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][1].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][1].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][1].meta.component, 'apollo.gateway') + assertObjectContains(traces[0][1], { + name: 'apollo.gateway.validate', + service: expectedSchema.server.serviceName, + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'apollo.gateway' + } + }) }) .then(done) .catch(done) @@ -336,12 +366,16 @@ describe('Plugin', () => { assert.strictEqual(traces[0][1].name, 'apollo.gateway.validate') assert.strictEqual(traces[0][1].error, 0) - 
assert.strictEqual(traces[0][2].name, 'apollo.gateway.plan') - assert.strictEqual(traces[0][2].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][2].error, 1) - assert.strictEqual(traces[0][2].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][2].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][2].meta[ERROR_STACK], error.stack) + assertObjectContains(traces[0][2], { + name: 'apollo.gateway.plan', + service: expectedSchema.server.serviceName, + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack + } + }) }) .then(done) .catch(done) @@ -363,11 +397,15 @@ describe('Plugin', () => { const variableValues = { who: 'world' } agent .assertSomeTraces((traces) => { - assert.strictEqual(traces[0][0].name, expectedSchema.server.opName) - assert.strictEqual(traces[0][0].error, 1) - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) + assertObjectContains(traces[0][0], { + name: expectedSchema.server.opName, + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack + } + }) assert.strictEqual(traces[0][1].name, 'apollo.gateway.validate') assert.strictEqual(traces[0][1].error, 0) @@ -382,18 +420,26 @@ describe('Plugin', () => { // in version 2.3.0, there is no recordExceptions method thus we can't ever attach an error to the // fetch span but instead the error will be propagated to the request span and be set there if (version > '2.3.0') { - assert.strictEqual(traces[0][3].error, 1) - assert.strictEqual(traces[0][3].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][3].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][3].meta[ERROR_STACK], error.stack) + assertObjectContains(traces[0][3], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + 
[ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack + } + }) } else { assert.strictEqual(traces[0][3].error, 0) } - assert.strictEqual(traces[0][4].name, 'apollo.gateway.fetch') - assert.strictEqual(traces[0][4].service, expectedSchema.server.serviceName) - assert.strictEqual(traces[0][4].error, 1) - assert.strictEqual(traces[0][4].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][4].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][4].meta[ERROR_STACK], error.stack) + assertObjectContains(traces[0][4], { + name: 'apollo.gateway.fetch', + service: expectedSchema.server.serviceName, + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack + } + }) assert.strictEqual(traces[0][5].name, 'apollo.gateway.postprocessing') assert.strictEqual(traces[0][5].error, 0) diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index 3edc445f41b..d2678ba03f8 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -285,10 +285,12 @@ describe('Plugin', () => { resource: 'listBuckets', service: 'test' }) - assert.strictEqual(span.error, 0) - assertObjectContains(span.meta, { - 'hook.operation': 'listBuckets', - component: 'aws-sdk' + assertObjectContains(span, { + error: 0, + meta: { + 'hook.operation': 'listBuckets', + component: 'aws-sdk' + } }) }).then(done, done) diff --git a/packages/datadog-plugin-aws-sdk/test/bedrockruntime.spec.js b/packages/datadog-plugin-aws-sdk/test/bedrockruntime.spec.js index 1e69801b103..62fb3840c47 100644 --- a/packages/datadog-plugin-aws-sdk/test/bedrockruntime.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/bedrockruntime.spec.js @@ -7,8 +7,6 @@ const { setup } = require('./spec_helpers') const { models } = require('./fixtures/bedrockruntime') const { withVersions } = require('../../dd-trace/test/setup/mocha') const 
assert = require('node:assert') -const { assertObjectContains } = require('../../../integration-tests/helpers') - const serviceName = 'bedrock-service-name-test' describe('Plugin', () => { @@ -59,13 +57,12 @@ describe('Plugin', () => { const command = new AWS.InvokeModelCommand(request) - const tracesPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - assertObjectContains(span.meta, { + const tracesPromise = agent.assertFirstTraceSpan({ + meta: { 'aws.operation': 'invokeModel', 'aws.bedrock.request.model': model.modelId.split('.')[1], 'aws.bedrock.request.model_provider': model.provider.toLowerCase(), - }) + } }) await bedrockRuntimeClient.send(command) @@ -82,13 +79,12 @@ describe('Plugin', () => { const command = new AWS.InvokeModelWithResponseStreamCommand(request) - const tracesPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - assertObjectContains(span.meta, { + const tracesPromise = agent.assertFirstTraceSpan({ + meta: { 'aws.operation': 'invokeModelWithResponseStream', 'aws.bedrock.request.model': model.modelId.split('.')[1], 'aws.bedrock.request.model_provider': model.provider.toLowerCase(), - }) + } }) const stream = await bedrockRuntimeClient.send(command) diff --git a/packages/datadog-plugin-aws-sdk/test/dynamodb.spec.js b/packages/datadog-plugin-aws-sdk/test/dynamodb.spec.js index 38d2fe176c4..3ec43832968 100644 --- a/packages/datadog-plugin-aws-sdk/test/dynamodb.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/dynamodb.spec.js @@ -13,8 +13,6 @@ const { withVersions } = require('../../dd-trace/test/setup/mocha') const DynamoDb = require('../src/services/dynamodb') const { generatePointerHash } = require('../src/util') const { setup } = require('./spec_helpers') -const { assertObjectContains } = require('../../../integration-tests/helpers') - /* eslint-disable no-console */ async function resetLocalStackDynamo () { try { @@ -160,18 +158,16 @@ describe('Plugin', () => { describe('with payload tagging', () 
=> { it('adds request and response payloads as flattened tags for putItem', async () => { - const agentPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `putItem ${oneKeyTableName}`) - assertObjectContains(span.meta, { + const agentPromise = agent.assertFirstTraceSpan({ + resource: `putItem ${oneKeyTableName}`, + meta: { 'aws.dynamodb.table_name': oneKeyTableName, aws_service: 'DynamoDB', region: 'us-east-1', 'aws.request.body.TableName': oneKeyTableName, 'aws.request.body.Item.name': 'redacted', 'aws.request.body.Item.data.S': 'test-data' - }) + } }) const operation = () => promisify(dynamo.putItem)({ @@ -186,18 +182,16 @@ describe('Plugin', () => { }) it('adds request and response payloads as flattened tags for updateItem', async () => { - const agentPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `updateItem ${oneKeyTableName}`) - assertObjectContains(span.meta, { + const agentPromise = agent.assertFirstTraceSpan({ + resource: `updateItem ${oneKeyTableName}`, + meta: { 'aws.dynamodb.table_name': oneKeyTableName, aws_service: 'DynamoDB', region: 'us-east-1', 'aws.request.body.TableName': oneKeyTableName, 'aws.request.body.Key.name.S': 'test-name', 'aws.request.body.AttributeUpdates.data.Value.S': 'updated-data' - }) + } }) const operation = () => promisify(dynamo.updateItem)({ @@ -217,17 +211,15 @@ describe('Plugin', () => { }) it('adds request and response payloads as flattened tags for deleteItem', async () => { - const agentPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `deleteItem ${oneKeyTableName}`) - assertObjectContains(span.meta, { + const agentPromise = agent.assertFirstTraceSpan({ + resource: `deleteItem ${oneKeyTableName}`, + meta: { 'aws.dynamodb.table_name': oneKeyTableName, aws_service: 'DynamoDB', region: 'us-east-1', 'aws.request.body.TableName': 
oneKeyTableName, 'aws.request.body.Key.name.S': 'test-name' - }) + } }) const operation = () => promisify(dynamo.deleteItem)({ @@ -253,11 +245,9 @@ describe('Plugin', () => { // Wait a bit to ensure the put completes await wait(100) - const agentPromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `getItem ${oneKeyTableName}`) - assertObjectContains(span.meta, { + const agentPromise = agent.assertFirstTraceSpan({ + resource: `getItem ${oneKeyTableName}`, + meta: { 'aws.dynamodb.table_name': oneKeyTableName, aws_service: 'DynamoDB', region: 'us-east-1', @@ -265,7 +255,7 @@ describe('Plugin', () => { 'aws.request.body.Key.name.S': 'test-get-name', 'aws.response.body.Item.name.S': 'test-get-name', 'aws.response.body.Item.data': 'redacted' - }) + } }) const operation = () => promisify(dynamo.getItem)({ diff --git a/packages/datadog-plugin-aws-sdk/test/sns.spec.js b/packages/datadog-plugin-aws-sdk/test/sns.spec.js index 3565c1495d5..544049a974a 100644 --- a/packages/datadog-plugin-aws-sdk/test/sns.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sns.spec.js @@ -118,11 +118,9 @@ describe('Sns', function () { }) it('adds request and response payloads as flattened tags', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `publish ${TopicArn}`) - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: `publish ${TopicArn}`, + meta: { 'aws.sns.topic_arn': TopicArn, topicname: 'TestTopic', aws_service: 'SNS', @@ -136,7 +134,7 @@ describe('Sns', function () { 'aws.request.body.MessageAttributes.keyTwo.DataType': 'String', 'aws.request.body.MessageAttributes.keyTwo.StringValue': 'keyTwo', 'aws.response.body.MessageId': 'redacted' - }) + } }, { timeoutMs: 20000 }).then(done, done) sns.publish({ @@ -151,11 +149,9 @@ describe('Sns', function () { }) it('expands and redacts keys identified as expandable', done => { - 
agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `publish ${TopicArn}`) - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: `publish ${TopicArn}`, + meta: { 'aws.sns.topic_arn': TopicArn, topicname: 'TestTopic', aws_service: 'SNS', @@ -166,7 +162,7 @@ describe('Sns', function () { 'aws.request.body.MessageAttributes.unredacted.StringValue.foo': 'bar', 'aws.request.body.MessageAttributes.unredacted.StringValue.baz': 'yup', 'aws.response.body.MessageId': 'redacted' - }) + } }, { timeoutMs: 20000 }).then(done, done) sns.publish({ @@ -184,20 +180,23 @@ describe('Sns', function () { agent.assertSomeTraces(traces => { const span = traces[0][0] - assert.strictEqual(span.resource, `publish ${TopicArn}`) - assertObjectContains(span.meta, { - 'aws.sns.topic_arn': TopicArn, - topicname: 'TestTopic', - aws_service: 'SNS', - region: 'us-east-1', - 'aws.request.body.TopicArn': TopicArn, - 'aws.request.body.Message': 'message 1', - 'aws.request.body.MessageAttributes.foo': 'redacted', - 'aws.request.body.MessageAttributes.keyOne.DataType': 'String', - 'aws.request.body.MessageAttributes.keyOne.StringValue': 'keyOne', - 'aws.request.body.MessageAttributes.keyTwo.DataType': 'String', - 'aws.request.body.MessageAttributes.keyTwo.StringValue': 'keyTwo' + assertObjectContains(span, { + resource: `publish ${TopicArn}`, + meta: { + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.TopicArn': TopicArn, + 'aws.request.body.Message': 'message 1', + 'aws.request.body.MessageAttributes.foo': 'redacted', + 'aws.request.body.MessageAttributes.keyOne.DataType': 'String', + 'aws.request.body.MessageAttributes.keyOne.StringValue': 'keyOne', + 'aws.request.body.MessageAttributes.keyTwo.DataType': 'String', + 'aws.request.body.MessageAttributes.keyTwo.StringValue': 'keyTwo' + } }) + assert.ok(Object.hasOwn(span.meta, 'aws.response.body.MessageId')) 
}, { timeoutMs: 20000 }).then(done, done) @@ -214,17 +213,16 @@ describe('Sns', function () { // TODO add response tests it('redacts user-defined keys to suppress in response', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - assert.strictEqual(span.resource, `getTopicAttributes ${TopicArn}`) - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: `getTopicAttributes ${TopicArn}`, + meta: { 'aws.sns.topic_arn': TopicArn, topicname: 'TestTopic', aws_service: 'SNS', region: 'us-east-1', 'aws.request.body.TopicArn': TopicArn, 'aws.response.body.Attributes.DisplayName': 'redacted' - }) + } }, { timeoutMs: 20000 }).then(done, done) sns.getTopicAttributes({ TopicArn }, e => e && done(e)) @@ -258,16 +256,14 @@ describe('Sns', function () { }) it('redacts phone numbers in request', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, 'publish') - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: 'publish', + meta: { aws_service: 'SNS', region: 'us-east-1', 'aws.request.body.PhoneNumber': 'redacted', 'aws.request.body.Message': 'message 1' - }) + } }, { timeoutMs: 20000 }).then(done, done) sns.publish({ @@ -277,15 +273,13 @@ describe('Sns', function () { }) it('redacts phone numbers in response', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, 'publish') - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: 'publish', + meta: { aws_service: 'SNS', region: 'us-east-1', 'aws.response.body.PhoneNumber': 'redacted' - }) + } }, { timeoutMs: 20000 }).then(done, done) sns.listSMSSandboxPhoneNumbers({ @@ -298,18 +292,16 @@ describe('Sns', function () { describe('subscription confirmation tokens', () => { it('redacts tokens in request', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, 
`confirmSubscription ${TopicArn}`) - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: `confirmSubscription ${TopicArn}`, + meta: { aws_service: 'SNS', 'aws.sns.topic_arn': TopicArn, topicname: 'TestTopic', region: 'us-east-1', 'aws.request.body.Token': 'redacted', 'aws.request.body.TopicArn': TopicArn - }) + } }).then(done, done) sns.confirmSubscription({ @@ -487,16 +479,14 @@ describe('Sns', function () { } it('generates tags for proper publish calls', done => { - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - assert.strictEqual(span.resource, `publish ${TopicArn}`) - assertObjectContains(span.meta, { + agent.assertFirstTraceSpan({ + resource: `publish ${TopicArn}`, + meta: { 'aws.sns.topic_arn': TopicArn, topicname: 'TestTopic', aws_service: 'SNS', region: 'us-east-1' - }) + } }).then(done, done) sns.publish({ TopicArn, Message: 'message 1' }, e => e && done(e)) diff --git a/packages/datadog-plugin-azure-event-hubs/test/integration-test/client.spec.js b/packages/datadog-plugin-azure-event-hubs/test/integration-test/client.spec.js index 732dfe93c66..6cedeb8dd43 100644 --- a/packages/datadog-plugin-azure-event-hubs/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-azure-event-hubs/test/integration-test/client.spec.js @@ -7,6 +7,7 @@ const { FakeAgent, sandboxCwd, useSandbox, + assertObjectContains, checkSpansForServiceName, spawnPluginIntegrationTestProc } = require('../../../../integration-tests/helpers') @@ -48,39 +49,62 @@ describe.skip('esm', () => { const res = agent.assertMessageReceived(({ headers, payload }) => { // list of EventData assert.strictEqual(payload.length, 5) - assert.strictEqual(payload[0][0].name, 'azure.eventhubs.send') - assert.strictEqual(payload[0][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[0][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[0][0].meta['messaging.operation'], 'send') - 
assert.strictEqual(payload[0][0].meta['network.destination.name'], '127.0.0.1:5673') - assert.strictEqual(payload[0][0].metrics['messaging.batch.message_count'], 2) + assertObjectContains(payload[0][0], { + name: 'azure.eventhubs.send', + meta: { + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1:5673' + }, + metrics: { + 'messaging.batch.message_count': 2 + } + }) // list of AMPQ messages - assert.strictEqual(payload[1][0].name, 'azure.eventhubs.send') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[1][0].meta['network.destination.name'], '127.0.0.1:5673') - assert.strictEqual(payload[1][0].metrics['messaging.batch.message_count'], 2) + assertObjectContains(payload[1][0], { + name: 'azure.eventhubs.send', + meta: { + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1:5673' + }, + metrics: { + 'messaging.batch.message_count': 2 + } + }) // Batch -> EventDataBatchImpl - assert.strictEqual(payload[2][0].name, 'azure.eventhubs.create') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['network.destination.name'], '127.0.0.1:5673') - assert.strictEqual(payload[3][0].name, 'azure.eventhubs.create') - assert.strictEqual(payload[3][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[3][0].meta['messaging.destination.name'], 'eh1') - 
assert.strictEqual(payload[3][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[3][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[3][0].meta['network.destination.name'], '127.0.0.1:5673') - assert.strictEqual(payload[4][0].name, 'azure.eventhubs.send') - assert.strictEqual(payload[4][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[4][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[4][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[4][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[4][0].meta['network.destination.name'], '127.0.0.1:5673') - assert.strictEqual(payload[4][0].metrics['messaging.batch.message_count'], 4) + assertObjectContains(payload[2][0], { + name: 'azure.eventhubs.create', + meta: { + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1:5673' + } + }) + assertObjectContains(payload[3][0], { + name: 'azure.eventhubs.create', + meta: { + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1:5673' + } + }) + assertObjectContains(payload[4][0], { + name: 'azure.eventhubs.send', + meta: { + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1:5673' + }, + metrics: { + 'messaging.batch.message_count': 4 + } + }) assert.strictEqual(parseLinks(payload[4][0]).length, 2) }) @@ -92,8 +116,8 @@ describe.skip('esm', () => { const res = agent.assertMessageReceived(({ headers, payload }) => { assert.ok(!('_dd.span_links' in payload[2][0])) }) - const envVar = { DD_TRACE_AZURE_EVENTHUBS_BATCH_LINKS_ENABLED: false, ...spawnEnv } - proc = await spawnPluginIntegrationTestProc(sandboxCwd(), 'server.mjs', agent.port, undefined, envVar) + const envVar = { 
DD_TRACE_AZURE_EVENTHUBS_BATCH_LINKS_ENABLED: 'false', ...spawnEnv } + proc = await spawnPluginIntegrationTestProc(sandboxCwd(), 'server.mjs', agent.port, envVar) await res }).timeout(60000) }) diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/eventhubs-test/eventhubs.spec.js b/packages/datadog-plugin-azure-functions/test/integration-test/eventhubs-test/eventhubs.spec.js index 87ab3eb19a6..2cba6ff3b93 100644 --- a/packages/datadog-plugin-azure-functions/test/integration-test/eventhubs-test/eventhubs.spec.js +++ b/packages/datadog-plugin-azure-functions/test/integration-test/eventhubs-test/eventhubs.spec.js @@ -4,6 +4,7 @@ const assert = require('node:assert/strict') const { FakeAgent, + assertObjectContains, hookFile, sandboxCwd, useSandbox, @@ -46,19 +47,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh1-eventdata', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 
'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -71,19 +80,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh1-amqpmessages', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + 
meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -95,19 +112,27 @@ describe('esm', () => { proc = await spawnPluginIntegrationTestProc(sandboxCwd(), 'func', ['start'], agent.port, undefined, envArgs) return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh1-batch', ({ headers, payload }) => { - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) 
}) }).timeout(60000) @@ -120,12 +145,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh2-eventdata', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -138,12 +167,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh2-amqpmessages', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -156,12 +189,16 @@ describe('esm', () 
=> { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh2-batch', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -174,12 +211,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh1-enqueueEvent', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -192,19 +233,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 
'http://127.0.0.1:7071/api/eh1-enqueueEvents', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -217,19 +266,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh1-enqueueAmqp', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest1') - 
assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'EventHubs eventHubTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'eh1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -242,12 +299,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/eh2-enqueueEvent', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - 
assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -264,12 +325,16 @@ describe('esm', () => { assert.strictEqual(payload[0][2].name, 'azure.eventhubs.create') assert.strictEqual(payload[0][3].name, 'azure.eventhubs.send') assert.strictEqual(parseLinks(payload[0][3]).length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -286,12 +351,16 @@ describe('esm', () => { assert.strictEqual(payload[0][2].name, 'azure.eventhubs.create') assert.strictEqual(payload[0][3].name, 'azure.eventhubs.send') assert.strictEqual(parseLinks(payload[0][3]).length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'EventHubs eventHubTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'eventhubs') - 
assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'eh2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'EventHubs eventHubTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'eventhubs', + 'messaging.destination.name': 'eh2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) diff --git a/packages/datadog-plugin-azure-functions/test/integration-test/servicebus-test/servicebus.spec.js b/packages/datadog-plugin-azure-functions/test/integration-test/servicebus-test/servicebus.spec.js index 4122c847061..4e74a7a475c 100644 --- a/packages/datadog-plugin-azure-functions/test/integration-test/servicebus-test/servicebus.spec.js +++ b/packages/datadog-plugin-azure-functions/test/integration-test/servicebus-test/servicebus.spec.js @@ -4,6 +4,7 @@ const assert = require('node:assert/strict') const { FakeAgent, + assertObjectContains, hookFile, sandboxCwd, useSandbox, @@ -46,12 +47,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-message-1', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) 
assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -64,19 +69,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-messages-1', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -89,12 +102,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-amqp-message-1', ({ headers, payload }) => { 
assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -107,19 +124,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-amqp-messages-1', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - 
assert.strictEqual(payload[2][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -132,19 +157,27 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-message-batch-1', ({ headers, payload }) => { assert.strictEqual(payload.length, 3) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) - assert.strictEqual(payload[2][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[2][0].resource, 'ServiceBus queueTest1') - assert.strictEqual(payload[2][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[2][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[2][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[2][0].meta['span.kind'], 'consumer') + 
assertObjectContains(payload[2][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest1', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[2][0]).length, 1) }) }).timeout(60000) @@ -157,12 +190,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-message-2', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -175,12 +212,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-messages-2', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 
'azure.functions.invoke', + resource: 'ServiceBus queueTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -193,12 +234,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-amqp-message-2', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 1) }) }).timeout(60000) @@ -211,12 +256,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-amqp-messages-2', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 
'ServiceBus queueTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) @@ -229,12 +278,16 @@ describe('esm', () => { return curlAndAssertMessage(agent, 'http://127.0.0.1:7071/api/send-message-batch-2', ({ headers, payload }) => { assert.strictEqual(payload.length, 2) - assert.strictEqual(payload[1][0].name, 'azure.functions.invoke') - assert.strictEqual(payload[1][0].resource, 'ServiceBus queueTest2') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'receive') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.destination.name'], 'queue.2') - assert.strictEqual(payload[1][0].meta['span.kind'], 'consumer') + assertObjectContains(payload[1][0], { + name: 'azure.functions.invoke', + resource: 'ServiceBus queueTest2', + meta: { + 'messaging.operation': 'receive', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.2', + 'span.kind': 'consumer' + } + }) assert.strictEqual(parseLinks(payload[1][0]).length, 2) }) }).timeout(60000) diff --git a/packages/datadog-plugin-azure-service-bus/test/integration-test/client.spec.js b/packages/datadog-plugin-azure-service-bus/test/integration-test/client.spec.js index b11f370c444..86a620f0596 100644 --- a/packages/datadog-plugin-azure-service-bus/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-azure-service-bus/test/integration-test/client.spec.js @@ -4,6 +4,7 @@ const assert = require('node:assert/strict') const { FakeAgent, + assertObjectContains, sandboxCwd, useSandbox, spawnPluginIntegrationTestProc @@ -44,105 +45,177 @@ describe('esm', () => { const res = agent.assertMessageReceived(({ headers, payload }) => { assert.strictEqual(payload.length, 23) // queue message - assert.strictEqual(payload[0][0].name, 
'azure.servicebus.send') - assert.strictEqual(payload[0][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[0][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[0][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[0][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[0][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[0][0], { + name: 'azure.servicebus.send', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1' + } + }) // queue array of messages - assert.strictEqual(payload[1][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[1][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[1][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[1][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[1][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[1][0], { + name: 'azure.servicebus.create', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[2][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[3][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[3][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[3][0].meta['messaging.destination.name'], 'queue.1') + assertObjectContains(payload[3][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.operation': 'send', + 'messaging.destination.name': 'queue.1' + } + }) // queue amqp messages assert.strictEqual(payload[1][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[4][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[4][0].meta['messaging.system'], 'servicebus') - 
assert.strictEqual(payload[4][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[4][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[4][0], { + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[5][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[6][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[6][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[6][0].meta['messaging.destination.name'], 'queue.1') + assertObjectContains(payload[6][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.operation': 'send', + 'messaging.destination.name': 'queue.1' + } + }) // topic message - assert.strictEqual(payload[7][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[7][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[7][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[7][0].meta['messaging.destination.name'], 'topic.1') - assert.strictEqual(payload[7][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[7][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[7][0], { + name: 'azure.servicebus.send', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'topic.1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1' + } + }) // topic array of messages - assert.strictEqual(payload[8][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[8][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[8][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[8][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[8][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[8][0], { + name: 
'azure.servicebus.create', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[9][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[10][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[10][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[10][0].meta['messaging.destination.name'], 'topic.1') + assertObjectContains(payload[10][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.operation': 'send', + 'messaging.destination.name': 'topic.1' + } + }) // topic amqp messages - assert.strictEqual(payload[11][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[11][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[11][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[11][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[11][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[11][0], { + name: 'azure.servicebus.create', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[12][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[13][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[13][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[13][0].meta['messaging.destination.name'], 'topic.1') + assertObjectContains(payload[13][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.operation': 'send', + 'messaging.destination.name': 'topic.1' + } + }) // scheduled message - assert.strictEqual(payload[14][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[14][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[14][0].meta['messaging.system'], 'servicebus') - 
assert.strictEqual(payload[14][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[14][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[14][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[14][0], { + name: 'azure.servicebus.send', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1' + } + }) // scheduled array of messages - assert.strictEqual(payload[15][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[15][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[15][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[15][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[15][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[15][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[15][0], { + name: 'azure.servicebus.send', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'messaging.operation': 'send', + 'network.destination.name': '127.0.0.1' + } + }) // scheduled amqp messages - assert.strictEqual(payload[16][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[16][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[16][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[16][0].meta['messaging.destination.name'], 'queue.1') - assert.strictEqual(payload[16][0].meta['messaging.operation'], 'send') - assert.strictEqual(payload[16][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[16][0], { + name: 'azure.servicebus.send', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.destination.name': 'queue.1', + 'messaging.operation': 'send', + 'network.destination.name': 
'127.0.0.1' + } + }) // queue batch - assert.strictEqual(payload[17][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[17][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[17][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[17][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[17][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[17][0], { + name: 'azure.servicebus.create', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[18][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[19][0].name, 'azure.servicebus.send') - assert.strictEqual(payload[19][0].metrics['messaging.batch.message_count'], 2) - assert.strictEqual(payload[19][0].meta['messaging.destination.name'], 'queue.1') + assertObjectContains(payload[19][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.destination.name': 'queue.1' + }, + metrics: { + 'messaging.batch.message_count': 2 + } + }) assert.strictEqual(parseLinks(payload[19][0]).length, 2) // topic batch - assert.strictEqual(payload[20][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[20][0].meta['span.kind'], 'producer') - assert.strictEqual(payload[20][0].meta['messaging.system'], 'servicebus') - assert.strictEqual(payload[20][0].meta['messaging.operation'], 'create') - assert.strictEqual(payload[20][0].meta['network.destination.name'], '127.0.0.1') + assertObjectContains(payload[20][0], { + name: 'azure.servicebus.create', + meta: { + 'span.kind': 'producer', + 'messaging.system': 'servicebus', + 'messaging.operation': 'create', + 'network.destination.name': '127.0.0.1' + } + }) assert.strictEqual(payload[21][0].name, 'azure.servicebus.create') - assert.strictEqual(payload[22][0].name, 'azure.servicebus.send') - 
assert.strictEqual(payload[22][0].metrics['messaging.batch.message_count'], 2) - assert.strictEqual(payload[22][0].meta['messaging.destination.name'], 'topic.1') + assertObjectContains(payload[22][0], { + name: 'azure.servicebus.send', + meta: { + 'messaging.destination.name': 'topic.1' + }, + metrics: { + 'messaging.batch.message_count': 2 + } + }) assert.strictEqual(parseLinks(payload[22][0]).length, 2) }) diff --git a/packages/datadog-plugin-connect/test/index.spec.js b/packages/datadog-plugin-connect/test/index.spec.js index b9949d7bbb1..f53d551ab83 100644 --- a/packages/datadog-plugin-connect/test/index.spec.js +++ b/packages/datadog-plugin-connect/test/index.spec.js @@ -8,6 +8,7 @@ const axios = require('axios') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') +const { assertObjectContains } = require('../../../integration-tests/helpers') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../dd-trace/src/constants') const agent = require('../../dd-trace/test/plugins/agent') const { withVersions } = require('../../dd-trace/test/setup/mocha') @@ -56,15 +57,19 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 'test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'connect') - assert.strictEqual(spans[0].meta['_dd.integration'], 'connect') + assertObjectContains(spans[0], { + service: 'test', + type: 'web', + resource: 'GET /user', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', 
+ component: 'connect', + '_dd.integration': 'connect' + } + }) }) .then(done) .catch(done) @@ -382,10 +387,14 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + resource: 'GET /user', + meta: { + 'http.status_code': '500', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -417,10 +426,14 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 0) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '400') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 0, + resource: 'GET /user', + meta: { + 'http.status_code': '400', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -446,12 +459,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -510,16 +527,24 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - 
assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'connect') - assert.strictEqual(spans[1].error, 1) - assert.strictEqual(spans[1].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[1].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[1].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[1].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'connect' + } + }) + assertObjectContains(spans[1], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -645,14 +670,18 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 'custom') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + service: 'custom', + type: 'web', + resource: 'GET /user', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -680,16 +709,24 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - 
assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'connect') - assert.strictEqual(spans[1].error, 1) - assert.strictEqual(spans[1].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[1].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[1].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[1].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'connect' + } + }) + assertObjectContains(spans[1], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -715,12 +752,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -823,12 +864,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: 
{ + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'connect' + } + }) }) .then(done) .catch(done) @@ -854,12 +899,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'connect') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'connect' + } + }) }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-elasticsearch/test/index.spec.js b/packages/datadog-plugin-elasticsearch/test/index.spec.js index 2a1c271a8d6..5bbddced596 100644 --- a/packages/datadog-plugin-elasticsearch/test/index.spec.js +++ b/packages/datadog-plugin-elasticsearch/test/index.spec.js @@ -9,6 +9,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const { breakThen, unbreakThen } = require('../../dd-trace/test/plugins/helpers') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') +const { assertObjectContains } = require('../../../integration-tests/helpers') describe('Plugin', () => { let elasticsearch let tracer @@ -80,19 +81,25 @@ describe('Plugin', () => { it('should set the correct tags', done => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].meta.component, 'elasticsearch') - 
assert.strictEqual(traces[0][0].meta['_dd.integration'], 'elasticsearch') - assert.strictEqual(traces[0][0].meta['db.type'], 'elasticsearch') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['elasticsearch.method'], 'POST') - assert.strictEqual(traces[0][0].meta['elasticsearch.url'], '/docs/_search') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') + assertObjectContains(traces[0][0], { + name: expectedSchema.outbound.opName, + service: expectedSchema.outbound.serviceName, + meta: { + component: 'elasticsearch', + '_dd.integration': 'elasticsearch', + 'db.type': 'elasticsearch', + 'span.kind': 'client', + 'elasticsearch.method': 'POST', + 'elasticsearch.url': '/docs/_search', + 'out.host': 'localhost' + } + }) if (hasCallbackSupport) { - assert.strictEqual(traces[0][0].meta['elasticsearch.body'], '{"query":{"match_all":{}}}') - assert.strictEqual(traces[0][0].meta['elasticsearch.params'], '{"sort":"name","size":100}') + assertObjectContains(traces[0][0].meta, { + 'elasticsearch.body': '{"query":{"match_all":{}}}', + 'elasticsearch.params': '{"sort":"name","size":100}' + }) } else { assert.ok('elasticsearch.body' in traces[0][0].meta) assert.strictEqual( @@ -119,13 +126,17 @@ describe('Plugin', () => { it('should set the correct tags on msearch', done => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].meta.component, 'elasticsearch') - assert.strictEqual(traces[0][0].meta['db.type'], 'elasticsearch') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['elasticsearch.method'], 'POST') - assert.strictEqual(traces[0][0].meta['elasticsearch.url'], '/_msearch') + assertObjectContains(traces[0][0], { + name: expectedSchema.outbound.opName, + service: 
expectedSchema.outbound.serviceName, + meta: { + component: 'elasticsearch', + 'db.type': 'elasticsearch', + 'span.kind': 'client', + 'elasticsearch.method': 'POST', + 'elasticsearch.url': '/_msearch' + } + }) assert.ok('elasticsearch.body' in traces[0][0].meta) assert.strictEqual( traces[0][0].meta['elasticsearch.body'], @@ -212,10 +223,12 @@ describe('Plugin', () => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'elasticsearch') + assertObjectContains(traces[0][0].meta, { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'elasticsearch' + }) }) .then(done) .catch(done) @@ -269,11 +282,16 @@ describe('Plugin', () => { it('should handle errors', done => { let error - agent.assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'elasticsearch') + agent.assertFirstTraceSpan(span => { + assert.ok(error) + assertObjectContains(span, { + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'elasticsearch' + } + }) }) .then(done) .catch(done) @@ -360,14 +378,15 @@ describe('Plugin', () => { } }, hasCallbackSupport ? 
() => {} : undefined) - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, 'custom') - assert.strictEqual(traces[0][0].meta.component, 'elasticsearch') - assert.strictEqual(traces[0][0].meta['elasticsearch.params'], 'foo') - assert.strictEqual(traces[0][0].meta['elasticsearch.method'], 'POST') - }) + agent.assertFirstTraceSpan({ + name: expectedSchema.outbound.opName, + service: 'custom', + meta: { + component: 'elasticsearch', + 'elasticsearch.params': 'foo', + 'elasticsearch.method': 'POST' + } + }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-express/test/index.spec.js b/packages/datadog-plugin-express/test/index.spec.js index 65305889bd8..16cbc29f722 100644 --- a/packages/datadog-plugin-express/test/index.spec.js +++ b/packages/datadog-plugin-express/test/index.spec.js @@ -8,6 +8,7 @@ const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const semver = require('semver') const sinon = require('sinon') +const { assertObjectContains } = require('../../../integration-tests/helpers') const { NODE_MAJOR } = require('../../../version') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../dd-trace/src/constants') const agent = require('../../dd-trace/test/plugins/agent') @@ -24,7 +25,10 @@ describe('Plugin', () => { withVersions('express', 'express', version => { // Express.js 4.10.5 and below have a Node.js incompatibility in the `fresh` package RE res._headers missing if (semver.intersects(version, '<=4.10.5') && NODE_MAJOR >= 24) { - describe.skip(`refusing to run tests as express@${version} is incompatible with Node.js ${NODE_MAJOR}`) + describe.skip( + `refusing to run tests as express@${version} is incompatible with Node.js ${NODE_MAJOR}`, + () => {} + ) return } @@ -118,15 +122,19 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 
'test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta['http.route'], '/user') + assertObjectContains(spans[0], { + service: 'test', + type: 'web', + resource: 'GET /user', + meta: { + component: 'express', + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + 'http.route': '/user' + } + }) }) .then(done) .catch(done) @@ -154,15 +162,19 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 'test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /app/user/:id') - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[0].meta['_dd.integration'], 'express') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/app/user/1`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') + assertObjectContains(spans[0], { + service: 'test', + type: 'web', + resource: 'GET /app/user/:id', + meta: { + component: 'express', + '_dd.integration': 'express', + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/app/user/1`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) }) .then(done) .catch(done) @@ -192,14 +204,18 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 'test') - assert.strictEqual(spans[0].type, 'web') - 
assert.strictEqual(spans[0].resource, 'GET /app/user/:id') - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/app/user/1`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') + assertObjectContains(spans[0], { + service: 'test', + type: 'web', + resource: 'GET /app/user/:id', + meta: { + component: 'express', + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/app/user/1`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) }) .then(done) .catch(done) @@ -232,34 +248,54 @@ describe('Plugin', () => { const whichMiddleware = isExpress4 ? 'express' : 'router' const rootSpan = spans[index++] - assert.strictEqual(rootSpan.resource, 'GET /app/user/:id') - assert.strictEqual(rootSpan.name, 'express.request') - assert.strictEqual(rootSpan.meta.component, 'express') + assertObjectContains(rootSpan, { + resource: 'GET /app/user/:id', + name: 'express.request', + meta: { + component: 'express' + } + }) if (isExpress4) { - assert.strictEqual(spans[index].resource, 'query') - assert.strictEqual(spans[index].name, 'express.middleware') + assertObjectContains(spans[index], { + resource: 'query', + name: 'express.middleware', + meta: { + component: 'express' + } + }) assert.strictEqual(spans[index].parent_id.toString(), rootSpan.span_id.toString()) - assert.strictEqual(spans[index].meta.component, 'express') index++ - assert.strictEqual(spans[index].resource, 'expressInit') - assert.strictEqual(spans[index].name, 'express.middleware') + assertObjectContains(spans[index], { + resource: 'expressInit', + name: 'express.middleware', + meta: { + component: 'express' + } + }) assert.strictEqual(spans[index].parent_id.toString(), rootSpan.span_id.toString()) - assert.strictEqual(spans[index].meta.component, 'express') index++ } - 
assert.strictEqual(spans[index].resource, 'named') - assert.strictEqual(spans[index].name, `${whichMiddleware}.middleware`) + assertObjectContains(spans[index], { + resource: 'named', + name: `${whichMiddleware}.middleware`, + meta: { + component: whichMiddleware + } + }) assert.strictEqual(spans[index].parent_id.toString(), rootSpan.span_id.toString()) - assert.strictEqual(spans[index].meta.component, whichMiddleware) index++ - assert.strictEqual(spans[index].resource, 'router') - assert.strictEqual(spans[index].name, `${whichMiddleware}.middleware`) + assertObjectContains(spans[index], { + resource: 'router', + name: `${whichMiddleware}.middleware`, + meta: { + component: whichMiddleware + } + }) assert.strictEqual(spans[index].parent_id.toString(), rootSpan.span_id.toString()) - assert.strictEqual(spans[index].meta.component, whichMiddleware) index++ if (isExpress4) { @@ -267,15 +303,23 @@ describe('Plugin', () => { } else { assert.strictEqual(spans[index].resource, 'handle') } - assert.strictEqual(spans[index].name, `${whichMiddleware}.middleware`) + assertObjectContains(spans[index], { + name: `${whichMiddleware}.middleware`, + meta: { + component: whichMiddleware + } + }) assert.strictEqual(spans[index].parent_id.toString(), spans[index - 1].span_id.toString()) - assert.strictEqual(spans[index].meta.component, whichMiddleware) index++ - assert.strictEqual(spans[index].resource, '') - assert.strictEqual(spans[index].name, `${whichMiddleware}.middleware`) + assertObjectContains(spans[index], { + resource: '', + name: `${whichMiddleware}.middleware`, + meta: { + component: whichMiddleware + } + }) assert.strictEqual(spans[index].parent_id.toString(), spans[index - 1].span_id.toString()) - assert.strictEqual(spans[index].meta.component, whichMiddleware) assert.strictEqual(index, spans.length - 1) }) @@ -318,12 +362,20 @@ describe('Plugin', () => { ? 
'express' : 'router' - assert.strictEqual(spans[0].resource, 'GET /user/:id') - assert.strictEqual(spans[0].name, 'express.request') - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[breakingSpanIndex].resource, 'breaking') - assert.strictEqual(spans[breakingSpanIndex].name, `${whichMiddleware}.middleware`) - assert.strictEqual(spans[breakingSpanIndex].meta.component, whichMiddleware) + assertObjectContains(spans[0], { + resource: 'GET /user/:id', + name: 'express.request', + meta: { + component: 'express' + } + }) + assertObjectContains(spans[breakingSpanIndex], { + resource: 'breaking', + name: `${whichMiddleware}.middleware`, + meta: { + component: whichMiddleware + } + }) }) .then(done) .catch(done) @@ -366,11 +418,19 @@ describe('Plugin', () => { ? 'express' : 'router' - assert.strictEqual(spans[0].name, 'express.request') - assert.strictEqual(spans[errorSpanIndex].name, `${whichMiddleware}.middleware`) - assert.strictEqual(spans[errorSpanIndex].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[errorSpanIndex].meta.component, whichMiddleware) + assertObjectContains(spans[0], { + name: 'express.request', + meta: { + component: 'express' + } + }) + assertObjectContains(spans[errorSpanIndex], { + name: `${whichMiddleware}.middleware`, + meta: { + [ERROR_TYPE]: error.name, + component: whichMiddleware + } + }) }) .then(done) .catch(done) @@ -546,8 +606,12 @@ describe('Plugin', () => { const spans = sort(traces[0]) assert.strictEqual(spans.filter(span => span.name === 'express.request').length, 1) - assert.strictEqual(spans[0].resource, 'GET /parent/child') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + resource: 'GET /parent/child', + meta: { + component: 'express' + } + }) }) .then(done) .catch(done) @@ -750,7 +814,7 @@ describe('Plugin', () => { }) }) - it('long regex should not steal path', done => { + it('long regex 
should not steal path', function (done) { const app = express() try { @@ -760,7 +824,7 @@ describe('Plugin', () => { } catch (err) { // eslint-disable-next-line no-console console.log('This version of Express (>4.0 <4.6) has broken support for regex routing. Skipping this test.') - this.skip && this.skip() // mocha allows dynamic skipping, tap does not + this.skip() return done() } @@ -786,7 +850,7 @@ describe('Plugin', () => { }) }) - it('should work with regex having flags', done => { + it('should work with regex having flags', function (done) { const app = express() try { @@ -796,7 +860,7 @@ describe('Plugin', () => { } catch (err) { // eslint-disable-next-line no-console console.log('This version of Express (>4.0 <4.6) has broken support for regex routing. Skipping this test.') - this.skip && this.skip() // mocha allows dynamic skipping, tap does not + this.skip() return done() } @@ -822,7 +886,7 @@ describe('Plugin', () => { }) }) - it('long regex child of string router should not steal path', done => { + it('long regex child of string router should not steal path', function (done) { const app = express() const router = express.Router() @@ -834,7 +898,7 @@ describe('Plugin', () => { } catch (err) { // eslint-disable-next-line no-console console.log('This version of Express (>4.0 <4.6) has broken support for regex routing. 
Skipping this test.') - this.skip && this.skip() // mocha allows dynamic skipping, tap does not + this.skip() return done() } @@ -1091,10 +1155,14 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + resource: 'GET /user', + meta: { + 'http.status_code': '500', + component: 'express' + } + }) done() }) @@ -1125,10 +1193,14 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 0) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '400') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 0, + resource: 'GET /user', + meta: { + 'http.status_code': '400', + component: 'express' + } + }) done() }) @@ -1154,12 +1226,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'express' + } + }) }) .then(done) .catch(done) @@ -1190,16 +1266,24 @@ describe('Plugin', () => { ? 
'express' : 'router' - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[secondErrorIndex].error, 1) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[secondErrorIndex].meta.component, whichMiddleware) + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'express' + } + }) + assertObjectContains(spans[secondErrorIndex], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: whichMiddleware + } + }) }) .then(done) .catch(done) @@ -1227,16 +1311,26 @@ describe('Plugin', () => { const spans = sort(traces[0]) const secondErrorIndex = spans.length - 2 - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'express') - assert.strictEqual(spans[secondErrorIndex].error, 1) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[secondErrorIndex].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 
'express' + } + }) + assertObjectContains(spans[secondErrorIndex], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack + } + }) + assertObjectContains(spans[0].meta, { + component: 'express' + }) }) .then(done) .catch(done) @@ -1249,9 +1343,9 @@ describe('Plugin', () => { }) }) - it('should support capturing groups in routes', done => { + it('should support capturing groups in routes', function (done) { if (semver.intersects(version, '>=5.0.0')) { - this.skip && this.skip() // mocha allows dynamic skipping, tap does not + this.skip() return done() } @@ -1268,8 +1362,12 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].resource, 'GET /:path(*)') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) + assertObjectContains(spans[0], { + resource: 'GET /:path(*)', + meta: { + 'http.url': `http://localhost:${port}/user` + } + }) }) .then(done) .catch(done) @@ -1294,8 +1392,12 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].resource, 'GET /*user') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) + assertObjectContains(spans[0], { + resource: 'GET /*user', + meta: { + 'http.url': `http://localhost:${port}/user` + } + }) }) .then(done) .catch(done) @@ -1372,10 +1474,14 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 0) - assert.strictEqual(spans[0].resource, 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '404') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 0, + resource: 'GET', + meta: { + 'http.status_code': '404', + component: 'express' + } + }) assert.ok(!('http.route' in spans[0].meta)) }).then(done).catch(done) @@ -1410,14 +1516,18 @@ describe('Plugin', () 
=> { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].service, 'test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /dd') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/dd`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + service: 'test', + type: 'web', + resource: 'GET /dd', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/dd`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'express' + } + }) }) .then(done) .catch(done) @@ -1678,10 +1788,14 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + resource: 'GET /user', + meta: { + 'http.status_code': '500', + component: 'express' + } + }) done() }) @@ -1713,10 +1827,14 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 0) - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['http.status_code'], '400') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 0, + resource: 'GET /user', + meta: { + 'http.status_code': '400', + component: 'express' + } + }) }) .then(done) .catch(done) @@ -1743,11 +1861,15 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - 
assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'express' + } + }) }) .then(done) .catch(done) @@ -1773,12 +1895,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = sort(traces[0]) - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'express') + assertObjectContains(spans[0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + 'http.status_code': '500', + component: 'express' + } + }) }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-fastify/test/tracing.spec.js b/packages/datadog-plugin-fastify/test/tracing.spec.js index f8ef0df4981..4eacd8d87a5 100644 --- a/packages/datadog-plugin-fastify/test/tracing.spec.js +++ b/packages/datadog-plugin-fastify/test/tracing.spec.js @@ -10,6 +10,7 @@ const semver = require('semver') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../dd-trace/src/constants') const agent = require('../../dd-trace/test/plugins/agent') const { withExports, withVersions } = require('../../dd-trace/test/setup/mocha') +const { assertObjectContains } = require('../../../integration-tests/helpers') const host = 'localhost' describe('Plugin', () => { @@ -58,16 +59,20 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].service, 'test') - 
assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'fastify') - assert.strictEqual(spans[0].meta['_dd.integration'], 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + service: 'test', + type: 'web', + resource: 'GET /user', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fastify', + '_dd.integration': 'fastify' + } + }) }) .then(done) .catch(done) @@ -94,15 +99,19 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].service, 'test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user/:id') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user/123`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + service: 'test', + type: 'web', + resource: 'GET /user/:id', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user/123`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -128,15 +137,19 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].service, 'test') - 
assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET /user/:id') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://localhost:${port}/user/123`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + service: 'test', + type: 'web', + resource: 'GET /user/:id', + meta: { + 'span.kind': 'server', + 'http.url': `http://localhost:${port}/user/123`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -275,12 +288,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + resource: 'GET /user', + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -354,13 +371,17 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 
'fastify.request', + resource: 'GET /user', + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -387,10 +408,14 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].error, 0) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + resource: 'GET /user', + error: 0, + meta: { + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -418,12 +443,16 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + resource: 'GET /user', + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -452,13 +481,17 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + resource: 'GET /user', + error: 1, + 
meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'fastify' + } + }) }) .then(done) .catch(done) @@ -491,7 +524,9 @@ describe('Plugin', () => { assert.ok(!(ERROR_TYPE in spans[0].meta)) assert.ok(!(ERROR_MESSAGE in spans[0].meta)) assert.ok(!(ERROR_STACK in spans[0].meta)) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0].meta, { + component: 'fastify' + }) }) .then(done) .catch(done) @@ -524,13 +559,17 @@ describe('Plugin', () => { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[0].name, 'fastify.request') - assert.strictEqual(spans[0].resource, 'GET /user') - assert.strictEqual(spans[0].error, 1) - assert.strictEqual(spans[0].meta[ERROR_TYPE], error.name) - assert.strictEqual(spans[0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(spans[0].meta[ERROR_STACK], error.stack) - assert.strictEqual(spans[0].meta.component, 'fastify') + assertObjectContains(spans[0], { + name: 'fastify.request', + resource: 'GET /user', + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'fastify' + } + }) }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-fetch/test/index.spec.js b/packages/datadog-plugin-fetch/test/index.spec.js index de67cebf267..f0052f15d89 100644 --- a/packages/datadog-plugin-fetch/test/index.spec.js +++ b/packages/datadog-plugin-fetch/test/index.spec.js @@ -10,6 +10,7 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const agent = require('../../dd-trace/test/plugins/agent') const { withNamingSchema } = require('../../dd-trace/test/setup/mocha') const { rawExpectedSchema } = require('./naming') +const { assertObjectContains } = require('../../../integration-tests/helpers') const HTTP_REQUEST_HEADERS = tags.HTTP_REQUEST_HEADERS const HTTP_RESPONSE_HEADERS = tags.HTTP_RESPONSE_HEADERS @@ -69,19 +70,20 @@ 
describe('Plugin', function () { res.status(200).send() }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'GET') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'GET') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'fetch') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'fetch') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: SERVICE_NAME, + type: 'http', + resource: 'GET', + meta: { + 'span.kind': 'client', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fetch', + '_dd.integration': 'fetch', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) @@ -95,18 +97,19 @@ describe('Plugin', function () { res.status(200).send() }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'POST') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'POST') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'fetch') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: SERVICE_NAME, + type: 'http', + resource: 'POST', + meta: { + 'span.kind': 'client', + 'http.url': `http://localhost:${port}/user`, + 
'http.method': 'POST', + 'http.status_code': '200', + component: 'fetch', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) @@ -120,18 +123,19 @@ describe('Plugin', function () { res.status(200).send() }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'GET') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'GET') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'fetch') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: SERVICE_NAME, + type: 'http', + resource: 'GET', + meta: { + 'span.kind': 'client', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fetch', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) @@ -162,11 +166,12 @@ describe('Plugin', function () { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/user`) - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': `http://localhost:${port}/user` + } + }) .then(done) .catch(done) @@ -185,10 +190,11 @@ describe('Plugin', function () { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200' + } + }) .then(done) .catch(done) @@ -208,10 +214,11 @@ describe('Plugin', function () { }) appListener = server(app, port 
=> { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200' + } + }) .then(done) .catch(done) @@ -224,10 +231,12 @@ describe('Plugin', function () { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message || error.code) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'fetch') + assertObjectContains(traces[0][0].meta, { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message || error.code, + [ERROR_STACK]: error.stack, + component: 'fetch' + }) }) .then(done) .catch(done) @@ -651,18 +660,19 @@ describe('Plugin', function () { res.status(200).send() }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'GET') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'GET') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'fetch') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: SERVICE_NAME, + type: 'http', + resource: 'GET', + meta: { + 'span.kind': 'client', + 'http.url': `http://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'fetch', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-grpc/test/client.spec.js b/packages/datadog-plugin-grpc/test/client.spec.js index dc57c4b4df6..4d4827a79b5 100644 --- 
a/packages/datadog-plugin-grpc/test/client.spec.js +++ b/packages/datadog-plugin-grpc/test/client.spec.js @@ -135,16 +135,15 @@ describe('Plugin', () => { }) client.getUnary({ first: 'foobar' }, () => {}) - return agent - .assertSomeTraces(traces => { - assertObjectContains(traces[0][0].meta, { - 'network.destination.ip': '127.0.0.1', - 'network.destination.port': port.toString(), - 'rpc.service': 'test.TestService', - 'span.kind': 'client', - component: 'grpc' - }) - }) + return agent.assertFirstTraceSpan({ + meta: { + 'network.destination.ip': '127.0.0.1', + 'network.destination.port': port.toString(), + 'rpc.service': 'test.TestService', + 'span.kind': 'client', + component: 'grpc' + } + }) }) } @@ -336,19 +335,22 @@ describe('Plugin', () => { return agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].error, 1) - assertObjectContains(traces[0][0].meta, { - [ERROR_MESSAGE]: '2 UNKNOWN: foobar', - [ERROR_TYPE]: 'Error', - 'grpc.method.name': 'getUnary', - 'grpc.method.service': 'TestService', - 'grpc.method.package': 'test', - 'grpc.method.path': '/test.TestService/getUnary', - 'grpc.method.kind': 'unary', - 'rpc.service': 'test.TestService', - 'span.kind': 'client', - component: 'grpc' + assertObjectContains(traces[0][0], { + error: 1, + meta: { + [ERROR_MESSAGE]: '2 UNKNOWN: foobar', + [ERROR_TYPE]: 'Error', + 'grpc.method.name': 'getUnary', + 'grpc.method.service': 'TestService', + 'grpc.method.package': 'test', + 'grpc.method.path': '/test.TestService/getUnary', + 'grpc.method.kind': 'unary', + 'rpc.service': 'test.TestService', + 'span.kind': 'client', + component: 'grpc' + } }) + assert.ok(Object.hasOwn(traces[0][0].meta, ERROR_STACK)) assert.strictEqual(traces[0][0].metrics['grpc.status.code'], 2) }) @@ -382,18 +384,21 @@ describe('Plugin', () => { return agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].error, 1) - assertObjectContains(traces[0][0].meta, { - [ERROR_TYPE]: 'Error', - 'grpc.method.name': 'getUnary', - 
'grpc.method.service': 'TestService', - 'grpc.method.package': 'test', - 'grpc.method.path': '/test.TestService/getUnary', - 'grpc.method.kind': 'unary', - 'rpc.service': 'test.TestService', - 'span.kind': 'client', - component: 'grpc' + assertObjectContains(traces[0][0], { + error: 1, + meta: { + [ERROR_TYPE]: 'Error', + 'grpc.method.name': 'getUnary', + 'grpc.method.service': 'TestService', + 'grpc.method.package': 'test', + 'grpc.method.path': '/test.TestService/getUnary', + 'grpc.method.kind': 'unary', + 'rpc.service': 'test.TestService', + 'span.kind': 'client', + component: 'grpc' + } }) + assert.ok(Object.hasOwn(traces[0][0].meta, ERROR_STACK)) assert.match(traces[0][0].meta[ERROR_MESSAGE], /^13 INTERNAL:.+$/m) assert.strictEqual(traces[0][0].metrics['grpc.status.code'], 13) @@ -560,12 +565,9 @@ describe('Plugin', () => { client.getUnary({ first: 'foobar' }, () => {}) - return agent - .assertSomeTraces(traces => { - assertObjectContains(traces[0][0], { - service: 'custom' - }) - }) + return agent.assertFirstTraceSpan({ + service: 'custom' + }) }) }) @@ -656,18 +658,17 @@ describe('Plugin', () => { client.getUnary({ first: 'foobar' }, metadata, () => {}) - return agent - .assertSomeTraces(traces => { - assertObjectContains(traces[0][0].meta, { - 'grpc.method.name': 'getUnary', - 'grpc.method.service': 'TestService', - 'grpc.method.path': '/test.TestService/getUnary', - 'grpc.method.kind': 'unary', - 'grpc.request.metadata.foo': 'bar', - 'rpc.service': 'test.TestService', - 'span.kind': 'client' - }) - }) + return agent.assertFirstTraceSpan({ + meta: { + 'grpc.method.name': 'getUnary', + 'grpc.method.service': 'TestService', + 'grpc.method.path': '/test.TestService/getUnary', + 'grpc.method.kind': 'unary', + 'grpc.request.metadata.foo': 'bar', + 'rpc.service': 'test.TestService', + 'span.kind': 'client' + } + }) }) it('should handle response metadata', async () => { diff --git a/packages/datadog-plugin-grpc/test/server.spec.js 
b/packages/datadog-plugin-grpc/test/server.spec.js index 49f8bf6fb70..85408d5cb98 100644 --- a/packages/datadog-plugin-grpc/test/server.spec.js +++ b/packages/datadog-plugin-grpc/test/server.spec.js @@ -420,12 +420,9 @@ describe('Plugin', () => { client.getUnary({ first: 'foobar' }, () => {}) - return agent - .assertSomeTraces(traces => { - assertObjectContains(traces[0][0], { - service: 'custom' - }) - }) + return agent.assertFirstTraceSpan({ + service: 'custom' + }) }) }) diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index e4b3e8216a4..6505278e725 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -14,6 +14,7 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const { rawExpectedSchema } = require('./naming') const { satisfies } = require('semver') +const { assertObjectContains } = require('../../../integration-tests/helpers') const HTTP_REQUEST_HEADERS = tags.HTTP_REQUEST_HEADERS const HTTP_RESPONSE_HEADERS = tags.HTTP_RESPONSE_HEADERS const NODE_MAJOR = parseInt(process.versions.node.split('.')[0]) @@ -104,19 +105,20 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'GET') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'GET') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'http') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'http') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: 
SERVICE_NAME, + type: 'http', + resource: 'GET', + meta: { + 'span.kind': 'client', + 'http.url': `${protocol}://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '200', + component: 'http', + '_dd.integration': 'http', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) @@ -159,18 +161,19 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'CONNECT') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'CONNECT') - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta.component, 'http') - assert.strictEqual(traces[0][0].meta['out.host'], 'localhost') - }) + agent.assertFirstTraceSpan({ + service: SERVICE_NAME, + type: 'http', + resource: 'CONNECT', + meta: { + 'span.kind': 'client', + 'http.url': `${protocol}://localhost:${port}/user`, + 'http.method': 'CONNECT', + 'http.status_code': '200', + component: 'http', + 'out.host': 'localhost' + } + }) .then(done) .catch(done) @@ -203,17 +206,18 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].service, SERVICE_NAME) - assert.strictEqual(traces[0][0].type, 'http') - assert.strictEqual(traces[0][0].resource, 'GET') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - assert.strictEqual(traces[0][0].meta['http.method'], 'GET') - assert.strictEqual(traces[0][0].meta['http.status_code'], '101') - assert.strictEqual(traces[0][0].meta.component, 'http') - }) + agent.assertFirstTraceSpan({ + service: 
SERVICE_NAME, + type: 'http', + resource: 'GET', + meta: { + 'span.kind': 'client', + 'http.url': `${protocol}://localhost:${port}/user`, + 'http.method': 'GET', + 'http.status_code': '101', + component: 'http' + } + }) .then(done) .catch(done) @@ -276,11 +280,12 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': `${protocol}://localhost:${port}/user` + } + }) .then(done) .catch(done) @@ -302,11 +307,12 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': `${protocol}://localhost:${port}/user` + } + }) .then(done) .catch(done) @@ -325,11 +331,12 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': `${protocol}://localhost:${port}/user` + } + }) .then(done) .catch(done) @@ -353,9 +360,11 @@ describe('Plugin', () => { app.get('/user', (req, res) => res.status(200).send()) - agent.assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': 
`${protocol}://localhost:${port}/user` + } }).then(done, done) const req = http.request(url) @@ -551,10 +560,15 @@ describe('Plugin', () => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message || error.code) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'http') + assertObjectContains(traces[0][0], { + error: 1, + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message || error.code, + [ERROR_STACK]: error.stack, + component: 'http' + } + }) }) .then(done) .catch(done) @@ -623,12 +637,17 @@ describe('Plugin', () => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].error, 1) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) + assertObjectContains(traces[0][0], { + error: 1, + meta: { + [ERROR_MESSAGE]: error.message, + [ERROR_TYPE]: error.name, + [ERROR_STACK]: error.stack, + component: 'http' + } + }) + assert.ok(!('http.status_code' in traces[0][0].meta)) - assert.strictEqual(traces[0][0].meta.component, 'http') }) .then(done) .catch(done) @@ -860,11 +879,12 @@ describe('Plugin', () => { }) appListener = server(app, port => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['http.status_code'], '200') - assert.strictEqual(traces[0][0].meta['http.url'], `${protocol}://localhost:${port}/user`) - }) + agent.assertFirstTraceSpan({ + meta: { + 'http.status_code': '200', + 'http.url': `${protocol}://localhost:${port}/user` + } + }) .then(done) .catch(done) @@ -890,9 +910,13 @@ describe('Plugin', () => { appListener = server(app, port => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][1].error, 0) - assert.strictEqual(traces[0][1].meta['http.status_code'], 
'200') - assert.strictEqual(traces[0][1].meta['http.url'], `${protocol}://localhost:${port}/user`) + assertObjectContains(traces[0][1], { + error: 0, + meta: { + 'http.status_code': '200', + 'http.url': `${protocol}://localhost:${port}/user` + } + }) }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-ioredis/test/index.spec.js b/packages/datadog-plugin-ioredis/test/index.spec.js index 472a7d525cc..396d3f6c109 100644 --- a/packages/datadog-plugin-ioredis/test/index.spec.js +++ b/packages/datadog-plugin-ioredis/test/index.spec.js @@ -9,6 +9,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const { breakThen, unbreakThen } = require('../../dd-trace/test/plugins/helpers') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') + describe('Plugin', () => { let Redis let redis diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index a6b02edeb52..e9560e25977 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -130,7 +130,6 @@ describe('Plugin', () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - assertObjectContains(span, { name: resourceName, service: expectedSchema.send.serviceName, diff --git a/packages/datadog-plugin-langchain/test/index.spec.js b/packages/datadog-plugin-langchain/test/index.spec.js index 7f68d23cf3d..502a8f73c1e 100644 --- a/packages/datadog-plugin-langchain/test/index.spec.js +++ b/packages/datadog-plugin-langchain/test/index.spec.js @@ -5,7 +5,7 @@ const assert = require('node:assert/strict') const { expect } = require('chai') const { after, before, beforeEach, describe, it } = require('mocha') -const { useEnv } = require('../../../integration-tests/helpers') +const { assertObjectContains, useEnv } = require('../../../integration-tests/helpers') 
const iastFilter = require('../../dd-trace/src/appsec/iast/taint-tracking/filter') const agent = require('../../dd-trace/test/plugins/agent') const { withVersions } = require('../../dd-trace/test/setup/mocha') @@ -163,12 +163,15 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.name, 'langchain.request') - assert.strictEqual(span.resource, 'langchain.llms.openai.OpenAI') - - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-3.5-turbo-instruct') - assert.strictEqual(span.meta['langchain.request.type'], 'llm') + assertObjectContains(span, { + name: 'langchain.request', + resource: 'langchain.llms.openai.OpenAI', + meta: { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-3.5-turbo-instruct', + 'langchain.request.type': 'llm' + } + }) }) const result = await llm.generate(['what is 2 + 2?']) @@ -183,8 +186,10 @@ describe('Plugin', () => { .assertSomeTraces(traces => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-3.5-turbo-instruct') + assertObjectContains(span.meta, { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-3.5-turbo-instruct' + }) }) const llm = getLangChainOpenAiClient('llm', { model: 'gpt-3.5-turbo-instruct' }) @@ -202,8 +207,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-3.5-turbo-instruct') + assertObjectContains(span.meta, { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-3.5-turbo-instruct' + }) }) const llm = getLangChainOpenAiClient('llm', { model: 'gpt-3.5-turbo-instruct', n: 2 
}) @@ -247,12 +254,15 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.name, 'langchain.request') - assert.strictEqual(span.resource, 'langchain.chat_models.openai.ChatOpenAI') - - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-4') - assert.strictEqual(span.meta['langchain.request.type'], 'chat_model') + assertObjectContains(span, { + name: 'langchain.request', + resource: 'langchain.chat_models.openai.ChatOpenAI', + meta: { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-4', + 'langchain.request.type': 'chat_model' + } + }) }) const chatModel = getLangChainOpenAiClient('chat', { model: 'gpt-4' }) @@ -269,8 +279,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-4') + assertObjectContains(span.meta, { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-4' + }) }) const chatModel = getLangChainOpenAiClient('chat', { model: 'gpt-4' }) @@ -291,8 +303,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-4') + assertObjectContains(span.meta, { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-4' + }) }) const chatModel = getLangChainOpenAiClient('chat', { model: 'gpt-4' }) @@ -313,8 +327,10 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'gpt-4') + assertObjectContains(span.meta, { + 
'langchain.request.provider': 'openai', + 'langchain.request.model': 'gpt-4' + }) }) const tools = [ @@ -351,12 +367,16 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.name, 'langchain.request') - assert.strictEqual(span.resource, 'langchain.chat_models.anthropic.ChatAnthropic') + assertObjectContains(span, { + name: 'langchain.request', + resource: 'langchain.chat_models.anthropic.ChatAnthropic', + meta: { + 'langchain.request.provider': 'anthropic', + 'langchain.request.type': 'chat_model' + } + }) - assert.strictEqual(span.meta['langchain.request.provider'], 'anthropic') assert.ok(Object.hasOwn(span.meta, 'langchain.request.model')) - assert.strictEqual(span.meta['langchain.request.type'], 'chat_model') }) const chatModel = getLangChainAnthropicClient('chat', { modelName: 'claude-3-5-sonnet-20241022' }) @@ -409,10 +429,13 @@ describe('Plugin', () => { // we already check the chat model span in previous tests assert.strictEqual(spans[1].resource, 'langchain.chat_models.openai.ChatOpenAI') - assert.strictEqual(chainSpan.name, 'langchain.request') - assert.strictEqual(chainSpan.resource, 'langchain_core.runnables.RunnableSequence') - - assert.strictEqual(chainSpan.meta['langchain.request.type'], 'chain') + assertObjectContains(chainSpan, { + name: 'langchain.request', + resource: 'langchain_core.runnables.RunnableSequence', + meta: { + 'langchain.request.type': 'chain' + } + }) }) const model = getLangChainOpenAiClient('chat', { model: 'gpt-4' }) @@ -458,7 +481,9 @@ describe('Plugin', () => { // we already check the chat model span in previous tests assert.strictEqual(spans[1].resource, 'langchain.chat_models.openai.ChatOpenAI') - assert.strictEqual(chainSpan.meta['langchain.request.type'], 'chain') + assertObjectContains(chainSpan.meta, { + 'langchain.request.type': 'chain' + }) }) const result = await chain.invoke({ topic: 'chickens', style: 'dad joke' }) @@ -491,7 +516,9 @@ 
describe('Plugin', () => { const chainSpan = spans[0] - assert.strictEqual(chainSpan.meta['langchain.request.type'], 'chain') + assertObjectContains(chainSpan.meta, { + 'langchain.request.type': 'chain' + }) }) const result = await chain.batch(['chickens', 'dogs']) @@ -513,7 +540,9 @@ describe('Plugin', () => { const chainSpan = spans[0] - assert.strictEqual(chainSpan.meta['langchain.request.type'], 'chain') + assertObjectContains(chainSpan.meta, { + 'langchain.request.type': 'chain' + }) }) const parser = new langchainOutputParsers.JsonOutputParser() @@ -561,12 +590,15 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.name, 'langchain.request') - assert.strictEqual(span.resource, 'langchain.embeddings.openai.OpenAIEmbeddings') - - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'text-embedding-ada-002') - assert.strictEqual(span.meta['langchain.request.type'], 'embedding') + assertObjectContains(span, { + name: 'langchain.request', + resource: 'langchain.embeddings.openai.OpenAIEmbeddings', + meta: { + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'text-embedding-ada-002', + 'langchain.request.type': 'embedding' + } + }) }) const query = 'Hello, world!' 
@@ -583,9 +615,11 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.meta['langchain.request.type'], 'embedding') - assert.strictEqual(span.meta['langchain.request.provider'], 'openai') - assert.strictEqual(span.meta['langchain.request.model'], 'text-embedding-ada-002') + assertObjectContains(span.meta, { + 'langchain.request.type': 'embedding', + 'langchain.request.provider': 'openai', + 'langchain.request.model': 'text-embedding-ada-002' + }) }) const embeddings = getLangChainOpenAiClient('embedding') @@ -616,12 +650,15 @@ describe('Plugin', () => { assert.strictEqual(traces[0].length, 1) const span = traces[0][0] - assert.strictEqual(span.name, 'langchain.request') - assert.strictEqual(span.resource, 'langchain.embeddings.GoogleGenerativeAIEmbeddings') - - assert.strictEqual(span.meta['langchain.request.provider'], 'googlegenerativeai') - assert.strictEqual(span.meta['langchain.request.model'], 'text-embedding-004') - assert.strictEqual(span.meta['langchain.request.type'], 'embedding') + assertObjectContains(span, { + name: 'langchain.request', + resource: 'langchain.embeddings.GoogleGenerativeAIEmbeddings', + meta: { + 'langchain.request.provider': 'googlegenerativeai', + 'langchain.request.model': 'text-embedding-004', + 'langchain.request.type': 'embedding' + } + }) }) const query = 'Hello, world!' 
diff --git a/packages/datadog-plugin-mariadb/test/index.spec.js b/packages/datadog-plugin-mariadb/test/index.spec.js index 07ce63d182c..e70b8b0c0fc 100644 --- a/packages/datadog-plugin-mariadb/test/index.spec.js +++ b/packages/datadog-plugin-mariadb/test/index.spec.js @@ -10,8 +10,8 @@ const semver = require('semver') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') - const { expectedSchema, rawExpectedSchema } = require('./naming') +const { assertObjectContains } = require('../../../integration-tests/helpers') // https://github.com/mariadb-corporation/mariadb-connector-nodejs/commit/0a90b71ab20ab4e8b6a86a77ba291bba8ba6a34e const range = semver.gte(process.version, '15.0.0') ? '>=2.5.1' : '>=2' @@ -105,20 +105,20 @@ describe('Plugin', () => { }) it('should do automatic instrumentation', done => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].resource, 'SELECT 1 + 1 AS solution') - assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'db') - assert.strictEqual(traces[0][0].meta['db.user'], 'root') - assert.strictEqual(traces[0][0].meta['db.type'], 'mariadb') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta.component, 'mariadb') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'mariadb') - }) + agent.assertFirstTraceSpan({ + name: expectedSchema.outbound.opName, + service: expectedSchema.outbound.serviceName, + resource: 'SELECT 1 + 1 AS solution', + type: 'sql', + meta: { + 'span.kind': 'client', + 'db.name': 'db', + 
'db.user': 'root', + 'db.type': 'mariadb', + component: 'mariadb', + '_dd.integration': 'mariadb' + } + }) .then(done) .catch(done) @@ -129,19 +129,19 @@ describe('Plugin', () => { if (semver.intersects(version, '>=3')) { it('should support prepared statement shorthand', done => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].resource, 'SELECT ? + ? AS solution') - assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'db') - assert.strictEqual(traces[0][0].meta['db.user'], 'root') - assert.strictEqual(traces[0][0].meta['db.type'], 'mariadb') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta.component, 'mariadb') - }) + agent.assertFirstTraceSpan({ + name: expectedSchema.outbound.opName, + service: expectedSchema.outbound.serviceName, + resource: 'SELECT ? + ? AS solution', + type: 'sql', + meta: { + 'span.kind': 'client', + 'db.name': 'db', + 'db.user': 'root', + 'db.type': 'mariadb', + component: 'mariadb' + } + }) .then(done) .catch(done) @@ -151,19 +151,19 @@ describe('Plugin', () => { }) it('should support prepared statements', done => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].resource, 'SELECT ? + ? 
AS solution') - assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'db') - assert.strictEqual(traces[0][0].meta['db.user'], 'root') - assert.strictEqual(traces[0][0].meta['db.type'], 'mariadb') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta.component, 'mariadb') - }) + agent.assertFirstTraceSpan({ + name: expectedSchema.outbound.opName, + service: expectedSchema.outbound.serviceName, + resource: 'SELECT ? + ? AS solution', + type: 'sql', + meta: { + 'span.kind': 'client', + 'db.name': 'db', + 'db.user': 'root', + 'db.type': 'mariadb', + component: 'mariadb' + } + }) .then(done) .catch(done) @@ -184,10 +184,12 @@ describe('Plugin', () => { agent .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'mariadb') + assertObjectContains(traces[0][0].meta, { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'mariadb' + }) }) .then(done) .catch(done) @@ -344,18 +346,18 @@ describe('Plugin', () => { }) it('should do automatic instrumentation', done => { - agent - .assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].name, expectedSchema.outbound.opName) - assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) - assert.strictEqual(traces[0][0].resource, 'SELECT 1 + 1 AS solution') - assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.user'], 'root') - assert.strictEqual(traces[0][0].meta['db.type'], 'mariadb') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - 
assert.strictEqual(traces[0][0].meta.component, 'mariadb') - }) + agent.assertFirstTraceSpan({ + name: expectedSchema.outbound.opName, + service: expectedSchema.outbound.serviceName, + resource: 'SELECT 1 + 1 AS solution', + type: 'sql', + meta: { + 'span.kind': 'client', + 'db.user': 'root', + 'db.type': 'mariadb', + component: 'mariadb' + } + }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index 2c5e2c114c4..87a69a703af 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -7,6 +7,7 @@ const axios = require('axios') const { after, before, describe, it } = require('mocha') const { satisfies } = require('semver') +const { assertObjectContains } = require('../../../integration-tests/helpers') const path = require('node:path') const { execSync, spawn } = require('node:child_process') const { writeFileSync, readdirSync } = require('node:fs') @@ -169,15 +170,19 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET /api/hello/[name]') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.component, 'next') - assert.strictEqual(spans[1].meta['_dd.integration'], 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: 'GET /api/hello/[name]', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '200', + component: 'next', + '_dd.integration': 'next' + } + }) }) .then(done) .catch(done) @@ -224,13 +229,17 @@ describe('Plugin', function () { 
.assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '404') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '404', + component: 'next' + } + }) }) .then(done) .catch(done) @@ -245,14 +254,18 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET /_error') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '400') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: 'GET /_error', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '400', + component: 'next' + } + }) }) .then(done) .catch(done) @@ -286,10 +299,14 @@ describe('Plugin', function () { const nextRequestSpan = spans.find(span => span.name === 'next.request') assert.ok(nextRequestSpan, 'next.request span should exist') - assert.strictEqual(nextRequestSpan.resource, 'GET /api/hello/[name]') - assert.strictEqual(nextRequestSpan.meta['next.page'], '/api/hello/[name]') - assert.strictEqual(nextRequestSpan.meta['http.method'], 'GET') - assert.strictEqual(nextRequestSpan.meta['http.status_code'], '200') + assertObjectContains(nextRequestSpan, { + resource: 
'GET /api/hello/[name]', + meta: { + 'next.page': '/api/hello/[name]', + 'http.method': 'GET', + 'http.status_code': '200' + } + }) const webRequestSpan = spans.find(span => span.name === 'web.request') assert.ok(webRequestSpan, 'web.request span should exist') @@ -314,14 +331,18 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET /hello/[name]') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: 'GET /hello/[name]', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '200', + component: 'next' + } + }) }) .then(done) .catch(done) @@ -344,8 +365,12 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].resource, `GET ${expectedPath}`) - assert.strictEqual(spans[1].meta['http.status_code'], `${statusCode || 200}`) + assertObjectContains(spans[1], { + resource: `GET ${expectedPath}`, + meta: { + 'http.status_code': `${statusCode || 200}` + } + }) }) .then(done) .catch(done) @@ -359,13 +384,17 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '404') - assert.strictEqual(spans[1].meta.component, 'next') + 
assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '404', + component: 'next' + } + }) }) .then(done) .catch(done) @@ -396,12 +425,16 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].error, 1) + assertObjectContains(spans[1], { + name: 'next.request', + error: 1, + meta: { + 'http.status_code': '500', + 'error.message': 'fail', + 'error.type': 'Error' + } + }) - assert.strictEqual(spans[1].meta['http.status_code'], '500') - assert.strictEqual(spans[1].meta['error.message'], 'fail') - assert.strictEqual(spans[1].meta['error.type'], 'Error') assert.ok(spans[1].meta['error.stack'] != null) }) .then(done) @@ -417,14 +450,18 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET /public/*') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: 'GET /public/*', + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '200', + component: 'next' + } + }) }) return Promise.all([axios.get(`http://127.0.0.1:${port}/test.txt`), tracingPromise]) @@ -438,11 +475,15 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].resource, 'GET /_next/static/*') - assert.strictEqual(spans[1].meta['http.method'], 
'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + resource: 'GET /_next/static/*', + meta: { + 'http.method': 'GET', + 'http.status_code': '200', + component: 'next' + } + }) }) return Promise.all([axios.get(`http://127.0.0.1:${port}/_next/static/chunks/${file}`), tracingPromise]) @@ -520,20 +561,24 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET /api/hello/[name]') - assert.strictEqual(spans[1].error, 1) - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.foo, 'bar') - assert.strictEqual(spans[1].meta.req, 'IncomingMessage') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: 'GET /api/hello/[name]', + error: 1, + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '200', + foo: 'bar', + req: 'IncomingMessage', + component: 'next', + times_hook_called: '1' + } + }) // assert request hook was only called once across the whole request - assert.strictEqual(spans[1].meta.times_hook_called, '1') }) .then(done) .catch(done) @@ -549,11 +594,15 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].error, 1) + assertObjectContains(spans[1], { + name: 'next.request', + error: 1, + meta: { + 'error.message': 'error in app dir api route', + 'error.type': 'Error' + } + }) - 
assert.strictEqual(spans[1].meta['error.message'], 'error in app dir api route') - assert.strictEqual(spans[1].meta['error.type'], 'Error') assert.ok(spans[1].meta['error.stack'] != null) }) .then(done) @@ -589,14 +638,18 @@ describe('Plugin', function () { .assertSomeTraces(traces => { const spans = traces[0] - assert.strictEqual(spans[1].name, 'next.request') - assert.strictEqual(spans[1].service, 'test') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, expectedResource) - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta.component, 'next') + assertObjectContains(spans[1], { + name: 'next.request', + service: 'test', + type: 'web', + resource: expectedResource, + meta: { + 'span.kind': 'server', + 'http.method': 'GET', + 'http.status_code': '200', + component: 'next' + } + }) }) return Promise.all([axios.get(`http://127.0.0.1:${port}${resource}`), promise]) diff --git a/packages/datadog-plugin-openai/test/index.spec.js b/packages/datadog-plugin-openai/test/index.spec.js index 4c652562443..49ef23f434a 100644 --- a/packages/datadog-plugin-openai/test/index.spec.js +++ b/packages/datadog-plugin-openai/test/index.spec.js @@ -8,7 +8,7 @@ const Path = require('path') const semver = require('semver') const sinon = require('sinon') -const { useEnv } = require('../../../integration-tests/helpers') +const { assertObjectContains, useEnv } = require('../../../integration-tests/helpers') const { DogStatsDClient } = require('../../dd-trace/src/dogstatsd') const { NoopExternalLogger } = require('../../dd-trace/src/external-logger/src') const Sampler = require('../../dd-trace/src/sampler') @@ -116,10 +116,14 @@ describe('Plugin', () => { it('should attach an error to the span', async () => { const checkTraces = agent .assertSomeTraces(traces => { - 
assert.strictEqual(traces[0][0].error, 1) + assertObjectContains(traces[0][0], { + error: 1, + meta: { + 'error.type': 'Error' + } + }) // the message content differs on OpenAI version, even between patches assert.ok(traces[0][0].meta['error.message'] != null) - assert.strictEqual(traces[0][0].meta['error.type'], 'Error') assert.ok(traces[0][0].meta['error.stack'] != null) }) @@ -234,14 +238,17 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createCompletion') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/completions') - - assert.strictEqual(traces[0][0].meta.component, 'openai') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'openai') - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'gpt-3.5-turbo-instruct') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/completions', + component: 'openai', + '_dd.integration': 'openai', + 'openai.request.model': 'gpt-3.5-turbo-instruct' + } + }) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.model')) }) @@ -397,11 +404,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createEmbedding') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/embeddings') - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'text-embedding-ada-002') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.endpoint': '/vcr/openai/embeddings', + 'openai.request.method': 'POST', + 'openai.request.model': 'text-embedding-ada-002' + } + }) 
assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.model')) }) @@ -434,9 +444,13 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'listModels') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/models') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/vcr/openai/models' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.count')) }) @@ -464,12 +478,16 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'retrieveModel') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') // TODO: this might be a bug... - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/models/*') - assert.strictEqual(traces[0][0].meta['openai.request.id'], 'gpt-4') - assert.strictEqual(traces[0][0].meta['openai.response.owned_by'], 'openai') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/v1/models/*', + 'openai.request.id': 'gpt-4', + 'openai.response.owned_by': 'openai' + } + }) }) if (semver.satisfies(realVersion, '>=4.0.0')) { @@ -496,16 +514,18 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'deleteModel') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'DELETE') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/models/*') - - assert.strictEqual(traces[0][0].metrics['openai.response.deleted'], 1) + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'DELETE', + 'openai.request.endpoint': '/v1/models/*', + 'openai.response.id': 
'ft:gpt-4.1-mini-2025-04-14:datadog-staging::BkaILRSh' + }, + metrics: { + 'openai.response.deleted': 1 + } + }) assert.ok('openai.response.id' in traces[0][0].meta) - assert.strictEqual( - traces[0][0].meta['openai.response.id'], - 'ft:gpt-4.1-mini-2025-04-14:datadog-staging::BkaILRSh' - ) }) if (semver.satisfies(realVersion, '>=4.0.0')) { @@ -532,10 +552,13 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'listFiles') } - assert.strictEqual(traces[0][0].error, 0) - - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/files') - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.endpoint': '/vcr/openai/files', + 'openai.request.method': 'GET' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.count')) }) @@ -568,16 +591,19 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createFile') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/files') - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - - assert.strictEqual(traces[0][0].meta['openai.request.filename'], 'fine-tune.jsonl') - assert.strictEqual(traces[0][0].meta['openai.request.purpose'], 'fine-tune') - assert.strictEqual(traces[0][0].meta['openai.response.purpose'], 'fine-tune') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.endpoint': '/vcr/openai/files', + 'openai.request.method': 'POST', + 'openai.request.filename': 'fine-tune.jsonl', + 'openai.request.purpose': 'fine-tune', + 'openai.response.purpose': 'fine-tune', + 'openai.response.filename': 'fine-tune.jsonl' + } + }) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.status')) assert.match(traces[0][0].meta['openai.response.id'], /^file-/) - assert.strictEqual(traces[0][0].meta['openai.response.filename'], 
'fine-tune.jsonl') assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.bytes')) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.created_at')) }) @@ -609,13 +635,16 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'retrieveFile') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/files/*') - - assert.strictEqual(traces[0][0].meta['openai.response.filename'], 'fine-tune.jsonl') - assert.strictEqual(traces[0][0].meta['openai.response.id'], 'file-RpTpuvRVtnKpdKZb7DDGto') - assert.strictEqual(traces[0][0].meta['openai.response.purpose'], 'fine-tune') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/v1/files/*', + 'openai.response.filename': 'fine-tune.jsonl', + 'openai.response.id': 'file-RpTpuvRVtnKpdKZb7DDGto', + 'openai.response.purpose': 'fine-tune' + } + }) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.status')) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.bytes')) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.created_at')) @@ -646,9 +675,13 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'downloadFile') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/files/*/content') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/v1/files/*/content' + } + }) }) if (semver.satisfies(realVersion, '>=4.0.0 < 4.17.1')) { @@ -679,11 +712,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'deleteFile') } - assert.strictEqual(traces[0][0].error, 0) - 
assert.strictEqual(traces[0][0].meta['openai.request.method'], 'DELETE') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/files/*') - - assert.strictEqual(traces[0][0].meta['openai.response.id'], 'file-RpTpuvRVtnKpdKZb7DDGto') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'DELETE', + 'openai.request.endpoint': '/v1/files/*', + 'openai.response.id': 'file-RpTpuvRVtnKpdKZb7DDGto' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.deleted')) }) @@ -716,14 +752,17 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createFineTune') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/fine_tuning/jobs') - - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'gpt-4.1-mini-2025-04-14') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/fine_tuning/jobs', + 'openai.request.model': 'gpt-4.1-mini-2025-04-14', + 'openai.response.model': 'gpt-4.1-mini-2025-04-14' + } + }) assert.match(traces[0][0].meta['openai.response.id'], /^ftjob-/) - assert.strictEqual(traces[0][0].meta['openai.response.model'], 'gpt-4.1-mini-2025-04-14') assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.created_at')) }) @@ -752,11 +791,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'retrieveFineTune') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/fine_tuning/jobs/*') - - assert.strictEqual(traces[0][0].meta['openai.response.id'], 'ftjob-q9CUUUsHJemGUVQ1Ecc01zcf') + assertObjectContains(traces[0][0], { + 
error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/v1/fine_tuning/jobs/*', + 'openai.response.id': 'ftjob-q9CUUUsHJemGUVQ1Ecc01zcf' + } + }) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.model')) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.created_at')) }) @@ -782,10 +824,14 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].resource, 'cancelFineTune') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/fine_tuning/jobs/*/cancel') - assert.strictEqual(traces[0][0].meta['openai.response.id'], 'ftjob-q9CUUUsHJemGUVQ1Ecc01zcf') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/v1/fine_tuning/jobs/*/cancel', + 'openai.response.id': 'ftjob-q9CUUUsHJemGUVQ1Ecc01zcf' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.created_at')) }) @@ -810,9 +856,13 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].resource, 'listFineTuneEvents') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/v1/fine_tuning/jobs/*/events') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/v1/fine_tuning/jobs/*/events' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.count')) }) @@ -837,10 +887,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'listFineTunes') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'GET') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], 
'/vcr/openai/fine_tuning/jobs') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'GET', + 'openai.request.endpoint': '/vcr/openai/fine_tuning/jobs' + } + }) assert.ok(Object.hasOwn(traces[0][0].metrics, 'openai.response.count')) }) @@ -865,9 +919,13 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createModeration') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/moderations') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/moderations' + } + }) assert.match(traces[0][0].meta['openai.response.id'], /^modr-/) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.model')) @@ -905,11 +963,15 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createImage') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/images/generations') - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'dall-e-3') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/images/generations', + 'openai.request.model': 'dall-e-3' + } + }) }) if (semver.satisfies(realVersion, '>=4.0.0')) { @@ -966,10 +1028,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createImageEdit') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], 
'/vcr/openai/images/edits') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/images/edits' + } + }) // TODO(sabrenner): fix in a follow-up (super simple - img.name) }) @@ -1009,10 +1075,14 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createImageVariation') } - assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/images/variations') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/images/variations' + } + }) }) if (semver.satisfies(realVersion, '>=4.0.0')) { @@ -1053,12 +1123,15 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createTranscription') } - assert.strictEqual(traces[0][0].error, 0) - assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/audio/transcriptions') - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'gpt-4o-mini-transcribe') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.endpoint': '/vcr/openai/audio/transcriptions', + 'openai.request.method': 'POST', + 'openai.request.model': 'gpt-4o-mini-transcribe' + } + }) }) const result = await openai.audio.transcriptions.create({ @@ -1095,12 +1168,15 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createTranslation') } - assert.strictEqual(traces[0][0].error, 0) - assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/audio/translations') - 
assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'whisper-1') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.endpoint': '/vcr/openai/audio/translations', + 'openai.request.method': 'POST', + 'openai.request.model': 'whisper-1' + } + }) }) if (semver.satisfies(realVersion, '>=4.0.0')) { @@ -1146,13 +1222,15 @@ describe('Plugin', () => { } else { assert.strictEqual(traces[0][0].resource, 'createChatCompletion') } - assert.strictEqual(traces[0][0].error, 0) - - assert.strictEqual(traces[0][0].meta['openai.request.method'], 'POST') assert.ok('openai.request.endpoint' in traces[0][0].meta) - assert.strictEqual(traces[0][0].meta['openai.request.endpoint'], '/vcr/openai/chat/completions') - - assert.strictEqual(traces[0][0].meta['openai.request.model'], 'gpt-3.5-turbo') + assertObjectContains(traces[0][0], { + error: 0, + meta: { + 'openai.request.method': 'POST', + 'openai.request.endpoint': '/vcr/openai/chat/completions', + 'openai.request.model': 'gpt-3.5-turbo' + } + }) assert.ok(Object.hasOwn(traces[0][0].meta, 'openai.response.model')) }) diff --git a/packages/datadog-plugin-pg/test/index.spec.js b/packages/datadog-plugin-pg/test/index.spec.js index 61c2b48b93e..a2121ef7878 100644 --- a/packages/datadog-plugin-pg/test/index.spec.js +++ b/packages/datadog-plugin-pg/test/index.spec.js @@ -11,6 +11,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') const ddpv = require('mocha/package.json').version +const { assertObjectContains } = require('../../../integration-tests/helpers') const clients = { pg: pg => pg.Client @@ -71,13 +72,19 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) 
assert.strictEqual(traces[0][0].resource, 'SELECT $1::text as message') assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.user'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.type'], 'postgres') - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].meta['_dd.integration'], 'pg') - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + 'span.kind': 'client', + 'db.name': 'postgres', + 'db.user': 'postgres', + 'db.type': 'postgres', + component: 'pg', + '_dd.integration': 'pg' + }, + metrics: { + 'network.destination.port': 5432 + } + }) if (implementation !== 'pg.native') { assert.ok(Object.hasOwn(traces[0][0].metrics, 'db.pid')) @@ -119,12 +126,18 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) assert.strictEqual(traces[0][0].resource, 'SELECT $1::text as message') assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.user'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.type'], 'postgres') - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + 'span.kind': 'client', + 'db.name': 'postgres', + 'db.user': 'postgres', + 'db.type': 'postgres', + component: 'pg' + }, + metrics: { + 'network.destination.port': 5432 + } + }) if (implementation !== 'pg.native') { assert.ok(Object.hasOwn(traces[0][0].metrics, 'db.pid')) @@ -143,11 +156,17 @@ describe('Plugin', () => { let error agent.assertSomeTraces(traces => { - 
assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) - assert.strictEqual(traces[0][0].meta[ERROR_STACK], error.stack) - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + [ERROR_STACK]: error.stack, + component: 'pg' + }, + metrics: { + 'network.destination.port': 5432 + } + }) }) .then(done) .catch(done) @@ -165,16 +184,19 @@ describe('Plugin', () => { let error agent.assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta[ERROR_TYPE], error.name) - assert.strictEqual(traces[0][0].meta[ERROR_MESSAGE], error.message) + assertObjectContains(traces[0][0].meta, { + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message, + component: 'pg' + }) // pg modifies stacktraces as of v8.11.1 const actualErrorNoStack = traces[0][0].meta[ERROR_STACK].split('\n')[0] const expectedErrorNoStack = error.stack.split('\n')[0] assert.deepStrictEqual(actualErrorNoStack, expectedErrorNoStack) - - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0].metrics, { + 'network.destination.port': 5432 + }) }) .then(done) .catch(done) @@ -234,12 +256,18 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].service, expectedSchema.outbound.serviceName) assert.strictEqual(traces[0][0].resource, 'SELECT * FROM generate_series(0, 1) num') assert.strictEqual(traces[0][0].type, 'sql') - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.type'], 'postgres') - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['db.stream'], 1) - 
assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + 'span.kind': 'client', + 'db.name': 'postgres', + 'db.type': 'postgres', + component: 'pg' + }, + metrics: { + 'db.stream': 1, + 'network.destination.port': 5432 + } + }) }) const cursor = client.query(new Cursor('SELECT * FROM generate_series(0, 1) num')) @@ -265,12 +293,18 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].resource, 'SELECT * FROM generate_series(0, 1) num') assert.strictEqual(traces[0][0].type, 'sql') assert.strictEqual(traces[0][0].error, 0) - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.type'], 'postgres') - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['db.stream'], 1) - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + 'span.kind': 'client', + 'db.name': 'postgres', + 'db.type': 'postgres', + component: 'pg' + }, + metrics: { + 'db.stream': 1, + 'network.destination.port': 5432 + } + }) }) const query = new QueryStream('SELECT * FROM generate_series(0, 1) num', []) @@ -294,12 +328,18 @@ describe('Plugin', () => { assert.strictEqual(traces[0][0].resource, 'SELECT * FROM generate_series(0, 1) num') assert.strictEqual(traces[0][0].type, 'sql') assert.strictEqual(traces[0][0].error, 1) - assert.strictEqual(traces[0][0].meta['span.kind'], 'client') - assert.strictEqual(traces[0][0].meta['db.name'], 'postgres') - assert.strictEqual(traces[0][0].meta['db.type'], 'postgres') - assert.strictEqual(traces[0][0].meta.component, 'pg') - assert.strictEqual(traces[0][0].metrics['db.stream'], 1) - assert.strictEqual(traces[0][0].metrics['network.destination.port'], 5432) + assertObjectContains(traces[0][0], { + meta: { + 'span.kind': 'client', + 'db.name': 'postgres', + 
'db.type': 'postgres', + component: 'pg' + }, + metrics: { + 'db.stream': 1, + 'network.destination.port': 5432 + } + }) }) const query = new QueryStream('SELECT * FROM generate_series(0, 1) num', []) @@ -609,7 +649,9 @@ describe('Plugin', () => { it('query should inject _dd.dbm_trace_injected into span', done => { agent.assertSomeTraces(traces => { - assert.strictEqual(traces[0][0].meta['_dd.dbm_trace_injected'], 'true') + assertObjectContains(traces[0][0].meta, { + '_dd.dbm_trace_injected': 'true' + }) done() }) diff --git a/packages/datadog-plugin-rhea/test/index.spec.js b/packages/datadog-plugin-rhea/test/index.spec.js index 9f9c1727b5c..dec4cd77274 100644 --- a/packages/datadog-plugin-rhea/test/index.spec.js +++ b/packages/datadog-plugin-rhea/test/index.spec.js @@ -449,13 +449,16 @@ describe('Plugin', () => { agent.assertSomeTraces(traces => { const span = traces[0][0] - assert.strictEqual(span.error, 1) - assertObjectContains(span.meta, { - [ERROR_MESSAGE]: 'this is an error', - [ERROR_TYPE]: 'Error', - [ERROR_STACK]: error.stack, - component: 'rhea' + assertObjectContains(span, { + error: 1, + meta: { + [ERROR_MESSAGE]: 'this is an error', + [ERROR_TYPE]: 'Error', + [ERROR_STACK]: error.stack, + component: 'rhea' + } }) + Session.prototype.on_transfer = onTransfer }).then(done, done) diff --git a/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js b/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js index 1e4b9f6a521..79380fcbf27 100644 --- a/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js +++ b/packages/dd-trace/test/plugins/util/inferred_proxy.spec.js @@ -8,6 +8,7 @@ const { Agent } = require('node:http') require('../../setup/core') const agent = require('../agent') +const { assertObjectContains } = require('../../../../../integration-tests/helpers') // Create axios instance with no connection pooling const httpClient = axios.create({ @@ -126,26 +127,35 @@ describe('Inferred Proxy Spans', function () { 
assert.strictEqual(spans[0].service, 'example.com') assert.strictEqual(spans[0].resource, 'GET /test') assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].meta['http.url'], 'example.com/test') - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta.component, 'aws-apigateway') - assert.strictEqual(spans[0].meta['_dd.integration'], 'aws-apigateway') - assert.strictEqual(spans[0].metrics['_dd.inferred_span'], 1) + assertObjectContains(spans[0], { + meta: { + 'http.url': 'example.com/test', + 'http.method': 'GET', + 'http.status_code': '200', + component: 'aws-apigateway', + '_dd.integration': 'aws-apigateway' + }, + metrics: { + '_dd.inferred_span': 1 + } + }) assert.strictEqual(spans[0].start.toString(), '1729780025472999936') assert.strictEqual(spans[0].span_id.toString(), spans[1].parent_id.toString()) - assert.strictEqual(spans[1].name, 'web.request') - assert.strictEqual(spans[1].service, 'aws-server') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET') - assert.strictEqual(spans[1].meta.component, 'http') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.url'], `http://127.0.0.1:${port}/`) - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '200') - assert.strictEqual(spans[1].meta['span.kind'], 'server') + assertObjectContains(spans[1], { + name: 'web.request', + service: 'aws-server', + type: 'web', + resource: 'GET', + meta: { + component: 'http', + 'span.kind': 'server', + 'http.url': `http://127.0.0.1:${port}/`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) }) }) @@ -163,28 +173,37 @@ describe('Inferred Proxy Spans', function () { const spans = traces[0] assert.strictEqual(spans.length, 2) - assert.strictEqual(spans[0].name, 'aws.apigateway') - 
assert.strictEqual(spans[0].service, 'example.com') - assert.strictEqual(spans[0].resource, 'GET /test') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].meta['http.url'], 'example.com/test') - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '500') - assert.strictEqual(spans[0].meta.component, 'aws-apigateway') + assertObjectContains(spans[0], { + name: 'aws.apigateway', + service: 'example.com', + resource: 'GET /test', + type: 'web', + meta: { + 'http.url': 'example.com/test', + 'http.method': 'GET', + 'http.status_code': '500', + component: 'aws-apigateway' + } + }) + assert.strictEqual(spans[0].error, 1) assert.strictEqual(spans[0].start.toString(), '1729780025472999936') assert.strictEqual(spans[0].span_id.toString(), spans[1].parent_id.toString()) - assert.strictEqual(spans[1].name, 'web.request') - assert.strictEqual(spans[1].service, 'aws-server') - assert.strictEqual(spans[1].type, 'web') - assert.strictEqual(spans[1].resource, 'GET') - assert.strictEqual(spans[1].meta.component, 'http') - assert.strictEqual(spans[1].meta['span.kind'], 'server') - assert.strictEqual(spans[1].meta['http.url'], `http://127.0.0.1:${port}/error`) - assert.strictEqual(spans[1].meta['http.method'], 'GET') - assert.strictEqual(spans[1].meta['http.status_code'], '500') - assert.strictEqual(spans[1].meta['span.kind'], 'server') + assertObjectContains(spans[1], { + name: 'web.request', + service: 'aws-server', + type: 'web', + resource: 'GET', + meta: { + component: 'http', + 'span.kind': 'server', + 'http.url': `http://127.0.0.1:${port}/error`, + 'http.method': 'GET', + 'http.status_code': '500' + } + }) + assert.strictEqual(spans[1].error, 1) }) }) @@ -200,16 +219,20 @@ describe('Inferred Proxy Spans', function () { const spans = traces[0] assert.strictEqual(spans.length, 1) - assert.strictEqual(spans[0].name, 'web.request') - assert.strictEqual(spans[0].service, 'aws-server') - 
assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET') - assert.strictEqual(spans[0].meta.component, 'http') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://127.0.0.1:${port}/no-aws-headers`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta['span.kind'], 'server') + assertObjectContains(spans[0], { + name: 'web.request', + service: 'aws-server', + type: 'web', + resource: 'GET', + meta: { + component: 'http', + 'span.kind': 'server', + 'http.url': `http://127.0.0.1:${port}/no-aws-headers`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) + assert.strictEqual(spans[0].error, 0) }) }) @@ -228,16 +251,20 @@ describe('Inferred Proxy Spans', function () { const spans = traces[0] assert.strictEqual(spans.length, 1) - assert.strictEqual(spans[0].name, 'web.request') - assert.strictEqual(spans[0].service, 'aws-server') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET') - assert.strictEqual(spans[0].meta.component, 'http') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://127.0.0.1:${port}/a-few-aws-headers`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta['span.kind'], 'server') + assertObjectContains(spans[0], { + name: 'web.request', + service: 'aws-server', + type: 'web', + resource: 'GET', + meta: { + component: 'http', + 'span.kind': 'server', + 'http.url': `http://127.0.0.1:${port}/a-few-aws-headers`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) + assert.strictEqual(spans[0].error, 0) }) }) @@ -256,16 +283,19 @@ describe('Inferred Proxy Spans', function () { assert.strictEqual(spans.length, 1) - assert.strictEqual(spans[0].name, 
'web.request') - assert.strictEqual(spans[0].service, 'aws-server') - assert.strictEqual(spans[0].type, 'web') - assert.strictEqual(spans[0].resource, 'GET') - assert.strictEqual(spans[0].meta.component, 'http') - assert.strictEqual(spans[0].meta['span.kind'], 'server') - assert.strictEqual(spans[0].meta['http.url'], `http://127.0.0.1:${port}/configured-off`) - assert.strictEqual(spans[0].meta['http.method'], 'GET') - assert.strictEqual(spans[0].meta['http.status_code'], '200') - assert.strictEqual(spans[0].meta['span.kind'], 'server') + assertObjectContains(spans[0], { + name: 'web.request', + service: 'aws-server', + type: 'web', + resource: 'GET', + meta: { + component: 'http', + 'span.kind': 'server', + 'http.url': `http://127.0.0.1:${port}/configured-off`, + 'http.method': 'GET', + 'http.status_code': '200' + } + }) }) }) }) From 855bf7c2cb12fc4aa66cc2c8d65cd05a4be97a0e Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 18 Dec 2025 13:02:25 -0500 Subject: [PATCH 06/15] add maximum node version in guardrails (#6788) --- .github/workflows/platform.yml | 2 + integration-tests/init.spec.js | 117 ++++++++++++++++++---- package.json | 2 +- packages/dd-trace/src/guardrails/index.js | 6 +- 4 files changed, 102 insertions(+), 25 deletions(-) diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index 62b4e6bb46c..91ce670a8a3 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -442,6 +442,7 @@ jobs: # We'll run these separately for earlier (i.e. 
unsupported) versions integration-guardrails: strategy: + fail-fast: false matrix: version: [14.0.0, 14, 16.0.0, 18.0.0, 20.0.0, 22.0.0, 24.0.0] runs-on: ubuntu-latest @@ -462,6 +463,7 @@ jobs: integration-guardrails-unsupported: strategy: + fail-fast: false matrix: version: ['0.8', '0.10', '0.12', '4', '6', '8', '10', '12'] runs-on: ubuntu-latest diff --git a/integration-tests/init.spec.js b/integration-tests/init.spec.js index dd091b63be3..efed3fafae9 100644 --- a/integration-tests/init.spec.js +++ b/integration-tests/init.spec.js @@ -1,5 +1,6 @@ 'use strict' +const assert = require('assert') const semver = require('semver') const { runAndCheckWithTelemetry: testFile, @@ -14,7 +15,7 @@ const fs = require('fs') const DD_INJECTION_ENABLED = 'tracing' const DD_INJECT_FORCE = 'true' const DD_TRACE_DEBUG = 'true' -const { NODE_VERSION } = require('../version') +const { NODE_MAJOR, NODE_VERSION } = require('../version') const telemetryAbort = ['abort', 'reason:incompatible_runtime', 'abort.runtime', ''] const telemetryForced = ['complete', 'injection_forced:true'] @@ -99,46 +100,118 @@ function testRuntimeVersionChecks (arg, filename) { } } - if (!currentVersionIsSupported) { - context('when node version is less than engines field', () => { - useEnv({ NODE_OPTIONS }) + let pkgPath + let pkgStr - it('should not initialize the tracer', () => doTest('false\n', [])) + before(() => { + pkgPath = `${sandboxCwd()}/node_modules/dd-trace/package.json` + pkgStr = fs.readFileSync(pkgPath, 'utf8') + }) - context('with DD_INJECTION_ENABLED', () => { - useEnv({ DD_INJECTION_ENABLED }) + after(() => { + fs.writeFileSync(pkgPath, pkgStr) + }) - context('without debug', () => { - it('should not initialize the tracer', () => doTest('false\n', telemetryAbort)) + it('should be able to use the engines field', () => { + const engines = require(`${sandboxCwd()}/node_modules/dd-trace/package.json`).engines.node - it('should initialize the tracer, if DD_INJECT_FORCE', () => 
doTestForced('true\n', telemetryForced)) - }) + assert.match(engines, /^>=\d+ <\d+$/) + }) - context('with debug', () => { - useEnv({ DD_TRACE_DEBUG }) + context('when node version is too recent', () => { + useEnv({ NODE_OPTIONS }) + + before(() => { + const pkg = JSON.parse(pkgStr) + pkg.engines.node = `>=${NODE_MAJOR - 1} <${NODE_MAJOR}` + fs.writeFileSync(pkgPath, JSON.stringify(pkg)) + }) - it('should not initialize the tracer', () => - doTest(`Aborting application instrumentation due to incompatible_runtime. + it('should not initialize the tracer', () => doTest('false\n', [])) + + context('with DD_INJECTION_ENABLED', () => { + useEnv({ DD_INJECTION_ENABLED }) + + context('without debug', () => { + it('should not initialize the tracer', () => doTest('false\n', telemetryAbort)) + + it('should initialize the tracer, if DD_INJECT_FORCE', () => doTestForced('true\n', telemetryForced)) + }) + + context('with debug', () => { + useEnv({ DD_TRACE_DEBUG }) + + it('should not initialize the tracer', () => + doTest(`Aborting application instrumentation due to incompatible_runtime. Found incompatible runtime Node.js ${process.versions.node}, Supported runtimes: Node.js \ ->=18. +>=${NODE_MAJOR - 1} <${NODE_MAJOR}. false `, telemetryAbort)) - it('should initialize the tracer, if DD_INJECT_FORCE', () => - doTestForced(`Aborting application instrumentation due to incompatible_runtime. + it('should initialize the tracer, if DD_INJECT_FORCE', () => + doTestForced(`Aborting application instrumentation due to incompatible_runtime. Found incompatible runtime Node.js ${process.versions.node}, Supported runtimes: Node.js \ ->=18. +>=${NODE_MAJOR - 1} <${NODE_MAJOR}. DD_INJECT_FORCE enabled, allowing unsupported runtimes and continuing. 
Application instrumentation bootstrapping complete true `, telemetryForced)) - }) }) }) - } else { - context('when node version is more than engines field', () => { + }) + + context('when node version is too old', () => { + useEnv({ NODE_OPTIONS }) + + before(() => { + const pkg = JSON.parse(pkgStr) + pkg.engines.node = `>=${NODE_MAJOR + 1} <${NODE_MAJOR + 2}` + fs.writeFileSync(pkgPath, JSON.stringify(pkg)) + }) + + it('should not initialize the tracer', () => doTest('false\n', [])) + + context('with DD_INJECTION_ENABLED', () => { + useEnv({ DD_INJECTION_ENABLED }) + + context('without debug', () => { + it('should not initialize the tracer', () => doTest('false\n', telemetryAbort)) + + it('should initialize the tracer, if DD_INJECT_FORCE', () => doTestForced('true\n', telemetryForced)) + }) + + context('with debug', () => { + useEnv({ DD_TRACE_DEBUG }) + + it('should not initialize the tracer', () => + doTest(`Aborting application instrumentation due to incompatible_runtime. +Found incompatible runtime Node.js ${process.versions.node}, Supported runtimes: Node.js \ +>=${NODE_MAJOR + 1} <${NODE_MAJOR + 2}. +false +`, telemetryAbort)) + + it('should initialize the tracer, if DD_INJECT_FORCE', () => + doTestForced(`Aborting application instrumentation due to incompatible_runtime. +Found incompatible runtime Node.js ${process.versions.node}, Supported runtimes: Node.js \ +>=${NODE_MAJOR + 1} <${NODE_MAJOR + 2}. +DD_INJECT_FORCE enabled, allowing unsupported runtimes and continuing. 
+Application instrumentation bootstrapping complete +true +`, telemetryForced)) + }) + }) + }) + + if (currentVersionIsSupported) { + context('when node version is in range of the engines field', () => { useEnv({ NODE_OPTIONS }) + before(() => { + const pkg = JSON.parse(pkgStr) + pkg.engines.node = '>=0 <1000' + fs.writeFileSync(pkgPath, JSON.stringify(pkg)) + }) + it('should initialize the tracer, if no DD_INJECTION_ENABLED', () => doTest('true\n', [], 'manual')) context('with DD_INJECTION_ENABLED', () => { diff --git a/package.json b/package.json index beb5c2c6bb5..207923f7f11 100644 --- a/package.json +++ b/package.json @@ -95,7 +95,7 @@ }, "homepage": "https://github.com/DataDog/dd-trace-js#readme", "engines": { - "node": ">=18" + "node": ">=18 <26" }, "files": [ "/package.json", diff --git a/packages/dd-trace/src/guardrails/index.js b/packages/dd-trace/src/guardrails/index.js index 9379d042c0e..585308d9bdf 100644 --- a/packages/dd-trace/src/guardrails/index.js +++ b/packages/dd-trace/src/guardrails/index.js @@ -14,7 +14,9 @@ function guard (fn) { var clobberBailout = false var forced = isTrue(process.env.DD_INJECT_FORCE) var engines = require('../../../../package.json').engines - var minMajor = parseInt(engines.node.replace(/[^0-9]/g, '')) + var versions = engines.node.match(/^>=(\d+) <(\d+)$/) + var minMajor = versions[1] + var nextMajor = versions[2] var version = process.versions.node if (process.env.DD_INJECTION_ENABLED) { @@ -40,7 +42,7 @@ function guard (fn) { // If the runtime doesn't match the engines field in package.json, then we // should not initialize the tracer. 
- if (!clobberBailout && NODE_MAJOR < minMajor) { + if (!clobberBailout && (NODE_MAJOR < minMajor || NODE_MAJOR >= nextMajor)) { initBailout = true telemetry([ { name: 'abort', tags: ['reason:incompatible_runtime'] }, From 22338f38eb0ad78b60b30028930f3eb4d6d4d773 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Thu, 18 Dec 2025 20:00:33 +0100 Subject: [PATCH 07/15] test: rewrite chai to assert (#7134) Automated rewrites including a lint:fix call. Afterwards manual fixes and small improvements. --- .../appsec/endpoints-collection.spec.js | 2 +- integration-tests/cucumber/cucumber.spec.js | 50 +- integration-tests/cypress/cypress.spec.js | 72 +- .../debugger/snapshot-pruning.spec.js | 2 +- integration-tests/jest/jest.spec.js | 104 +- integration-tests/mocha/mocha.spec.js | 70 +- .../playwright/playwright.spec.js | 308 +-- integration-tests/profiler/profiler.spec.js | 62 +- integration-tests/remote_config.spec.js | 2 +- integration-tests/vitest/vitest.spec.js | 6 +- .../test/express-mongo-sanitize.spec.js | 6 +- .../test/helpers/check-require-cache.spec.js | 3 +- .../test/multer.spec.js | 8 +- .../test/passport-http.spec.js | 10 +- .../test/passport-local.spec.js | 9 +- .../test/index.spec.js | 10 +- .../test/index.spec.js | 63 +- packages/datadog-plugin-fs/test/index.spec.js | 5 +- .../test/integration-test/client.spec.js | 7 +- .../test/http_endpoint.spec.js | 31 +- .../datadog-plugin-kafkajs/test/index.spec.js | 25 +- .../datadog-plugin-koa/test/index.spec.js | 4 +- .../test/index.spec.js | 33 +- .../datadog-plugin-next/test/index.spec.js | 5 +- .../datadog-plugin-openai/test/index.spec.js | 86 +- .../datadog-plugin-prisma/test/index.spec.js | 16 +- .../datadog-plugin-winston/test/index.spec.js | 3 +- packages/datadog-shimmer/test/shimmer.spec.js | 8 +- .../dd-trace/test/appsec/blocking.spec.js | 9 +- packages/dd-trace/test/appsec/graphql.spec.js | 3 +- .../hardcoded-password-analyzer.spec.js | 8 +- .../hardcoded-secret-analyzer.spec.js | 2 +- 
.../analyzers/ldap-injection-analyzer.spec.js | 3 +- .../analyzers/path-traversal-analyzer.spec.js | 4 +- .../analyzers/sql-injection-analyzer.spec.js | 7 +- .../iast/context/context-plugin.spec.js | 29 +- .../test/appsec/iast/iast-plugin.spec.js | 31 +- .../dd-trace/test/appsec/iast/index.spec.js | 6 +- .../appsec/iast/overhead-controller.spec.js | 17 +- .../appsec/iast/taint-tracking/index.spec.js | 4 +- .../appsec/iast/taint-tracking/plugin.spec.js | 81 +- .../iast/taint-tracking/plugins/kafka.spec.js | 13 +- .../taint-tracking/rewriter-telemetry.spec.js | 3 +- .../iast/taint-tracking/rewriter.spec.js | 11 +- .../taint-tracking-operations.spec.js | 51 +- .../appsec/iast/telemetry/iast-metric.spec.js | 23 +- .../test/appsec/iast/telemetry/index.spec.js | 10 +- .../test/appsec/iast/telemetry/logs.spec.js | 5 +- .../appsec/iast/telemetry/namespaces.spec.js | 7 +- .../appsec/iast/telemetry/span-tags.spec.js | 9 +- packages/dd-trace/test/appsec/iast/utils.js | 4 +- .../vulnerability-formatter/index.spec.js | 8 +- .../iast/vulnerability-reporter.spec.js | 31 +- packages/dd-trace/test/appsec/index.spec.js | 50 +- .../command_injection.integration.spec.js | 4 +- .../rasp/rasp-metrics.integration.spec.js | 12 +- .../dd-trace/test/appsec/rasp/utils.spec.js | 6 +- .../dd-trace/test/appsec/reporter.spec.js | 21 +- .../dd-trace/test/appsec/rule_manager.spec.js | 8 +- .../dd-trace/test/appsec/sdk/set_user.spec.js | 14 +- .../test/appsec/sdk/track_event.spec.js | 94 +- .../test/appsec/sdk/user_blocking.spec.js | 6 +- .../appsec/waf-metrics.integration.spec.js | 20 +- .../dd-trace/test/appsec/waf/index.spec.js | 15 +- .../appsec/waf/waf_context_wrapper.spec.js | 5 +- .../dynamic-instrumentation.spec.js | 6 +- .../exporters/agent-proxy/agent-proxy.spec.js | 5 +- .../exporters/ci-visibility-exporter.spec.js | 3 +- packages/dd-trace/test/config.spec.js | 1733 ++++++++++------- .../test/datastreams/processor.spec.js | 5 +- .../encode/agentless-ci-visibility.spec.js | 5 +- 
.../test/exporters/agent/writer.spec.js | 10 +- .../common/agent-info-exporter.spec.js | 4 +- .../test/exporters/log/exporter.spec.js | 3 +- .../exporters/span-stats/exporter.spec.js | 5 +- .../test/guardrails/telemetry.spec.js | 2 +- packages/dd-trace/test/llmobs/index.spec.js | 23 +- .../dd-trace/test/llmobs/sdk/index.spec.js | 6 +- .../test/llmobs/span_processor.spec.js | 2 +- .../dd-trace/test/llmobs/writers/base.spec.js | 4 +- .../openfeature/flagging_provider.spec.js | 4 +- .../flagging_provider_timeout.spec.js | 2 +- .../openfeature/writers/exposures.spec.js | 130 +- .../dd-trace/test/opentelemetry/logs.spec.js | 2 +- .../dd-trace/test/opentelemetry/span.spec.js | 7 +- .../opentracing/propagation/text_map.spec.js | 11 +- .../test/plugins/util/ip_extractor.spec.js | 5 +- .../dd-trace/test/priority_sampler.spec.js | 3 +- packages/dd-trace/test/process-tags.spec.js | 200 +- .../test/profiling/exporters/agent.spec.js | 58 +- .../dd-trace/test/profiling/profiler.spec.js | 13 +- .../test/remote_config/manager.spec.js | 6 +- .../test/service-naming/schema.spec.js | 5 +- packages/dd-trace/test/setup/core.js | 6 - packages/dd-trace/test/span_format.spec.js | 11 +- .../test/telemetry/dependencies.spec.js | 73 +- .../dd-trace/test/telemetry/index.spec.js | 2 +- .../test/telemetry/logs/index.spec.js | 29 +- .../dd-trace/test/telemetry/metrics.spec.js | 67 +- 99 files changed, 2219 insertions(+), 1875 deletions(-) diff --git a/integration-tests/appsec/endpoints-collection.spec.js b/integration-tests/appsec/endpoints-collection.spec.js index 9df64053440..d68edb14f3a 100644 --- a/integration-tests/appsec/endpoints-collection.spec.js +++ b/integration-tests/appsec/endpoints-collection.spec.js @@ -174,7 +174,7 @@ describe('Endpoints collection', () => { e.method === expected.method && e.path === expected.path ) - assert.ok(found != null) + assert.ok(found) assert.strictEqual(found.type, 'REST') assert.strictEqual(found.operation_name, 'http.request') 
assert.strictEqual(found.resource_name, `${expected.method} ${expected.path}`) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 2faca9f7f1c..d06396fef0b 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -201,7 +201,7 @@ describe(`cucumber@${version} commonJS`, () => { assert.strictEqual(testSpan.meta[ORIGIN_KEY], CI_APP_ORIGIN) assert.strictEqual(testSpan.meta[COMPONENT], 'cucumber') assert.strictEqual(testSpan.metrics[SAMPLING_PRIORITY], AUTO_KEEP) - assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION] != null) + assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION]) assert.strictEqual(testSpan.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) assert.strictEqual(testSpan.meta[TEST_SUITE], 'ci-visibility/cucumber-plugin-tests/features/simple.feature') assert.strictEqual( @@ -209,7 +209,7 @@ describe(`cucumber@${version} commonJS`, () => { 'ci-visibility/cucumber-plugin-tests/features/simple.feature', 'Test source file should be the simple feature' ) - assert.ok(testSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(testSpan.metrics[TEST_SOURCE_START]) assert.strictEqual(testSpan.type, 'test') assert.strictEqual(testSpan.name, 'cucumber.test') assert.strictEqual(testSpan.parent_id.toString(), '0') @@ -236,7 +236,7 @@ describe(`cucumber@${version} commonJS`, () => { assert.match(errorMessage, /AssertionError/) assert.match(errorMessage, /datadog/) assert.match(errorMessage, /godatad/) - assert.ok(testSpan.meta[ERROR_STACK] != null) + assert.ok(testSpan.meta[ERROR_STACK]) } if (testName === 'hooks fail') { @@ -245,7 +245,7 @@ describe(`cucumber@${version} commonJS`, () => { assert.match(errorMessage, /TypeError: Cannot set/) assert.match(errorMessage, /of undefined/) assert.match(errorMessage, /boom/) - assert.ok(testSpan.meta[ERROR_STACK] != null) + assert.ok(testSpan.meta[ERROR_STACK]) } const testSteps = spans.filter( @@ -254,7 +254,7 @@ 
describe(`cucumber@${version} commonJS`, () => { const { steps } = testInfoByTestName[testName] steps.forEach(({ name, stepStatus }) => { const stepSpan = testSteps.find(span => span.meta['cucumber.step'] === name) - assert.ok(stepSpan != null) + assert.ok(stepSpan) assert.strictEqual(stepSpan.meta['step.status'], stepStatus, `Test ${testName} should have step ${name} with status ${stepStatus}`) assert.strictEqual(stepSpan.meta[COMPONENT], 'cucumber') @@ -328,16 +328,16 @@ describe(`cucumber@${version} commonJS`, () => { assert.strictEqual(testSessionEventContent.meta[CUCUMBER_IS_PARALLEL], 'true') } - assert.ok(testSessionEventContent.test_session_id != null) - assert.ok(testSessionEventContent.meta[TEST_COMMAND] != null) - assert.ok(testSessionEventContent.meta[TEST_TOOLCHAIN] != null) + assert.ok(testSessionEventContent.test_session_id) + assert.ok(testSessionEventContent.meta[TEST_COMMAND]) + assert.ok(testSessionEventContent.meta[TEST_TOOLCHAIN]) assert.strictEqual(testSessionEventContent.resource.startsWith('test_session.'), true) assert.strictEqual(testSessionEventContent.meta[TEST_STATUS], 'fail') - assert.ok(testModuleEventContent.test_session_id != null) - assert.ok(testModuleEventContent.test_module_id != null) - assert.ok(testModuleEventContent.meta[TEST_COMMAND] != null) - assert.ok(testModuleEventContent.meta[TEST_MODULE] != null) + assert.ok(testModuleEventContent.test_session_id) + assert.ok(testModuleEventContent.test_module_id) + assert.ok(testModuleEventContent.meta[TEST_COMMAND]) + assert.ok(testModuleEventContent.meta[TEST_MODULE]) assert.strictEqual(testModuleEventContent.resource.startsWith('test_module.'), true) assert.strictEqual(testModuleEventContent.meta[TEST_STATUS], 'fail') assert.strictEqual( @@ -363,14 +363,14 @@ describe(`cucumber@${version} commonJS`, () => { test_session_id: testSessionId } }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + 
assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) assert.strictEqual(meta[TEST_SOURCE_FILE].startsWith(featuresPath), true) assert.strictEqual(metrics[TEST_SOURCE_START], 1) - assert.ok(metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(metrics[DD_HOST_CPU_COUNT]) }) assert.deepStrictEqual(testEvents.map(test => test.content.resource).sort(), [ @@ -399,9 +399,9 @@ describe(`cucumber@${version} commonJS`, () => { test_session_id: testSessionId } }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) assert.strictEqual(meta[TEST_SOURCE_FILE].startsWith('ci-visibility/features'), true) @@ -412,7 +412,7 @@ describe(`cucumber@${version} commonJS`, () => { if (runMode === 'parallel') { assert.strictEqual(meta[CUCUMBER_IS_PARALLEL], 'true') } - assert.ok(metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(metrics[DD_HOST_CPU_COUNT]) if (!meta[TEST_NAME].includes('Say skip')) { assert.strictEqual(meta['custom_tag.before'], 'hello before') assert.strictEqual(meta['custom_tag.after'], 'hello after') @@ -535,15 +535,15 @@ describe(`cucumber@${version} commonJS`, () => { 2, 'Steps should be covered twice' ) - assert.ok(coveragePayload.content.coverages[0].test_session_id != null) - assert.ok(coveragePayload.content.coverages[0].test_suite_id != null) + assert.ok(coveragePayload.content.coverages[0].test_session_id) + assert.ok(coveragePayload.content.coverages[0].test_suite_id) const testSession = 
eventsRequest .payload .events .find(event => event.type === 'test_session_end') .content - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const eventTypes = eventsRequest.payload.events.map(event => event.type) assertObjectContains(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) @@ -593,7 +593,7 @@ describe(`cucumber@${version} commonJS`, () => { assert.strictEqual(testSession.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testSession.meta[TEST_CODE_COVERAGE_ENABLED], 'false') assert.strictEqual(testSession.meta[TEST_ITR_SKIPPING_ENABLED], 'false') - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const testModule = payload.events.find(event => event.type === 'test_module_end').content assert.strictEqual(testModule.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testModule.meta[TEST_CODE_COVERAGE_ENABLED], 'false') @@ -1980,7 +1980,7 @@ describe(`cucumber@${version} commonJS`, () => { assert.strictEqual(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] spanIdByTest = retriedTest.span_id.toString() diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 3f668066549..1ae6c9f31e3 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -222,7 +222,7 @@ moduleTypes.forEach(({ span.resource === 'cypress/e2e/basic-fail.js.basic fail suite can fail' ) - assert.ok(passedTestSpan != null) + assert.ok(passedTestSpan) assert.strictEqual(passedTestSpan.name, 'cypress.test') 
assert.strictEqual(passedTestSpan.resource, 'cypress/e2e/basic-pass.js.basic pass suite can pass') assert.strictEqual(passedTestSpan.type, 'test') @@ -231,17 +231,17 @@ moduleTypes.forEach(({ assert.strictEqual(passedTestSpan.meta[TEST_SUITE], 'cypress/e2e/basic-pass.js') assert.strictEqual(passedTestSpan.meta[TEST_FRAMEWORK], 'cypress') assert.strictEqual(passedTestSpan.meta[TEST_TYPE], 'browser') - assert.ok(passedTestSpan.meta[TEST_SOURCE_FILE] != null) + assert.ok(passedTestSpan.meta[TEST_SOURCE_FILE]) assert.match(passedTestSpan.meta[TEST_SOURCE_FILE], /cypress\/e2e\/basic-pass\.js/) - assert.ok(passedTestSpan.meta[TEST_FRAMEWORK_VERSION] != null) - assert.ok(passedTestSpan.meta[COMPONENT] != null) - assert.ok(passedTestSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(passedTestSpan.meta[TEST_FRAMEWORK_VERSION]) + assert.ok(passedTestSpan.meta[COMPONENT]) + assert.ok(passedTestSpan.metrics[TEST_SOURCE_START]) assert.strictEqual(passedTestSpan.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) assert.strictEqual(passedTestSpan.meta.customTag, 'customValue') assert.strictEqual(passedTestSpan.meta.addTagsBeforeEach, 'customBeforeEach') assert.strictEqual(passedTestSpan.meta.addTagsAfterEach, 'customAfterEach') - assert.ok(failedTestSpan != null) + assert.ok(failedTestSpan) assert.strictEqual(failedTestSpan.name, 'cypress.test') assert.strictEqual(failedTestSpan.resource, 'cypress/e2e/basic-fail.js.basic fail suite can fail') assert.strictEqual(failedTestSpan.type, 'test') @@ -250,14 +250,14 @@ moduleTypes.forEach(({ assert.strictEqual(failedTestSpan.meta[TEST_SUITE], 'cypress/e2e/basic-fail.js') assert.strictEqual(failedTestSpan.meta[TEST_FRAMEWORK], 'cypress') assert.strictEqual(failedTestSpan.meta[TEST_TYPE], 'browser') - assert.ok(failedTestSpan.meta[TEST_SOURCE_FILE] != null) + assert.ok(failedTestSpan.meta[TEST_SOURCE_FILE]) assert.match(failedTestSpan.meta[TEST_SOURCE_FILE], /cypress\/e2e\/basic-fail\.js/) - 
assert.ok(failedTestSpan.meta[TEST_FRAMEWORK_VERSION] != null) - assert.ok(failedTestSpan.meta[COMPONENT] != null) - assert.ok(failedTestSpan.meta[ERROR_MESSAGE] != null) + assert.ok(failedTestSpan.meta[TEST_FRAMEWORK_VERSION]) + assert.ok(failedTestSpan.meta[COMPONENT]) + assert.ok(failedTestSpan.meta[ERROR_MESSAGE]) assert.match(failedTestSpan.meta[ERROR_MESSAGE], /expected/) - assert.ok(failedTestSpan.meta[ERROR_TYPE] != null) - assert.ok(failedTestSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(failedTestSpan.meta[ERROR_TYPE]) + assert.ok(failedTestSpan.metrics[TEST_SOURCE_START]) assert.strictEqual(passedTestSpan.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) assert.strictEqual(failedTestSpan.meta.customTag, 'customValue') assert.strictEqual(failedTestSpan.meta.addTagsBeforeEach, 'customBeforeEach') @@ -482,23 +482,23 @@ moduleTypes.forEach(({ const { content: testSessionEventContent } = testSessionEvent const { content: testModuleEventContent } = testModuleEvent - assert.ok(testSessionEventContent.test_session_id != null) - assert.ok(testSessionEventContent.meta[TEST_COMMAND] != null) - assert.ok(testSessionEventContent.meta[TEST_TOOLCHAIN] != null) + assert.ok(testSessionEventContent.test_session_id) + assert.ok(testSessionEventContent.meta[TEST_COMMAND]) + assert.ok(testSessionEventContent.meta[TEST_TOOLCHAIN]) assert.strictEqual(testSessionEventContent.resource.startsWith('test_session.'), true) assert.strictEqual(testSessionEventContent.meta[TEST_STATUS], 'fail') - assert.ok(testModuleEventContent.test_session_id != null) - assert.ok(testModuleEventContent.test_module_id != null) - assert.ok(testModuleEventContent.meta[TEST_COMMAND] != null) - assert.ok(testModuleEventContent.meta[TEST_MODULE] != null) + assert.ok(testModuleEventContent.test_session_id) + assert.ok(testModuleEventContent.test_module_id) + assert.ok(testModuleEventContent.meta[TEST_COMMAND]) + assert.ok(testModuleEventContent.meta[TEST_MODULE]) 
assert.strictEqual(testModuleEventContent.resource.startsWith('test_module.'), true) assert.strictEqual(testModuleEventContent.meta[TEST_STATUS], 'fail') assert.strictEqual( testModuleEventContent.test_session_id.toString(10), testSessionEventContent.test_session_id.toString(10) ) - assert.ok(testModuleEventContent.meta[TEST_FRAMEWORK_VERSION] != null) + assert.ok(testModuleEventContent.meta[TEST_FRAMEWORK_VERSION]) assertObjectContains(testSuiteEvents.map(suite => suite.content.resource), [ 'test_suite.cypress/e2e/other.cy.js', @@ -519,14 +519,14 @@ moduleTypes.forEach(({ test_session_id: testSessionId } }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) assert.strictEqual(meta[TEST_SOURCE_FILE].startsWith('cypress/e2e/'), true) assert.strictEqual(metrics[TEST_SOURCE_START], 1) - assert.ok(metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(metrics[DD_HOST_CPU_COUNT]) }) assertObjectContains(testEvents.map(test => test.content.resource), [ @@ -550,9 +550,9 @@ moduleTypes.forEach(({ test_session_id: testSessionId } }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) assert.strictEqual(meta[TEST_SOURCE_FILE].startsWith('cypress/e2e/'), true) @@ -560,7 +560,7 @@ moduleTypes.forEach(({ assert.strictEqual(meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') 
assert.strictEqual(meta['test.customtag'], 'customvalue') assert.strictEqual(meta['test.customtag2'], 'customvalue2') - assert.ok(metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(metrics[DD_HOST_CPU_COUNT]) }) }, 25000) @@ -820,7 +820,7 @@ moduleTypes.forEach(({ const notSkippedTest = events.find(event => event.content.resource === 'cypress/e2e/other.cy.js.context passes' ) - assert.ok(notSkippedTest != null) + assert.ok(notSkippedTest) assert.strictEqual(notSkippedTest.content.meta[TEST_STATUS], 'pass') }, 25000) @@ -1173,9 +1173,9 @@ moduleTypes.forEach(({ const testModuleEvent = events.find(event => event.type === 'test_module_end') testEvents.forEach(testEvent => { - assert.ok(testEvent.content.test_suite_id != null) - assert.ok(testEvent.content.test_module_id != null) - assert.ok(testEvent.content.test_session_id != null) + assert.ok(testEvent.content.test_suite_id) + assert.ok(testEvent.content.test_module_id) + assert.ok(testEvent.content.test_session_id) assert.notStrictEqual(testEvent.content.test_suite_id, testModuleEvent.content.test_module_id) }) }, 25000) @@ -1205,9 +1205,9 @@ moduleTypes.forEach(({ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) const testSessionEvent = events.find(event => event.type === 'test_session_end') - assert.ok(testSessionEvent != null) + assert.ok(testSessionEvent) const testModuleEvent = events.find(event => event.type === 'test_module_end') - assert.ok(testModuleEvent != null) + assert.ok(testModuleEvent) const testSuiteEvents = events.filter(event => event.type === 'test_suite_end') assert.strictEqual(testSuiteEvents.length, 4) const testEvents = events.filter(event => event.type === 'test') @@ -1243,9 +1243,9 @@ moduleTypes.forEach(({ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) const testSessionEvent = 
events.find(event => event.type === 'test_session_end') - assert.ok(testSessionEvent != null) + assert.ok(testSessionEvent) const testModuleEvent = events.find(event => event.type === 'test_module_end') - assert.ok(testModuleEvent != null) + assert.ok(testModuleEvent) const testSuiteEvents = events.filter(event => event.type === 'test_suite_end') assert.strictEqual(testSuiteEvents.length, 4) const testEvents = events.filter(event => event.type === 'test') diff --git a/integration-tests/debugger/snapshot-pruning.spec.js b/integration-tests/debugger/snapshot-pruning.spec.js index d3ba92c4e3a..d30f1b248c7 100644 --- a/integration-tests/debugger/snapshot-pruning.spec.js +++ b/integration-tests/debugger/snapshot-pruning.spec.js @@ -17,7 +17,7 @@ describe('Dynamic Instrumentation', function () { assert.ok(payloadSize < 1024 * 1024) // 1MB const capturesJson = JSON.stringify(payload.debugger.snapshot.captures) - assert.ok(capturesJson.includes('"pruned":true')) + assert.match(capturesJson, /"pruned":true/) done() }) diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js index 32e04324a4b..2c79a387c2a 100644 --- a/integration-tests/jest/jest.spec.js +++ b/integration-tests/jest/jest.spec.js @@ -191,7 +191,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { testSpans.forEach(testSpan => { assert.strictEqual(testSpan.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) - assert.ok(testSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(testSpan.metrics[TEST_SOURCE_START]) }) }) @@ -241,7 +241,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { (!parameters || test.meta[TEST_PARAMETERS] === JSON.stringify(parameters)) ) - assert.ok(test != null) + assert.ok(test) assert.strictEqual(test.meta.language, 'javascript') assert.strictEqual(test.meta.service, 'plugin-tests') @@ -262,8 +262,8 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(test.service, 'plugin-tests') 
assert.strictEqual(test.resource, `ci-visibility/jest-plugin-tests/jest-test.js.${name}`) - assert.ok(test.metrics[TEST_SOURCE_START] != null) - assert.ok(test.meta[TEST_FRAMEWORK_VERSION] != null) + assert.ok(test.metrics[TEST_SOURCE_START]) + assert.ok(test.meta[TEST_FRAMEWORK_VERSION]) if (extraTags) { Object.entries(extraTags).forEach(([key, value]) => { @@ -319,7 +319,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { span.resource === `ci-visibility/jest-plugin-tests/jest-hook-failure.js.${name}` ) - assert.ok(testSpan != null) + assert.ok(testSpan) assert.strictEqual(testSpan.meta.language, 'javascript') assert.strictEqual(testSpan.meta[ORIGIN_KEY], CI_APP_ORIGIN) assert.strictEqual(testSpan.meta[TEST_FRAMEWORK], 'jest') @@ -334,7 +334,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(testSpan.type, 'test') assert.strictEqual(testSpan.name, 'jest.test') assert.strictEqual(testSpan.resource, `ci-visibility/jest-plugin-tests/jest-hook-failure.js.${name}`) - assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION] != null) + assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION]) }) }, 25000) @@ -372,7 +372,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { span.resource === `ci-visibility/jest-plugin-tests/jest-focus.js.${name}` ) - assert.ok(testSpan != null) + assert.ok(testSpan) assert.strictEqual(testSpan.meta.language, 'javascript') assert.strictEqual(testSpan.meta[ORIGIN_KEY], CI_APP_ORIGIN) assert.strictEqual(testSpan.meta[TEST_FRAMEWORK], 'jest') @@ -384,7 +384,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(testSpan.type, 'test') assert.strictEqual(testSpan.name, 'jest.test') assert.strictEqual(testSpan.resource, `ci-visibility/jest-plugin-tests/jest-focus.js.${name}`) - assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION] != null) + assert.ok(testSpan.meta[TEST_FRAMEWORK_VERSION]) }) }, 25000) @@ -413,7 +413,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { const testSpan = payloads .flatMap(({ payload }) => 
payload.flatMap(trace => trace)) .find(span => span.type === 'test') - assert.ok(testSpan != null) + assert.ok(testSpan) assert.strictEqual(testSpan.meta[TEST_NAME], 'jest-inject-globals will be run') assert.strictEqual(testSpan.meta[TEST_STATUS], 'pass') assert.strictEqual(testSpan.meta[TEST_SUITE], 'ci-visibility/jest-plugin-tests/jest-inject-globals.js') @@ -476,8 +476,8 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { ] ) assert.strictEqual(suites.length, 2) - assert.ok(sessionEventContent != null) - assert.ok(moduleEventContent != null) + assert.ok(sessionEventContent) + assert.ok(moduleEventContent) assert.match(testOutput, new RegExp(expectedStdout)) @@ -486,12 +486,12 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true ) - assert.ok(testEvent.metrics[TEST_SOURCE_START] != null) + assert.ok(testEvent.metrics[TEST_SOURCE_START]) assert.strictEqual(testEvent.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') // Can read DD_TAGS assert.strictEqual(testEvent.meta['test.customtag'], 'customvalue') assert.strictEqual(testEvent.meta['test.customtag2'], 'customvalue2') - assert.ok(testEvent.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(testEvent.metrics[DD_HOST_CPU_COUNT]) }) suites.forEach(testSuite => { @@ -500,7 +500,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { true ) assert.strictEqual(testSuite.metrics[TEST_SOURCE_START], 1) - assert.ok(testSuite.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(testSuite.metrics[DD_HOST_CPU_COUNT]) }) done() } catch (error) { @@ -542,38 +542,38 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { const testSuiteEvent = events.find(event => event.type === 'test_suite_end').content const testEvent = events.find(event => event.type === 'test').content - assert.ok(testSessionEvent != null) + assert.ok(testSessionEvent) assert.strictEqual(testSessionEvent.meta[TEST_STATUS], 'pass') - assert.ok(testSessionEvent[TEST_SESSION_ID] != 
null) - assert.ok(testSessionEvent.meta[TEST_COMMAND] != null) + assert.ok(testSessionEvent[TEST_SESSION_ID]) + assert.ok(testSessionEvent.meta[TEST_COMMAND]) assert.ok(testSessionEvent[TEST_SUITE_ID] == null) assert.ok(testSessionEvent[TEST_MODULE_ID] == null) - assert.ok(testModuleEvent != null) + assert.ok(testModuleEvent) assert.strictEqual(testModuleEvent.meta[TEST_STATUS], 'pass') - assert.ok(testModuleEvent[TEST_SESSION_ID] != null) - assert.ok(testModuleEvent[TEST_MODULE_ID] != null) - assert.ok(testModuleEvent.meta[TEST_COMMAND] != null) + assert.ok(testModuleEvent[TEST_SESSION_ID]) + assert.ok(testModuleEvent[TEST_MODULE_ID]) + assert.ok(testModuleEvent.meta[TEST_COMMAND]) assert.ok(testModuleEvent[TEST_SUITE_ID] == null) - assert.ok(testSuiteEvent != null) + assert.ok(testSuiteEvent) assert.strictEqual(testSuiteEvent.meta[TEST_STATUS], 'pass') assert.strictEqual(testSuiteEvent.meta[TEST_SUITE], 'ci-visibility/jest-plugin-tests/jest-test-suite.js') - assert.ok(testSuiteEvent.meta[TEST_COMMAND] != null) - assert.ok(testSuiteEvent.meta[TEST_MODULE] != null) - assert.ok(testSuiteEvent[TEST_SUITE_ID] != null) - assert.ok(testSuiteEvent[TEST_SESSION_ID] != null) - assert.ok(testSuiteEvent[TEST_MODULE_ID] != null) + assert.ok(testSuiteEvent.meta[TEST_COMMAND]) + assert.ok(testSuiteEvent.meta[TEST_MODULE]) + assert.ok(testSuiteEvent[TEST_SUITE_ID]) + assert.ok(testSuiteEvent[TEST_SESSION_ID]) + assert.ok(testSuiteEvent[TEST_MODULE_ID]) - assert.ok(testEvent != null) + assert.ok(testEvent) assert.strictEqual(testEvent.meta[TEST_STATUS], 'pass') assert.strictEqual(testEvent.meta[TEST_NAME], 'jest-test-suite-visibility works') assert.strictEqual(testEvent.meta[TEST_SUITE], 'ci-visibility/jest-plugin-tests/jest-test-suite.js') - assert.ok(testEvent.meta[TEST_COMMAND] != null) - assert.ok(testEvent.meta[TEST_MODULE] != null) - assert.ok(testEvent[TEST_SUITE_ID] != null) - assert.ok(testEvent[TEST_SESSION_ID] != null) - assert.ok(testEvent[TEST_MODULE_ID] != null) + 
assert.ok(testEvent.meta[TEST_COMMAND]) + assert.ok(testEvent.meta[TEST_MODULE]) + assert.ok(testEvent[TEST_SUITE_ID]) + assert.ok(testEvent[TEST_SESSION_ID]) + assert.ok(testEvent[TEST_MODULE_ID]) }) childProcess = exec( @@ -823,7 +823,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { env: { DD_TRACE_AGENT_PORT: receiver.port, NODE_OPTIONS: '-r dd-trace/ci/init', - RUN_IN_PARALLEL: true + RUN_IN_PARALLEL: 'true', }, stdio: 'pipe' }) @@ -844,7 +844,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { cwd, env: { ...getCiVisAgentlessConfig(receiver.port), - RUN_IN_PARALLEL: true, + RUN_IN_PARALLEL: 'true', DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' @@ -874,7 +874,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { cwd, env: { ...getCiVisEvpProxyConfig(receiver.port), - RUN_IN_PARALLEL: true + RUN_IN_PARALLEL: 'true', }, stdio: 'pipe' }) @@ -915,7 +915,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] spanIdByTest = retriedTest.span_id.toString() @@ -946,7 +946,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { ...getCiVisAgentlessConfig(receiver.port), TESTS_TO_RUN: 'dynamic-instrumentation/test-', DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1', - RUN_IN_PARALLEL: true + RUN_IN_PARALLEL: 'true', }, stdio: 'inherit' } @@ -969,7 +969,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { env: { ...getCiVisAgentlessConfig(receiver.port), NODE_OPTIONS: '-r dd-trace/ci/init', - RUN_IN_PARALLEL: true, + RUN_IN_PARALLEL: 'true', TESTS_TO_RUN: 'timeout-test/timeout-test.js' }, stdio: 'pipe' @@ -1051,8 +1051,8 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { env: { ...getCiVisAgentlessConfig(receiver.port), 
TESTS_TO_RUN: 'jest-bad-import/jest-bad-import-test', - RUN_IN_PARALLEL: true, - WAIT_FOR_UNHANDLED_REJECTIONS: true + RUN_IN_PARALLEL: 'true', + WAIT_FOR_UNHANDLED_REJECTIONS: 'true' }, stdio: 'inherit' }) @@ -1103,7 +1103,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { env: { ...getCiVisAgentlessConfig(receiver.port), TESTS_TO_RUN: 'jest-bad-import-torn-down/jest-bad-import-test', - RUN_IN_PARALLEL: true, + RUN_IN_PARALLEL: 'true', }, stdio: 'inherit' }) @@ -1146,7 +1146,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { receiver.assertPayloadReceived(({ payload }) => { const testSession = payload.events.find(event => event.type === 'test_session_end').content - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) }, ({ url }) => url === '/api/v2/citestcycle').then(() => done()).catch(done) childProcess = exec( @@ -1170,8 +1170,8 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { const testSuites = events.filter(event => event.type === 'test_suite_end') const tests = events.filter(event => event.type === 'test') - assert.ok(testSession != null) - assert.ok(testModule != null) + assert.ok(testSession) + assert.ok(testModule) assert.strictEqual(testSuites.length, 2) assert.strictEqual(tests.length, 2) }) @@ -1301,7 +1301,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { childProcess = fork(startupTestFile, { cwd, env: { - DD_CIVISIBILITY_AGENTLESS_ENABLED: 1, + DD_CIVISIBILITY_AGENTLESS_ENABLED: '1', NODE_OPTIONS: '-r dd-trace/ci/init' }, stdio: 'pipe' @@ -1431,11 +1431,11 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assertObjectContains(allCoverageFiles.sort(), expectedCoverageFiles.sort()) const [coveragePayload] = codeCovRequest.payload - assert.ok(coveragePayload.content.coverages[0].test_session_id != null) - assert.ok(coveragePayload.content.coverages[0].test_suite_id != null) + assert.ok(coveragePayload.content.coverages[0].test_session_id) + 
assert.ok(coveragePayload.content.coverages[0].test_suite_id) const testSession = eventsRequest.payload.events.find(event => event.type === 'test_session_end').content - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const eventTypes = eventsRequest.payload.events.map(event => event.type) assertObjectContains(eventTypes, ['test', 'test_suite_end', 'test_session_end', 'test_module_end']) @@ -1482,7 +1482,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(testSession.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testSession.meta[TEST_CODE_COVERAGE_ENABLED], 'false') assert.strictEqual(testSession.meta[TEST_ITR_SKIPPING_ENABLED], 'false') - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const testModule = payload.events.find(event => event.type === 'test_module_end').content assert.strictEqual(testModule.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testModule.meta[TEST_CODE_COVERAGE_ENABLED], 'false') @@ -3519,7 +3519,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] spanIdByTest = retriedTest.span_id.toString() @@ -3603,7 +3603,7 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { assert.strictEqual(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 6) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] 
spanIdByTest = retriedTest.span_id.toString() diff --git a/integration-tests/mocha/mocha.spec.js b/integration-tests/mocha/mocha.spec.js index af50f45e597..96ece91518d 100644 --- a/integration-tests/mocha/mocha.spec.js +++ b/integration-tests/mocha/mocha.spec.js @@ -149,7 +149,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { testSpans.forEach(testSpan => { assert.strictEqual(testSpan.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) - assert.ok(testSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(testSpan.metrics[TEST_SOURCE_START]) }) done() @@ -212,20 +212,20 @@ describe(`mocha@${MOCHA_VERSION}`, function () { ] ) assert.strictEqual(suites.length, 2) - assert.ok(sessionEventContent != null) - assert.ok(moduleEventContent != null) + assert.ok(sessionEventContent) + assert.ok(moduleEventContent) tests.forEach(testEvent => { assert.strictEqual( testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true ) - assert.ok(testEvent.metrics[TEST_SOURCE_START] != null) + assert.ok(testEvent.metrics[TEST_SOURCE_START]) assert.strictEqual(testEvent.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') // Can read DD_TAGS assert.strictEqual(testEvent.meta['test.customtag'], 'customvalue') assert.strictEqual(testEvent.meta['test.customtag2'], 'customvalue2') - assert.ok(testEvent.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(testEvent.metrics[DD_HOST_CPU_COUNT]) }) suites.forEach(testSuite => { @@ -234,7 +234,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { true ) assert.strictEqual(testSuite.metrics[TEST_SOURCE_START], 1) - assert.ok(testSuite.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(testSuite.metrics[DD_HOST_CPU_COUNT]) }) }) @@ -285,7 +285,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.parent_id.toString(), '0') assert.strictEqual(test.meta[TEST_STATUS], 'pass') assert.strictEqual(test.meta[ORIGIN_KEY], CI_APP_ORIGIN) - assert.ok(test.meta[TEST_FRAMEWORK_VERSION] 
!= null) + assert.ok(test.meta[TEST_FRAMEWORK_VERSION]) assert.strictEqual(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) assert.strictEqual(test.meta[LIBRARY_VERSION], ddTraceVersion) assert.strictEqual(test.meta[COMPONENT], 'mocha') @@ -324,8 +324,8 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/failing.js') assert.strictEqual(test.meta[ERROR_TYPE], 'AssertionError') assert.strictEqual(test.meta[ERROR_MESSAGE], 'Expected values to be strictly equal:\n\ntrue !== false\n') - assert.ok(test.metrics[TEST_SOURCE_START] != null) - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.metrics[TEST_SOURCE_START]) + assert.ok(test.meta[ERROR_STACK]) assert.strictEqual(test.parent_id.toString(), '0') assert.strictEqual(test.type, 'test') assert.strictEqual(test.name, 'mocha.test') @@ -435,7 +435,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/done-fail.js') assert.strictEqual(test.meta[ERROR_TYPE], 'AssertionError') assert.strictEqual(test.meta[ERROR_MESSAGE], 'Expected values to be strictly equal:\n\ntrue !== false\n') - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -500,7 +500,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/promise-fail.js') assert.strictEqual(test.meta[ERROR_TYPE], 'AssertionError') assert.strictEqual(test.meta[ERROR_MESSAGE], 'Expected values to be strictly equal:\n\ntrue !== false\n') - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -565,7 +565,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/async-fail.js') assert.strictEqual(test.meta[ERROR_TYPE], 
'AssertionError') assert.strictEqual(test.meta[ERROR_MESSAGE], 'Expected values to be strictly equal:\n\ntrue !== false\n') - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -599,7 +599,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/timeout-fail.js') assert.strictEqual(test.meta[ERROR_TYPE], 'Error') assert.match(test.meta[ERROR_MESSAGE], /Timeout/) - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -663,7 +663,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_SUITE], 'ci-visibility/mocha-plugin-tests/parameterized.js') assert.strictEqual(test.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/parameterized.js') assert.strictEqual(test.meta[TEST_PARAMETERS], JSON.stringify({ arguments: [1, 2, 3], metadata: {} })) - assert.ok(test.metrics[TEST_SOURCE_START] != null) + assert.ok(test.metrics[TEST_SOURCE_START]) assert.strictEqual(test.parent_id.toString(), '0') assert.strictEqual(test.type, 'test') assert.strictEqual(test.name, 'mocha.test') @@ -695,7 +695,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { const [testSpan] = tests const httpSpan = spans.find(span => span.name === 'http.request') - assert.ok(httpSpan != null) + assert.ok(httpSpan) // Test span assertions assert.strictEqual(testSpan.meta[COMPONENT], 'mocha') @@ -705,7 +705,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(testSpan.meta[TEST_SUITE], 'ci-visibility/mocha-plugin-tests/integration.js') assert.strictEqual(testSpan.meta[TEST_SOURCE_FILE], 'ci-visibility/mocha-plugin-tests/integration.js') assert.strictEqual(testSpan.meta[ORIGIN_KEY], CI_APP_ORIGIN) - assert.ok(testSpan.metrics[TEST_SOURCE_START] != null) + assert.ok(testSpan.metrics[TEST_SOURCE_START]) assert.strictEqual(testSpan.parent_id.toString(), '0') // HTTP span 
assertions @@ -747,7 +747,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { .includes('mocha-fail-hook-sync "before each" hook for "will not run but be reported as failed":') ) assert.match(test.meta[ERROR_MESSAGE], /Cannot set /) - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -834,13 +834,13 @@ describe(`mocha@${MOCHA_VERSION}`, function () { testNames.forEach(({ name, status, errorMsg }) => { const test = tests.find(t => t.meta[TEST_NAME] === name) - assert.ok(test != null) + assert.ok(test) assert.strictEqual(test.meta[TEST_STATUS], status) assert.strictEqual(test.meta[COMPONENT], 'mocha') if (errorMsg) { assert.strictEqual(test.meta[ERROR_MESSAGE].startsWith(errorMsg), true) assert.strictEqual(test.meta[ERROR_TYPE], 'Error') - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) } }) }) @@ -872,7 +872,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(test.meta[TEST_STATUS], 'fail') assert.strictEqual(test.meta[ERROR_TYPE], 'AssertionError') assert.strictEqual(test.meta[ERROR_MESSAGE], 'Expected values to be strictly equal:\n\ntrue !== false\n') - assert.ok(test.meta[ERROR_STACK] != null) + assert.ok(test.meta[ERROR_STACK]) }) childProcess = exec( @@ -953,7 +953,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { testNames.forEach(({ name, status }) => { const test = tests.find(t => t.meta[TEST_NAME] === name) - assert.ok(test != null) + assert.ok(test) assert.strictEqual(test.meta[TEST_STATUS], status) assert.strictEqual(test.meta[COMPONENT], 'mocha') }) @@ -1001,8 +1001,8 @@ describe(`mocha@${MOCHA_VERSION}`, function () { const testModuleEvent = events.find(event => event.type === 'test_module_end')?.content const testSuiteEvents = events.filter(event => event.type === 'test_suite_end').map(e => e.content) - assert.ok(testSessionEvent != null) - assert.ok(testModuleEvent != null) + assert.ok(testSessionEvent) + assert.ok(testModuleEvent) 
assert.strictEqual(testSuiteEvents.length, 4, 'Should have 4 test suite events') assert.strictEqual(testSessionEvent.meta[TEST_STATUS], 'fail') @@ -1254,9 +1254,9 @@ describe(`mocha@${MOCHA_VERSION}`, function () { test_module_id: testModuleId, test_session_id: testSessionId }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), moduleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), moduleEventContent.test_session_id.toString(10)) }) @@ -1268,13 +1268,13 @@ describe(`mocha@${MOCHA_VERSION}`, function () { test_module_id: testModuleId, test_session_id: testSessionId }) => { - assert.ok(meta[TEST_COMMAND] != null) - assert.ok(meta[TEST_MODULE] != null) - assert.ok(testSuiteId != null) + assert.ok(meta[TEST_COMMAND]) + assert.ok(meta[TEST_MODULE]) + assert.ok(testSuiteId) assert.strictEqual(testModuleId.toString(10), moduleEventContent.test_module_id.toString(10)) assert.strictEqual(testSessionId.toString(10), moduleEventContent.test_session_id.toString(10)) assert.strictEqual(meta[MOCHA_IS_PARALLEL], 'true') - assert.ok(metrics[TEST_SOURCE_START] != null) + assert.ok(metrics[TEST_SOURCE_START]) }) }) @@ -1531,11 +1531,11 @@ describe(`mocha@${MOCHA_VERSION}`, function () { ) const [coveragePayload] = codeCovRequest.payload - assert.ok(coveragePayload.content.coverages[0].test_session_id != null) - assert.ok(coveragePayload.content.coverages[0].test_suite_id != null) + assert.ok(coveragePayload.content.coverages[0].test_session_id) + assert.ok(coveragePayload.content.coverages[0].test_suite_id) const testSession = eventsRequest.payload.events.find(event => event.type === 'test_session_end').content - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) 
const eventTypes = eventsRequest.payload.events.map(event => event.type) assertObjectContains(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) @@ -1583,7 +1583,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { assert.strictEqual(testSession.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testSession.meta[TEST_CODE_COVERAGE_ENABLED], 'false') assert.strictEqual(testSession.meta[TEST_ITR_SKIPPING_ENABLED], 'false') - assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] != null) + assert.ok(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const testModule = payload.events.find(event => event.type === 'test_module_end').content assert.strictEqual(testModule.meta[TEST_ITR_TESTS_SKIPPED], 'false') assert.strictEqual(testModule.meta[TEST_CODE_COVERAGE_ENABLED], 'false') @@ -3302,7 +3302,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] spanIdByTest = retriedTest.span_id.toString() diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index f49b6a34185..9cddc3d8359 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -1,13 +1,12 @@ 'use strict' +const assert = require('node:assert') const { once } = require('node:events') const { exec, execSync } = require('child_process') const satisfies = require('semifies') const path = require('path') const fs = require('fs') -const { assert } = require('chai') - const { sandboxCwd, useSandbox, @@ -145,7 +144,7 @@ versions.forEach((version) => { metadataDicts.forEach(metadata => { for (const testLevel of TEST_LEVEL_EVENT_TYPES) { - assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + 
assert.strictEqual(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') } }) @@ -159,14 +158,14 @@ versions.forEach((version) => { const stepEvents = events.filter(event => event.type === 'span') assert.ok(testSessionEvent.content.resource.includes('test_session.playwright test')) - assert.equal(testSessionEvent.content.meta[TEST_STATUS], 'fail') + assert.strictEqual(testSessionEvent.content.meta[TEST_STATUS], 'fail') assert.ok(testModuleEvent.content.resource.includes('test_module.playwright test')) - assert.equal(testModuleEvent.content.meta[TEST_STATUS], 'fail') - assert.equal(testSessionEvent.content.meta[TEST_TYPE], 'browser') - assert.equal(testModuleEvent.content.meta[TEST_TYPE], 'browser') + assert.strictEqual(testModuleEvent.content.meta[TEST_STATUS], 'fail') + assert.strictEqual(testSessionEvent.content.meta[TEST_TYPE], 'browser') + assert.strictEqual(testModuleEvent.content.meta[TEST_TYPE], 'browser') - assert.exists(testSessionEvent.content.meta[ERROR_MESSAGE]) - assert.exists(testModuleEvent.content.meta[ERROR_MESSAGE]) + assert.strictEqual(typeof testSessionEvent.content.meta[ERROR_MESSAGE], 'string') + assert.strictEqual(typeof testModuleEvent.content.meta[ERROR_MESSAGE], 'string') assert.deepStrictEqual(testSuiteEvents.map(suite => suite.content.resource).sort(), [ 'test_suite.landing-page-test.js', @@ -182,11 +181,11 @@ versions.forEach((version) => { testSuiteEvents.forEach(testSuiteEvent => { if (testSuiteEvent.content.meta[TEST_STATUS] === 'fail') { - assert.exists(testSuiteEvent.content.meta[ERROR_MESSAGE]) + assert.ok(testSuiteEvent.content.meta[ERROR_MESSAGE]) } assert.ok(testSuiteEvent.content.meta[TEST_SOURCE_FILE].endsWith('-test.js')) - assert.equal(testSuiteEvent.content.metrics[TEST_SOURCE_START], 1) - assert.exists(testSuiteEvent.content.metrics[DD_HOST_CPU_COUNT]) + assert.strictEqual(testSuiteEvent.content.metrics[TEST_SOURCE_START], 1) + assert.ok(testSuiteEvent.content.metrics[DD_HOST_CPU_COUNT]) }) 
assert.deepStrictEqual(testEvents.map(test => test.content.resource).sort(), [ @@ -210,11 +209,12 @@ versions.forEach((version) => { ]) testEvents.forEach(testEvent => { - assert.exists(testEvent.content.metrics[TEST_SOURCE_START]) - assert.equal( - testEvent.content.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/playwright-tests/'), true + assert.ok(testEvent.content.metrics[TEST_SOURCE_START]) + assert.strictEqual( + testEvent.content.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/playwright-tests/'), + true ) - assert.equal(testEvent.content.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') + assert.strictEqual(testEvent.content.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') // Can read DD_TAGS assertObjectContains(testEvent.content.meta, { 'test.customtag': 'customvalue', @@ -223,7 +223,7 @@ versions.forEach((version) => { [TEST_BROWSER_NAME]: 'chromium', [TEST_PARAMETERS]: JSON.stringify({ arguments: { browser: 'chromium' }, metadata: {} }) }) - assert.exists(testEvent.content.metrics[DD_HOST_CPU_COUNT]) + assert.ok(testEvent.content.metrics[DD_HOST_CPU_COUNT]) if (version === 'latest' || satisfies(version, '>=1.38.0')) { if (testEvent.content.meta[TEST_STATUS] !== 'skip' && testEvent.content.meta[TEST_SUITE].includes('landing-page-test.js')) { @@ -241,7 +241,7 @@ versions.forEach((version) => { }) stepEvents.forEach(stepEvent => { - assert.equal(stepEvent.content.name, 'playwright.step') + assert.strictEqual(stepEvent.content.name, 'playwright.step') assert.ok(Object.hasOwn(stepEvent.content.meta, 'playwright.step')) }) const annotatedTest = testEvents.find(test => @@ -291,7 +291,7 @@ versions.forEach((version) => { ]) assert.match(testOutput, /1 passed/) assert.match(testOutput, /1 skipped/) - assert.notInclude(testOutput, 'TypeError') + assert.doesNotMatch(testOutput, /TypeError/) }, 25000).then(() => done()).catch(done) childProcess = exec( @@ -328,7 +328,7 @@ versions.forEach((version) => { assertObjectContains(testSessionEvent.meta, { [TEST_STATUS]: 
'fail' }) - assert.exists(testSuiteEvent.meta[ERROR_MESSAGE]) + assert.ok(testSuiteEvent.meta[ERROR_MESSAGE]) assert.match(testSessionEvent.meta[ERROR_MESSAGE], /Test suites failed: 1/) }).then(() => done()).catch(done) @@ -340,7 +340,7 @@ versions.forEach((version) => { ...getCiVisAgentlessConfig(receiver.port), PW_BASE_URL: `http://localhost:${webAppPort}`, TEST_DIR: './ci-visibility/playwright-tests-error', - TEST_TIMEOUT: 3000 + TEST_TIMEOUT: '3000' }, stdio: 'pipe' } @@ -399,7 +399,7 @@ versions.forEach((version) => { [TEST_IS_NEW]: 'true' }) }) - assert.equal( + assert.strictEqual( newPassingTests.length, NUM_RETRIES_EFD + 1, 'passing test has not been retried the correct number of times' @@ -412,7 +412,7 @@ versions.forEach((version) => { [TEST_IS_NEW]: 'true' }) }) - assert.equal( + assert.strictEqual( newAnnotatedTests.length, NUM_RETRIES_EFD + 1, 'annotated test has not been retried the correct number of times' @@ -420,7 +420,7 @@ versions.forEach((version) => { // The only new tests are the passing and annotated tests const totalNewTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') - assert.equal( + assert.strictEqual( totalNewTests.length, newPassingTests.length + newAnnotatedTests.length, 'total new tests is not the sum of the passing and annotated tests' @@ -428,12 +428,12 @@ versions.forEach((version) => { // The only retried tests are the passing and annotated tests const totalRetriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal( + assert.strictEqual( totalRetriedTests.length, newPassingTests.length - 1 + newAnnotatedTests.length - 1, 'total retried tests is not the sum of the passing and annotated tests' ) - assert.equal( + assert.strictEqual( totalRetriedTests.length, NUM_RETRIES_EFD * 2, 'total retried tests is not the correct number of times' @@ -446,7 +446,7 @@ versions.forEach((version) => { }) // all but one has been retried - assert.equal(totalRetriedTests.length, totalNewTests.length - 2) + 
assert.strictEqual(totalRetriedTests.length, totalNewTests.length - 2) }) childProcess = exec( @@ -515,7 +515,7 @@ versions.forEach((version) => { }) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -579,7 +579,7 @@ versions.forEach((version) => { test.resource.endsWith('should work with fixme') ) // no retries - assert.equal(newTests.length, 2) + assert.strictEqual(newTests.length, 2) newTests.forEach(test => { assertObjectContains(test.meta, { [TEST_IS_NEW]: 'true' @@ -588,7 +588,7 @@ versions.forEach((version) => { const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -629,15 +629,15 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 7) + assert.strictEqual(tests.length, 7) const testSession = events.find(event => event.type === 'test_session_end').content assert.ok(!(TEST_EARLY_FLAKE_ENABLED in testSession.meta)) const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') - assert.equal(newTests.length, 0) + assert.strictEqual(newTests.length, 0) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -707,7 +707,7 @@ versions.forEach((version) => { }) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -763,7 +763,7 @@ versions.forEach((version) => { }) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - 
assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -824,9 +824,9 @@ versions.forEach((version) => { }) const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') - assert.equal(newTests.length, 0) + assert.strictEqual(newTests.length, 0) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) ]) }) @@ -868,39 +868,39 @@ versions.forEach((version) => { .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { const events = payloads.flatMap(({ payload }) => payload.events) const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.strictEqual(testSession.meta[TEST_EARLY_FLAKE_ENABLED], 'true') const tests = events.filter(event => event.type === 'test').map(event => event.content) const newTests = tests.filter( test => test.meta[TEST_NAME] === 'playwright should not retry new tests' ) - assert.equal(newTests.length, NUM_RETRIES_EFD + 1) + assert.strictEqual(newTests.length, NUM_RETRIES_EFD + 1) newTests.forEach(test => { // tests always fail because ATR and --retries are disabled for EFD, // so testInfo.retry is always 0 - assert.propertyVal(test.meta, TEST_STATUS, 'fail') - assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + assert.strictEqual(test.meta[TEST_STATUS], 'fail') + assert.strictEqual(test.meta[TEST_IS_NEW], 'true') }) const retriedNewTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedNewTests.length, NUM_RETRIES_EFD) + assert.strictEqual(retriedNewTests.length, NUM_RETRIES_EFD) retriedNewTests.forEach(test => { - assert.propertyVal(test.meta, TEST_RETRY_REASON, TEST_RETRY_REASON_TYPES.efd) - assert.propertyVal(test.meta, TEST_STATUS, 'fail') + assert.strictEqual(test.meta[TEST_RETRY_REASON], 
TEST_RETRY_REASON_TYPES.efd) + assert.strictEqual(test.meta[TEST_STATUS], 'fail') }) // --retries works normally for old flaky tests const oldFlakyTests = tests.filter( test => test.meta[TEST_NAME] === 'playwright should retry old flaky tests' ) - assert.equal(oldFlakyTests.length, 2) + assert.strictEqual(oldFlakyTests.length, 2) const passedFlakyTests = oldFlakyTests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(passedFlakyTests.length, 1) - assert.propertyVal(passedFlakyTests[0].meta, TEST_IS_RETRY, 'true') - assert.propertyVal(passedFlakyTests[0].meta, TEST_RETRY_REASON, TEST_RETRY_REASON_TYPES.ext) + assert.strictEqual(passedFlakyTests.length, 1) + assert.strictEqual(passedFlakyTests[0].meta[TEST_IS_RETRY], 'true') + assert.strictEqual(passedFlakyTests[0].meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.ext) const failedFlakyTests = oldFlakyTests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedFlakyTests.length, 1) + assert.strictEqual(failedFlakyTests.length, 1) }) ]) }) @@ -943,37 +943,37 @@ versions.forEach((version) => { .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { const events = payloads.flatMap(({ payload }) => payload.events) const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.strictEqual(testSession.meta[TEST_EARLY_FLAKE_ENABLED], 'true') const tests = events.filter(event => event.type === 'test').map(event => event.content) const newTests = tests.filter( test => test.meta[TEST_NAME] === 'playwright should not retry new tests' ) - assert.equal(newTests.length, NUM_RETRIES_EFD + 1) + assert.strictEqual(newTests.length, NUM_RETRIES_EFD + 1) newTests.forEach(test => { - assert.propertyVal(test.meta, TEST_STATUS, 'fail') - assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + assert.strictEqual(test.meta[TEST_STATUS], 'fail') + 
assert.strictEqual(test.meta[TEST_IS_NEW], 'true') }) const retriedNewTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedNewTests.length, NUM_RETRIES_EFD) + assert.strictEqual(retriedNewTests.length, NUM_RETRIES_EFD) retriedNewTests.forEach(test => { - assert.propertyVal(test.meta, TEST_RETRY_REASON, TEST_RETRY_REASON_TYPES.efd) - assert.propertyVal(test.meta, TEST_STATUS, 'fail') + assert.strictEqual(test.meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.efd) + assert.strictEqual(test.meta[TEST_STATUS], 'fail') }) // ATR works normally for old flaky tests const oldFlakyTests = tests.filter( test => test.meta[TEST_NAME] === 'playwright should retry old flaky tests' ) - assert.equal(oldFlakyTests.length, 2) + assert.strictEqual(oldFlakyTests.length, 2) const passedFlakyTests = oldFlakyTests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(passedFlakyTests.length, 1) - assert.propertyVal(passedFlakyTests[0].meta, TEST_IS_RETRY, 'true') - assert.propertyVal(passedFlakyTests[0].meta, TEST_RETRY_REASON, TEST_RETRY_REASON_TYPES.atr) + assert.strictEqual(passedFlakyTests.length, 1) + assert.strictEqual(passedFlakyTests[0].meta[TEST_IS_RETRY], 'true') + assert.strictEqual(passedFlakyTests[0].meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.atr) const failedFlakyTests = oldFlakyTests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedFlakyTests.length, 1) + assert.strictEqual(failedFlakyTests.length, 1) }) ]) }) @@ -1023,20 +1023,20 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 3) + assert.strictEqual(tests.length, 3) const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedTests.length, 2) + assert.strictEqual(failedTests.length, 2) const failedRetryTests = failedTests.filter( test => 
test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr ) - assert.equal(failedRetryTests.length, 1) // the first one is not a retry + assert.strictEqual(failedRetryTests.length, 1) // the first one is not a retry const passedTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(passedTests.length, 1) - assert.equal(passedTests[0].meta[TEST_IS_RETRY], 'true') - assert.equal(passedTests[0].meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.atr) + assert.strictEqual(passedTests.length, 1) + assert.strictEqual(passedTests[0].meta[TEST_IS_RETRY], 'true') + assert.strictEqual(passedTests[0].meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.atr) }, 30000) childProcess = exec( @@ -1075,8 +1075,8 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 1) - assert.equal(tests.filter( + assert.strictEqual(tests.length, 1) + assert.strictEqual(tests.filter( (test) => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr ).length, 0) }, 30000) @@ -1118,15 +1118,15 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 2) + assert.strictEqual(tests.length, 2) const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedTests.length, 2) + assert.strictEqual(failedTests.length, 2) const failedRetryTests = failedTests.filter( test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr ) - assert.equal(failedRetryTests.length, 1) + assert.strictEqual(failedRetryTests.length, 1) }, 30000) childProcess = exec( @@ -1159,9 +1159,9 @@ versions.forEach((version) => { const test = events.find(event => event.type === 'test').content const testSuite = events.find(event => event.type === 
'test_suite_end').content // The test is in a subproject - assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) - assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) - assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.notStrictEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) + assert.strictEqual(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.strictEqual(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }) childProcess = exec( @@ -1231,7 +1231,7 @@ versions.forEach((version) => { }) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -1259,7 +1259,7 @@ versions.forEach((version) => { const tests = events.filter(event => event.type === 'test').map(event => event.content) tests.forEach(test => { - assert.equal(test.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'true') + assert.strictEqual(test.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'true') }) }) @@ -1334,7 +1334,7 @@ versions.forEach((version) => { ) if (isDisabled) { - assert.equal(attemptedToFixTests.length, 2) + assert.strictEqual(attemptedToFixTests.length, 2) assert.ok(attemptedToFixTests.every(test => test.meta[TEST_MANAGEMENT_IS_DISABLED] === 'true' )) @@ -1343,9 +1343,9 @@ versions.forEach((version) => { } if (isAttemptingToFix) { - assert.equal(attemptedToFixTests.length, 2 * (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) + assert.strictEqual(attemptedToFixTests.length, 2 * (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) } else { - assert.equal(attemptedToFixTests.length, 2) + assert.strictEqual(attemptedToFixTests.length, 2) } if (isQuarantined) { @@ -1353,7 +1353,7 @@ versions.forEach((version) => { test.meta[TEST_MANAGEMENT_IS_QUARANTINED] === 'true' ).length // quarantined tests still run and are retried - assert.equal(numQuarantinedTests, 2 
* (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) + assert.strictEqual(numQuarantinedTests, 2 * (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) } // Retried tests are in randomly order, so we just count number of tests @@ -1381,39 +1381,39 @@ versions.forEach((version) => { // One of the tests is passing always if (isAttemptingToFix) { - assert.equal(countAttemptToFixTests, 2 * (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) - assert.equal(countRetriedAttemptToFixTests, 2 * ATTEMPT_TO_FIX_NUM_RETRIES) + assert.strictEqual(countAttemptToFixTests, 2 * (ATTEMPT_TO_FIX_NUM_RETRIES + 1)) + assert.strictEqual(countRetriedAttemptToFixTests, 2 * ATTEMPT_TO_FIX_NUM_RETRIES) if (shouldAlwaysPass) { - assert.equal(testsMarkedAsFailedAllRetries, 0) - assert.equal(testsMarkedAsFailed, 0) - assert.equal(testsMarkedAsPassedAllRetries, 2) + assert.strictEqual(testsMarkedAsFailedAllRetries, 0) + assert.strictEqual(testsMarkedAsFailed, 0) + assert.strictEqual(testsMarkedAsPassedAllRetries, 2) } else if (shouldFailSometimes) { // one test failed sometimes, the other always passed - assert.equal(testsMarkedAsFailedAllRetries, 0) - assert.equal(testsMarkedAsFailed, 1) - assert.equal(testsMarkedAsPassedAllRetries, 1) + assert.strictEqual(testsMarkedAsFailedAllRetries, 0) + assert.strictEqual(testsMarkedAsFailed, 1) + assert.strictEqual(testsMarkedAsPassedAllRetries, 1) } else { // one test failed always, the other always passed - assert.equal(testsMarkedAsFailedAllRetries, 1) - assert.equal(testsMarkedAsFailed, 1) - assert.equal(testsMarkedAsPassedAllRetries, 1) + assert.strictEqual(testsMarkedAsFailedAllRetries, 1) + assert.strictEqual(testsMarkedAsFailed, 1) + assert.strictEqual(testsMarkedAsPassedAllRetries, 1) } } else { - assert.equal(countAttemptToFixTests, 0) - assert.equal(countRetriedAttemptToFixTests, 0) - assert.equal(testsMarkedAsFailedAllRetries, 0) - assert.equal(testsMarkedAsPassedAllRetries, 0) + assert.strictEqual(countAttemptToFixTests, 0) + assert.strictEqual(countRetriedAttemptToFixTests, 0) + 
assert.strictEqual(testsMarkedAsFailedAllRetries, 0) + assert.strictEqual(testsMarkedAsPassedAllRetries, 0) } if (shouldIncludeFlakyTest) { const flakyTests = tests.filter( test => test.meta[TEST_NAME] === 'flaky test is retried without attempt to fix' ) // it passes at the second attempt - assert.equal(flakyTests.length, 2) + assert.strictEqual(flakyTests.length, 2) const passedFlakyTest = flakyTests.filter(test => test.meta[TEST_STATUS] === 'pass') const failedFlakyTest = flakyTests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(passedFlakyTest.length, 1) - assert.equal(failedFlakyTest.length, 1) + assert.strictEqual(passedFlakyTest.length, 1) + assert.strictEqual(failedFlakyTest.length, 1) } }, 30000) @@ -1460,9 +1460,9 @@ versions.forEach((version) => { if (isQuarantined || isDisabled || shouldAlwaysPass) { // even though a test fails, the exit code is 0 because the test is quarantined - assert.equal(exitCode, 0) + assert.strictEqual(exitCode, 0) } else { - assert.equal(exitCode, 1) + assert.strictEqual(exitCode, 1) } } @@ -1652,19 +1652,19 @@ versions.forEach((version) => { } const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 8) + assert.strictEqual(tests.length, 8) const disabledTests = tests.filter(test => test.meta[TEST_NAME] === 'disable should disable test') - assert.equal(disabledTests.length, 2) + assert.strictEqual(disabledTests.length, 2) disabledTests.forEach(test => { if (isDisabling) { - assert.equal(test.meta[TEST_STATUS], 'skip') + assert.strictEqual(test.meta[TEST_STATUS], 'skip') assertObjectContains(test.meta, { [TEST_MANAGEMENT_IS_DISABLED]: 'true' }) } else { - assert.equal(test.meta[TEST_STATUS], 'fail') + assert.strictEqual(test.meta[TEST_STATUS], 'fail') assert.ok(!(TEST_MANAGEMENT_IS_DISABLED in test.meta)) } }) @@ -1703,11 +1703,11 @@ versions.forEach((version) => { // the testOutput checks whether the test is actually skipped if (isDisabling) { - 
assert.notMatch(testOutput, /SHOULD NOT BE EXECUTED/) - assert.equal(exitCode, 0) + assert.doesNotMatch(testOutput, /SHOULD NOT BE EXECUTED/) + assert.strictEqual(exitCode, 0) } else { assert.match(testOutput, /SHOULD NOT BE EXECUTED/) - assert.equal(exitCode, 1) + assert.strictEqual(exitCode, 1) } } @@ -1770,27 +1770,27 @@ versions.forEach((version) => { ) quarantinedTests.forEach(test => { - assert.equal(test.meta[TEST_STATUS], 'fail') + assert.strictEqual(test.meta[TEST_STATUS], 'fail') }) if (hasFlakyTests) { - assert.equal(flakyTests.length, 2) // first attempt fails, second attempt passes - assert.equal(quarantinedTests.length, 2) // both fail + assert.strictEqual(flakyTests.length, 2) // first attempt fails, second attempt passes + assert.strictEqual(quarantinedTests.length, 2) // both fail assert.ok(!(TEST_MANAGEMENT_IS_QUARANTINED in flakyTests[0].meta)) assert.ok(!(TEST_MANAGEMENT_IS_QUARANTINED in flakyTests[1].meta)) const failedFlakyTest = flakyTests.filter(test => test.meta[TEST_STATUS] === 'fail') const passedFlakyTest = flakyTests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(failedFlakyTest.length, 1) - assert.equal(passedFlakyTest.length, 1) + assert.strictEqual(failedFlakyTest.length, 1) + assert.strictEqual(passedFlakyTest.length, 1) } if (isQuarantining) { if (hasFlakyTests) { - assert.equal(quarantinedTests[1].meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedTests[1].meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') } else { - assert.equal(quarantinedTests.length, 1) + assert.strictEqual(quarantinedTests.length, 1) } - assert.equal(quarantinedTests[0].meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedTests[0].meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') assertObjectContains(testSession.meta, { [TEST_MANAGEMENT_ENABLED]: 'true' }) @@ -1798,7 +1798,7 @@ versions.forEach((version) => { if (hasFlakyTests) { assert.ok(!(TEST_MANAGEMENT_IS_QUARANTINED in 
quarantinedTests[1].meta)) } else { - assert.equal(quarantinedTests.length, 1) + assert.strictEqual(quarantinedTests.length, 1) } assert.ok(!(TEST_MANAGEMENT_IS_QUARANTINED in quarantinedTests[0].meta)) assert.ok(!(TEST_MANAGEMENT_ENABLED in testSession.meta)) @@ -1833,9 +1833,9 @@ versions.forEach((version) => { ]) if (isQuarantining) { - assert.equal(exitCode, 0) + assert.strictEqual(exitCode, 0) } else { - assert.equal(exitCode, 1) + assert.strictEqual(exitCode, 1) } } @@ -1897,9 +1897,9 @@ versions.forEach((version) => { assert.ok(!(TEST_MANAGEMENT_ENABLED in testSession.meta)) const tests = events.filter(event => event.type === 'test').map(event => event.content) // they are not retried - assert.equal(tests.length, 2) + assert.strictEqual(tests.length, 2) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 0) + assert.strictEqual(retriedTests.length, 0) }) childProcess = exec( @@ -1941,25 +1941,25 @@ versions.forEach((version) => { assert.ok(metadataDicts.length > 0) metadataDicts.forEach(metadata => { - assert.equal(metadata.test[DD_CAPABILITIES_TEST_IMPACT_ANALYSIS], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_AUTO_TEST_RETRIES], '1') + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_IMPACT_ANALYSIS], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_AUTO_TEST_RETRIES], '1') if (satisfies(version, '>=1.38.0') || version === 'latest') { - assert.equal(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], '1') - assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') - assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') + assert.strictEqual(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], '1') + 
assert.strictEqual(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], '1') + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], '1') + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], '1') + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], '5') + assert.strictEqual(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], '1') } else { - assert.equal(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], undefined) - assert.equal(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_EARLY_FLAKE_DETECTION], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_IMPACTED_TESTS], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_QUARANTINE], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_DISABLE], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_TEST_MANAGEMENT_ATTEMPT_TO_FIX], undefined) + assert.strictEqual(metadata.test[DD_CAPABILITIES_FAILED_TEST_REPLAY], undefined) } // capabilities logic does not overwrite test session name - assert.equal(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') + assert.strictEqual(metadata.test[TEST_SESSION_NAME], 'my-test-session-name') }) }) @@ -1993,7 +1993,7 @@ versions.forEach((version) => { const test = events.find(event => event.type === 'test').content - assert.equal(test.meta['test.custom_tag'], 'this is custom') + assert.strictEqual(test.meta['test.custom_tag'], 'this is custom') }) childProcess = exec( @@ -2024,12 +2024,12 @@ versions.forEach((version) => { const customSpan = 
spans.find(span => span.name === 'my custom span') - assert.exists(customSpan) - assert.equal(customSpan.meta['test.really_custom_tag'], 'this is really custom') + assert.ok(customSpan) + assert.strictEqual(customSpan.meta['test.really_custom_tag'], 'this is really custom') // custom span is children of active test span - assert.equal(customSpan.trace_id.toString(), test.trace_id.toString()) - assert.equal(customSpan.parent_id.toString(), test.span_id.toString()) + assert.strictEqual(customSpan.trace_id.toString(), test.trace_id.toString()) + assert.strictEqual(customSpan.parent_id.toString(), test.span_id.toString()) }) childProcess = exec( @@ -2113,7 +2113,7 @@ versions.forEach((version) => { .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { const events = payloads.flatMap(({ payload }) => payload.events) const testSession = events.find(event => event.type === 'test_session_end').content - assert.equal(testSession.meta[TEST_STATUS], 'fail') + assert.strictEqual(testSession.meta[TEST_STATUS], 'fail') }) receiver.setSettings({ test_management: { enabled: true } }) @@ -2132,7 +2132,7 @@ versions.forEach((version) => { ) childProcess.on('exit', (exitCode) => { - assert.equal(exitCode, 1) + assert.strictEqual(exitCode, 1) receiverPromise.then(() => done()).catch(done) }) }) @@ -2211,9 +2211,9 @@ versions.forEach((version) => { test.meta[TEST_SOURCE_FILE] === 'ci-visibility/playwright-tests-impacted-tests/impacted-test.js') if (isEfd) { - assert.equal(impactedTests.length, (NUM_RETRIES_EFD + 1) * 2) // Retries + original test + assert.strictEqual(impactedTests.length, (NUM_RETRIES_EFD + 1) * 2) // Retries + original test } else { - assert.equal(impactedTests.length, 2) + assert.strictEqual(impactedTests.length, 2) } for (const impactedTest of impactedTests) { @@ -2237,7 +2237,7 @@ versions.forEach((version) => { const retriedTests = tests.filter( test => test.meta[TEST_IS_RETRY] === 'true' ) - assert.equal(retriedTests.length, 
NUM_RETRIES_EFD * 2) + assert.strictEqual(retriedTests.length, NUM_RETRIES_EFD * 2) let retriedTestNew = 0 let retriedTestsWithReason = 0 retriedTests.forEach(test => { @@ -2248,8 +2248,8 @@ versions.forEach((version) => { retriedTestsWithReason++ } }) - assert.equal(retriedTestNew, isNew ? NUM_RETRIES_EFD * 2 : 0) - assert.equal(retriedTestsWithReason, NUM_RETRIES_EFD * 2) + assert.strictEqual(retriedTestNew, isNew ? NUM_RETRIES_EFD * 2 : 0) + assert.strictEqual(retriedTestsWithReason, NUM_RETRIES_EFD * 2) } }, 25000) @@ -2333,7 +2333,7 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, NUM_RETRIES_EFD + 1) + assert.strictEqual(tests.length, NUM_RETRIES_EFD + 1) for (const test of tests) { assert.ok(!(TEST_MANAGEMENT_ATTEMPT_TO_FIX_PASSED in test.meta)) assert.ok(!(TEST_HAS_FAILED_ALL_RETRIES in test.meta)) @@ -2390,9 +2390,9 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) - assert.equal(testSuites.length, 2) + assert.strictEqual(testSuites.length, 2) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 3) + assert.strictEqual(tests.length, 3) const skippedTest = tests.find(test => test.meta[TEST_STATUS] === 'skip') assertObjectContains( @@ -2422,8 +2422,8 @@ versions.forEach((version) => { ...getCiVisAgentlessConfig(receiver.port), PW_BASE_URL: `http://localhost:${webAppPort}`, TEST_DIR: './ci-visibility/playwright-test-duration', - FULLY_PARALLEL: parallelism, - PLAYWRIGHT_WORKERS: 2 + FULLY_PARALLEL: String(parallelism), + PLAYWRIGHT_WORKERS: '2' }, stdio: 'pipe' } @@ -2443,7 +2443,7 @@ versions.forEach((version) => { .gatherPayloadsMaxTimeout(({ url }) => url === 
'/api/v2/citestcycle', (payloads) => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 2) + assert.strictEqual(tests.length, 2) const failedTest = tests.find(test => test.meta[TEST_STATUS] === 'fail') assertObjectContains(failedTest.meta, { [TEST_NAME]: 'failing test fails and causes early bail' diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js index 57ef866d247..0a2c2fd61c0 100644 --- a/integration-tests/profiler/profiler.spec.js +++ b/integration-tests/profiler/profiler.spec.js @@ -1,5 +1,6 @@ 'use strict' +const assert = require('node:assert/strict') const { FakeAgent, sandboxCwd, @@ -9,7 +10,7 @@ const { const childProcess = require('child_process') const { fork } = childProcess const path = require('path') -const { assert } = require('chai') + const fs = require('fs/promises') const fsync = require('fs') const net = require('net') @@ -55,7 +56,7 @@ function expectProfileMessagePromise (agent, timeout, const attachments = event.attachments assert.ok(Array.isArray(attachments)) // Profiler encodes the files with Promise.all, so their ordering is not guaranteed - assert.sameMembers(attachments, fileNames) + assert.deepStrictEqual(attachments.slice().sort(), fileNames.sort()) for (const [index, fileName] of attachments.entries()) { assertObjectContains(files[index + 1], { originalname: fileName @@ -372,7 +373,7 @@ describe('profiler', () => { // Must've counted the number of times each endpoint was hit const event = JSON.parse((await readLatestFile(cwd, /^event_.+\.json$/)).toString()) - assert.deepEqual(event.endpoint_counts, { 'endpoint-0': 1, 'endpoint-1': 1, 'endpoint-2': 1 }) + assert.deepStrictEqual(event.endpoint_counts, { 'endpoint-0': 1, 'endpoint-1': 1, 'endpoint-2': 1 }) const { profile, encoded } = await getLatestProfile(cwd, /^wall_.+\.pprof$/) @@ -424,13 +425,13 @@ 
describe('profiler', () => { // Timestamp must be defined and be between process start and end time assert.notStrictEqual(ts, undefined, encoded) assert.strictEqual(typeof osThreadId, 'number', encoded) - assert.equal(threadId, strings.dedup('0'), encoded) + assert.strictEqual(threadId, strings.dedup('0'), encoded) assert.ok(ts <= procEnd, encoded) assert.ok(ts >= procStart, encoded) // Thread name must be defined and exactly equal "Main Event Loop" - assert.equal(threadName, threadNameValue, encoded) + assert.strictEqual(threadName, threadNameValue, encoded) } else { - assert.equal(threadId, strings.dedup('NA'), encoded) + assert.strictEqual(threadId, strings.dedup('NA'), encoded) } // Either all or none of span-related labels are defined if (endpoint === undefined) { @@ -454,7 +455,7 @@ describe('profiler', () => { const existingSpanData = spans.get(spanId) if (existingSpanData) { // Span's root span and endpoint must be consistent across samples - assert.deepEqual(spanData, existingSpanData, encoded) + assert.deepStrictEqual(spanData, existingSpanData, encoded) } else { // New span id, store span data spans.set(spanId, spanData) @@ -474,18 +475,18 @@ describe('profiler', () => { } // Need to have a total of 9 different spans, with 3 different root spans // and 3 different endpoints. 
- assert.equal(spans.size, 9, encoded) - assert.equal(rootSpans.size, 3, encoded) - assert.equal(endpoints.size, 3, encoded) + assert.strictEqual(spans.size, 9, encoded) + assert.strictEqual(rootSpans.size, 3, encoded) + assert.strictEqual(endpoints.size, 3, encoded) }) it('fs timeline events work', async () => { const fsEvents = await gatherFilesystemTimelineEvents(cwd, 'profiler/fstest.js', agent.port) - assert.equal(fsEvents.length, 6) + assert.strictEqual(fsEvents.length, 6) const path = fsEvents[0].path const fd = fsEvents[1].fd assert(path.endsWith('tempfile.txt')) - assert.sameDeepMembers(fsEvents, [ + assertObjectContains(fsEvents, [ { flag: 'w', mode: '', operation: 'open', path }, { fd, operation: 'write' }, { fd, operation: 'close' }, @@ -497,12 +498,15 @@ describe('profiler', () => { it('dns timeline events work', async () => { const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', agent.port, 'dns') - assert.sameDeepMembers(dnsEvents, [ - { operation: 'lookup', host: 'example.org' }, - { operation: 'lookup', host: 'example.com' }, + const compare = (a, b) => { + return a.operation.localeCompare(b.operation) || (a.host?.localeCompare(b.host) ?? 0) + } + assertObjectContains(dnsEvents.sort(compare), [ { operation: 'lookup', host: 'datadoghq.com' }, + { operation: 'lookup', host: 'example.com' }, + { operation: 'lookup', host: 'example.org' }, + { operation: 'lookupService', address: '13.224.103.60', port: 80 }, { operation: 'queryA', host: 'datadoghq.com' }, - { operation: 'lookupService', address: '13.224.103.60', port: 80 } ]) }) @@ -537,7 +541,7 @@ describe('profiler', () => { const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', agent.port, 'net', args) // The profiled program should have two TCP connection events to the two // servers. 
- assert.sameDeepMembers(events, [ + assertObjectContains(events, [ { operation: 'connect', host: '127.0.0.1', port: port1 }, { operation: 'connect', host: '127.0.0.1', port: port2 } ]) @@ -698,10 +702,10 @@ describe('profiler', () => { const checkMetrics = agent.assertTelemetryReceived(({ _, payload }) => { const pp = payload.payload - assert.equal(pp.namespace, 'profilers') + assert.strictEqual(pp.namespace, 'profilers') const series = pp.series const requests = series.find(s => s.metric === 'profile_api.requests') - assert.equal(requests.type, 'count') + assert.strictEqual(requests.type, 'count') // There's a race between metrics and on-shutdown profile, so metric // value will be between 1 and 3 requestCount = requests.points[0][1] @@ -709,30 +713,30 @@ describe('profiler', () => { assert.ok(requestCount <= 3) const responses = series.find(s => s.metric === 'profile_api.responses') - assert.equal(responses.type, 'count') - assert.match(responses.tags, /status_code:200/) + assert.strictEqual(responses.type, 'count') + assert.deepStrictEqual(responses.tags, ['status_code:200']) // Same number of requests and responses - assert.equal(responses.points[0][1], requestCount) + assert.strictEqual(responses.points[0][1], requestCount) }, 'generate-metrics', timeout) const checkDistributions = agent.assertTelemetryReceived(({ _, payload }) => { const pp = payload.payload - assert.equal(pp.namespace, 'profilers') + assert.strictEqual(pp.namespace, 'profilers') const series = pp.series assert.strictEqual(series.length, 2) - assert.equal(series[0].metric, 'profile_api.bytes') - assert.equal(series[1].metric, 'profile_api.ms') + assert.strictEqual(series[0].metric, 'profile_api.bytes') + assert.strictEqual(series[1].metric, 'profile_api.ms') // Same number of points pointsCount = series[0].points.length - assert.equal(pointsCount, series[1].points.length) + assert.strictEqual(pointsCount, series[1].points.length) }, 'distributions', timeout) await 
Promise.all([checkProfiles(agent, proc, timeout), checkMetrics, checkDistributions]) // Same number of requests and points - assert.equal(requestCount, pointsCount) + assert.strictEqual(requestCount, pointsCount) }) it('sends wall profiler sample context telemetry', async function () { @@ -759,11 +763,11 @@ describe('profiler', () => { const checkMetrics = agent.assertTelemetryReceived(({ _, payload }) => { const pp = payload.payload - assert.equal(pp.namespace, 'profilers'); + assert.strictEqual(pp.namespace, 'profilers'); ['live', 'used'].forEach(metricName => { const sampleContexts = pp.series.find(s => s.metric === `wall.async_contexts_${metricName}`) assert.notStrictEqual(sampleContexts, undefined) - assert.equal(sampleContexts.type, 'gauge') + assert.strictEqual(sampleContexts.type, 'gauge') assert.ok(sampleContexts.points[0][1] >= 1) }) }, 'generate-metrics', timeout) diff --git a/integration-tests/remote_config.spec.js b/integration-tests/remote_config.spec.js index 50d478e8dd2..9c05cf9295a 100644 --- a/integration-tests/remote_config.spec.js +++ b/integration-tests/remote_config.spec.js @@ -42,7 +42,7 @@ describe('Remote config client id', () => { await axios.get('/') return agent.assertMessageReceived(({ payload }) => { - assert.ok(payload[0][0].meta['_dd.rc.client_id'] != null) + assert.ok(payload[0][0].meta['_dd.rc.client_id']) }) }) }) diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 160eb098ea3..11e7ce2b45b 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -217,7 +217,7 @@ versions.forEach((version) => { assert.strictEqual(test.content.meta[TEST_IS_TEST_FRAMEWORK_WORKER], 'true') } assert.strictEqual(test.content.meta[TEST_COMMAND], 'vitest run') - assert.ok(test.content.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(test.content.metrics[DD_HOST_CPU_COUNT]) assert.strictEqual(test.content.meta[DD_TEST_IS_USER_PROVIDED_SERVICE], 'false') }) @@ 
-232,7 +232,7 @@ versions.forEach((version) => { true ) assert.strictEqual(testSuite.content.metrics[TEST_SOURCE_START], 1) - assert.ok(testSuite.content.metrics[DD_HOST_CPU_COUNT] != null) + assert.ok(testSuite.content.metrics[DD_HOST_CPU_COUNT]) }) }) ]) @@ -1277,7 +1277,7 @@ versions.forEach((version) => { assert.strictEqual(retriedTest.metrics[`${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_LINE_SUFFIX}`], 4) const snapshotIdKey = `${DI_DEBUG_ERROR_PREFIX}.0.${DI_DEBUG_ERROR_SNAPSHOT_ID_SUFFIX}` - assert.ok(retriedTest.meta[snapshotIdKey] != null) + assert.ok(retriedTest.meta[snapshotIdKey]) snapshotIdByTest = retriedTest.meta[snapshotIdKey] spanIdByTest = retriedTest.span_id.toString() diff --git a/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js index 99692597bb8..a1326b268a6 100644 --- a/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js +++ b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js @@ -3,7 +3,7 @@ const assert = require('node:assert/strict') const axios = require('axios') -const { expect } = require('chai') + const { channel } = require('dc-polyfill') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -195,7 +195,7 @@ describe('express-mongo-sanitize', () => { const sanitizedObject = expressMongoSanitize.sanitize(objectToSanitize) assert.strictEqual(sanitizedObject.safeKey, objectToSanitize.safeKey) - expect(subscription).to.be.calledOnceWith({ sanitizedObject }) + sinon.assert.calledOnceWithMatch(subscription, { sanitizedObject }) }) it('it works as expected with modifications', () => { @@ -212,7 +212,7 @@ describe('express-mongo-sanitize', () => { assert.strictEqual(sanitizedObject.safeKey, objectToSanitize.safeKey) assert.strictEqual(sanitizedObject.unsafeKey.$ne, undefined) - expect(subscription).to.be.calledOnceWith({ sanitizedObject }) + 
sinon.assert.calledOnceWithMatch(subscription, { sanitizedObject }) }) }) }) diff --git a/packages/datadog-instrumentations/test/helpers/check-require-cache.spec.js b/packages/datadog-instrumentations/test/helpers/check-require-cache.spec.js index 64618c4d3eb..43e0c102a66 100644 --- a/packages/datadog-instrumentations/test/helpers/check-require-cache.spec.js +++ b/packages/datadog-instrumentations/test/helpers/check-require-cache.spec.js @@ -3,7 +3,6 @@ const assert = require('node:assert/strict') const { exec } = require('node:child_process') -const { expect } = require('chai') const { describe, it } = require('mocha') describe('check-require-cache', () => { @@ -17,7 +16,7 @@ describe('check-require-cache', () => { it('should be no warnings when tracer is loaded first', (done) => { exec(`${process.execPath} ./check-require-cache/good-order.js`, opts, (error, stdout, stderr) => { assert.strictEqual(error, null) - expect(stderr).to.not.include("Package 'express' was loaded") + assert.doesNotMatch(stderr, /Package 'express' was loaded/) done() }) }) diff --git a/packages/datadog-instrumentations/test/multer.spec.js b/packages/datadog-instrumentations/test/multer.spec.js index c767664fc92..8119a097388 100644 --- a/packages/datadog-instrumentations/test/multer.spec.js +++ b/packages/datadog-instrumentations/test/multer.spec.js @@ -3,7 +3,7 @@ const assert = require('node:assert/strict') const axios = require('axios') -const { expect } = require('chai') + const dc = require('dc-polyfill') const { after, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -52,7 +52,7 @@ withVersions('multer', 'multer', version => { it('should not abort the request by default', async () => { const res = await axios.post(`http://localhost:${port}/`, formData) - expect(middlewareProcessBodyStub).to.be.calledOnceWithExactly(formData.get('key')) + sinon.assert.calledOnceWithExactly(middlewareProcessBodyStub, formData.get('key')) assert.strictEqual(res.data, 
'DONE') }) @@ -63,7 +63,7 @@ withVersions('multer', 'multer', version => { try { const res = await axios.post(`http://localhost:${port}/`, formData) - expect(middlewareProcessBodyStub).to.be.calledOnceWithExactly(formData.get('key')) + sinon.assert.calledOnceWithExactly(middlewareProcessBodyStub, formData.get('key')) assert.strictEqual(res.data, 'DONE') } finally { multerReadCh.unsubscribe(noop) @@ -104,7 +104,7 @@ withVersions('multer', 'multer', version => { assert.strictEqual(store.res, payload.res) assert.ok(Object.hasOwn(store, 'span')) - expect(middlewareProcessBodyStub).to.be.calledOnceWithExactly(formData.get('key')) + sinon.assert.calledOnceWithExactly(middlewareProcessBodyStub, formData.get('key')) assert.strictEqual(res.data, 'DONE') } finally { multerReadCh.unsubscribe(handler) diff --git a/packages/datadog-instrumentations/test/passport-http.spec.js b/packages/datadog-instrumentations/test/passport-http.spec.js index 8d8cd7121c6..11d0e631e90 100644 --- a/packages/datadog-instrumentations/test/passport-http.spec.js +++ b/packages/datadog-instrumentations/test/passport-http.spec.js @@ -2,7 +2,7 @@ const assert = require('node:assert/strict') const axios = require('axios').create({ validateStatus: null }) -const { expect } = require('chai') + const dc = require('dc-polyfill') const { after, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -131,7 +131,7 @@ withVersions('passport-http', 'passport-http', version => { assert.strictEqual(res.status, 200) assert.strictEqual(res.data, 'Granted') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-basic', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, @@ -150,7 +150,7 @@ withVersions('passport-http', 'passport-http', version => { assert.strictEqual(res.status, 200) assert.strictEqual(res.data, 'Granted') - 
expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-basic', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, @@ -169,7 +169,7 @@ withVersions('passport-http', 'passport-http', version => { assert.strictEqual(res.status, 200) assert.strictEqual(res.data, 'Denied') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-basic', login: 'test', user: false, @@ -193,7 +193,7 @@ withVersions('passport-http', 'passport-http', version => { assert.strictEqual(res.status, 403) assert.strictEqual(res.data, 'Blocked') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-basic', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, diff --git a/packages/datadog-instrumentations/test/passport-local.spec.js b/packages/datadog-instrumentations/test/passport-local.spec.js index 77eeb8fe1b0..cf4bf77f5e3 100644 --- a/packages/datadog-instrumentations/test/passport-local.spec.js +++ b/packages/datadog-instrumentations/test/passport-local.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const dc = require('dc-polyfill') const { after, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -122,7 +121,7 @@ withVersions('passport-local', 'passport-local', version => { assert.strictEqual(res.status, 200) assert.strictEqual(res.data, 'Granted') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-local', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, @@ -136,7 +135,7 @@ withVersions('passport-local', 'passport-local', version => { assert.strictEqual(res.status, 
200) assert.strictEqual(res.data, 'Granted') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-local', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, @@ -150,7 +149,7 @@ withVersions('passport-local', 'passport-local', version => { assert.strictEqual(res.status, 200) assert.strictEqual(res.data, 'Denied') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-local', login: 'test', user: false, @@ -169,7 +168,7 @@ withVersions('passport-local', 'passport-local', version => { assert.strictEqual(res.status, 403) assert.strictEqual(res.data, 'Blocked') - expect(subscriberStub).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(subscriberStub, { framework: 'passport-local', login: 'test', user: { _id: 1, username: 'test', password: '1234', email: 'testuser@ddog.com' }, diff --git a/packages/datadog-plugin-child_process/test/index.spec.js b/packages/datadog-plugin-child_process/test/index.spec.js index 13fca709a84..ec8e0b3c292 100644 --- a/packages/datadog-plugin-child_process/test/index.spec.js +++ b/packages/datadog-plugin-child_process/test/index.spec.js @@ -404,7 +404,7 @@ describe('Child process plugin', () => { const execFileAsync = util.promisify(childProcess.execFile) assert.strictEqual(global.Promise, Bluebird) - assert.ok(global.Promise.version != null) + assert.ok(global.Promise.version) const expectedPromise = expectSomeSpan(agent, { type: 'system', @@ -417,7 +417,7 @@ describe('Child process plugin', () => { }) const result = await execFileAsync('echo', ['bluebird-test']) - assert.ok(result != null) + assert.ok(result) assert.strictEqual(result.stdout, 'bluebird-test\n') return expectedPromise @@ -460,8 +460,8 @@ describe('Child process plugin', () => { await execFileAsync('node', ['-invalidFlag'], { stdio: 'pipe' }) throw new 
Error('Expected command to fail') } catch (error) { - assert.ok(error != null) - assert.ok(error.code != null) + assert.ok(error) + assert.ok(error.code) } return expectedPromise @@ -476,7 +476,7 @@ describe('Child process plugin', () => { const promise = execFileAsync('echo', ['util-promisify-test']) assert.strictEqual(promise.constructor, Bluebird) - assert.ok(promise.constructor.version != null) + assert.ok(promise.constructor.version) const result = await promise assert.strictEqual(result.stdout, 'util-promisify-test\n') diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js index 27cafbcdb2f..4ddf4d34638 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js @@ -1,6 +1,6 @@ 'use strict' -const { expect } = require('chai') +const assert = require('node:assert/strict') const { describe, it, beforeEach, afterEach } = require('mocha') const sinon = require('sinon') @@ -15,6 +15,7 @@ const { withVersions } = require('../../dd-trace/test/setup/mocha') const DataStreamsContext = require('../../dd-trace/src/datastreams/context') const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') +const { assertObjectContains } = require('../../../integration-tests/helpers') const getDsmPathwayHash = (testTopic, isProducer, parentHash) => { let edgeTags @@ -115,14 +116,14 @@ describe('Plugin', () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - expect(span).to.include({ + assertObjectContains(span, { name: expectedSchema.send.opName, service: expectedSchema.send.serviceName, resource: testTopic, error: 1 }) - expect(span.meta).to.include({ + assertObjectContains(span.meta, { [ERROR_TYPE]: 
error.name, [ERROR_MESSAGE]: error.message, [ERROR_STACK]: error.stack, @@ -189,8 +190,8 @@ describe('Plugin', () => { const currentSpan = tracer.scope().active() try { - expect(currentSpan).to.not.equal(firstSpan) - expect(currentSpan.context()._name).to.equal(expectedSchema.receive.opName) + assert.notStrictEqual(currentSpan, firstSpan) + assert.strictEqual(currentSpan.context()._name, expectedSchema.receive.opName) done() } catch (e) { done(e) @@ -209,13 +210,13 @@ describe('Plugin', () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - expect(span).to.include({ + assertObjectContains(span, { name: 'kafka.consume', service: 'test-kafka', resource: testTopic }) - expect(parseInt(span.parent_id.toString())).to.be.gt(0) + assert.ok(parseInt(span.parent_id.toString()) > 0) }, { timeoutMs: 10000 }) let consumerReceiveMessagePromise @@ -335,18 +336,18 @@ describe('Plugin', () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - expect(span).to.include({ + assertObjectContains(span, { name: expectedSchema.send.opName, service: expectedSchema.send.serviceName, error: 1 }) - expect(span.meta).to.include({ + assertObjectContains(span.meta, { component: 'confluentinc-kafka-javascript' }) - expect(span.meta[ERROR_TYPE]).to.exist - expect(span.meta[ERROR_MESSAGE]).to.exist + assert.ok(span.meta[ERROR_TYPE]) + assert.ok(span.meta[ERROR_MESSAGE]) }, { timeoutMs: 10000 }) try { @@ -456,13 +457,13 @@ describe('Plugin', () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - expect(span).to.include({ + assertObjectContains(span, { name: 'kafka.consume', service: 'test-kafka', resource: testTopic }) - expect(parseInt(span.parent_id.toString())).to.be.gt(0) + assert.ok(parseInt(span.parent_id.toString()) > 0) }, { timeoutMs: 10000 }) nativeConsumer.setDefaultConsumeTimeout(10) nativeConsumer.subscribe([testTopic]) @@ -543,7 +544,7 @@ describe('Plugin', 
() => { it('Should set a checkpoint on produce', async () => { const messages = [{ key: 'consumerDSM1', value: 'test2' }] await sendMessages(kafka, testTopic, messages) - expect(setDataStreamsContextSpy.args[0][0].hash).to.equal(expectedProducerHash) + assert.strictEqual(setDataStreamsContextSpy.args[0][0].hash, expectedProducerHash) }) it('Should set a checkpoint on consume (eachMessage)', async () => { @@ -560,7 +561,7 @@ describe('Plugin', () => { ) for (const runArg of runArgs) { - expect(runArg.hash).to.equal(expectedConsumerHash) + assert.strictEqual(runArg.hash, expectedConsumerHash) } }) @@ -577,7 +578,7 @@ describe('Plugin', () => { async () => await consumerReceiveMessagePromise ) for (const runArg of runArgs) { - expect(runArg.hash).to.equal(expectedConsumerHash) + assert.strictEqual(runArg.hash, expectedConsumerHash) } }) @@ -588,7 +589,7 @@ describe('Plugin', () => { } const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') await sendMessages(kafka, testTopic, messages) - expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) recordCheckpointSpy.restore() }) @@ -601,7 +602,7 @@ describe('Plugin', () => { let consumerReceiveMessagePromise await consumer.run({ eachMessage: async () => { - expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) recordCheckpointSpy.restore() consumerReceiveMessagePromise = Promise.resolve() } @@ -652,7 +653,7 @@ describe('Plugin', () => { await consumer.disconnect() for (const call of setOffsetSpy.getCalls()) { - expect(call.args[0]).to.not.have.property('type', 'kafka_commit') + assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') } const newConsumer = kafka.consumer({ @@ -670,18 +671,18 @@ describe('Plugin', () => { // Check our work const runArg = setOffsetSpy.lastCall.args[0] - 
expect(runArg).to.have.property('offset', commitMeta.offset) - expect(runArg).to.have.property('partition', commitMeta.partition) - expect(runArg).to.have.property('topic', commitMeta.topic) - expect(runArg).to.have.property('type', 'kafka_commit') - expect(runArg).to.have.property('consumer_group', groupId) + assert.strictEqual(runArg?.offset, commitMeta.offset) + assert.strictEqual(runArg?.partition, commitMeta.partition) + assert.strictEqual(runArg?.topic, commitMeta.topic) + assert.strictEqual(runArg?.type, 'kafka_commit') + assert.strictEqual(runArg?.consumer_group, groupId) }) it('Should add backlog on producer response', async () => { await sendMessages(kafka, testTopic, messages) - expect(setOffsetSpy).to.be.calledOnce + sinon.assert.calledOnce(setOffsetSpy) const { topic } = setOffsetSpy.lastCall.args[0] - expect(topic).to.equal(testTopic) + assert.strictEqual(topic, testTopic) }) }) @@ -722,19 +723,19 @@ describe('Plugin', () => { try { await producer.send({ topic: testTopic, messages: testMessages }) - expect.fail('First producer.send() should have thrown an error') + assert.fail('First producer.send() should have thrown an error') } catch (e) { - expect(e).to.equal(error) + assert.strictEqual(e, error) } // Verify headers were injected in the first attempt - expect(testMessages[0].headers[0]).to.have.property('x-datadog-trace-id') + assert.ok(Object.hasOwn(testMessages[0].headers[0], 'x-datadog-trace-id')) // restore the stub to allow the next send to succeed produceStub.restore() const result = await producer.send({ topic: testTopic, messages: testMessages2 }) - expect(testMessages2[0].headers).to.be.null - expect(result).to.not.be.undefined + assert.strictEqual(testMessages2[0].headers, null) + assert.notStrictEqual(result, undefined) }) }) }) diff --git a/packages/datadog-plugin-fs/test/index.spec.js b/packages/datadog-plugin-fs/test/index.spec.js index 1469988cdc3..6fe14a0f0e9 100644 --- a/packages/datadog-plugin-fs/test/index.spec.js +++ 
b/packages/datadog-plugin-fs/test/index.spec.js @@ -5,7 +5,6 @@ const os = require('node:os') const path = require('node:path') const util = require('node:util') -const { expect } = require('chai') const { channel } = require('dc-polyfill') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const realFS = Object.assign({}, require('node:fs')) @@ -44,7 +43,7 @@ describe('Plugin', () => { data.forEach((arr) => { arr.forEach((trace) => { if (trace.name === 'fs.operation') { - expect.fail('should not have been any fs traces') + assert.fail('should not have been any fs traces') } }) }) @@ -98,7 +97,7 @@ describe('Plugin', () => { describe('open', () => { it('should not be instrumented', (done) => { agent.assertSomeTraces(() => { - expect.fail('should not have been any traces') + assert.fail('should not have been any traces') }).catch(done) setTimeout(done, 1500) // allow enough time to ensure no traces happened diff --git a/packages/datadog-plugin-google-genai/test/integration-test/client.spec.js b/packages/datadog-plugin-google-genai/test/integration-test/client.spec.js index 9a7764ad55c..97ac41efda4 100644 --- a/packages/datadog-plugin-google-genai/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-google-genai/test/integration-test/client.spec.js @@ -1,5 +1,6 @@ 'use strict' +const assert = require('node:assert/strict') const { FakeAgent, sandboxCwd, @@ -8,7 +9,7 @@ const { spawnPluginIntegrationTestProc } = require('../../../../integration-tests/helpers') const { withVersions } = require('../../../dd-trace/test/setup/mocha') -const { assert } = require('chai') + const { describe, it, beforeEach, afterEach } = require('mocha') describe('esm', () => { @@ -33,8 +34,8 @@ describe('esm', () => { it('is instrumented', async () => { const res = agent.assertMessageReceived(({ headers, payload }) => { - assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`) - assert.isArray(payload) + assert.strictEqual(headers.host, 
`127.0.0.1:${agent.port}`) + assert.ok(Array.isArray(payload)) assert.strictEqual(checkSpansForServiceName(payload, 'google_genai.request'), true) }) diff --git a/packages/datadog-plugin-http/test/http_endpoint.spec.js b/packages/datadog-plugin-http/test/http_endpoint.spec.js index 93f746cbd9d..089829935ee 100644 --- a/packages/datadog-plugin-http/test/http_endpoint.spec.js +++ b/packages/datadog-plugin-http/test/http_endpoint.spec.js @@ -1,7 +1,8 @@ 'use strict' +const assert = require('node:assert/strict') const axios = require('axios') -const { expect } = require('chai') + const { describe, it, beforeEach, afterEach, before } = require('mocha') const agent = require('../../dd-trace/test/plugins/agent') @@ -56,10 +57,10 @@ describe('Plugin', () => { it('should set http.endpoint with int when no route is available', done => { agent .assertSomeTraces(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/users/123`) - expect(traces[0][0].meta).to.not.have.property('http.route') - expect(traces[0][0].meta).to.have.property('http.endpoint', '/users/{param:int}') + assert.strictEqual(traces[0][0].name, 'web.request') + assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/users/123`) + assert.ok(!('http.route' in traces[0][0].meta)) + assert.strictEqual(traces[0][0].meta['http.endpoint'], '/users/{param:int}') }) .then(done) .catch(done) @@ -70,9 +71,9 @@ describe('Plugin', () => { it('should set http.endpoint with int_id when no route is available', done => { agent .assertSomeTraces(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0].meta).to.not.have.property('http.route') - expect(traces[0][0].meta).to.have.property('http.endpoint', '/resources/{param:int_id}') + assert.strictEqual(traces[0][0].name, 'web.request') + assert.ok(!('http.route' in traces[0][0].meta)) + 
assert.strictEqual(traces[0][0].meta['http.endpoint'], '/resources/{param:int_id}') }) .then(done) .catch(done) @@ -83,10 +84,10 @@ describe('Plugin', () => { it('should set http.endpoint with hex when no route is available', done => { agent .assertSomeTraces(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/orders/abc123`) - expect(traces[0][0].meta).to.not.have.property('http.route') - expect(traces[0][0].meta).to.have.property('http.endpoint', '/orders/{param:hex}') + assert.strictEqual(traces[0][0].name, 'web.request') + assert.strictEqual(traces[0][0].meta['http.url'], `http://localhost:${port}/orders/abc123`) + assert.ok(!('http.route' in traces[0][0].meta)) + assert.strictEqual(traces[0][0].meta['http.endpoint'], '/orders/{param:hex}') }) .then(done) .catch(done) @@ -97,9 +98,9 @@ describe('Plugin', () => { it('should set http.endpoint with hex_id when no route is available', done => { agent .assertSomeTraces(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0].meta).to.not.have.property('http.route') - expect(traces[0][0].meta).to.have.property('http.endpoint', '/resources/{param:hex_id}') + assert.strictEqual(traces[0][0].name, 'web.request') + assert.ok(!('http.route' in traces[0][0].meta)) + assert.strictEqual(traces[0][0].meta['http.endpoint'], '/resources/{param:hex_id}') }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index e9560e25977..9d58bbbd994 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -3,7 +3,6 @@ const assert = require('node:assert/strict') const { randomUUID } = require('node:crypto') -const { expect } = require('chai') const dc = require('dc-polyfill') const { describe, it, beforeEach, afterEach, before } = 
require('mocha') const semver = require('semver') @@ -161,7 +160,7 @@ describe('Plugin', () => { it('should not extract bootstrap servers when initialized with a function', async () => { const expectedSpanPromise = agent.assertSomeTraces(traces => { const span = traces[0][0] - expect(span.meta).to.not.have.any.keys(['messaging.kafka.bootstrap.servers']) + assert.ok(!((['messaging.kafka.bootstrap.servers']).some(k => Object.hasOwn((span.meta), k)))) }) kafka = new Kafka({ @@ -216,7 +215,7 @@ describe('Plugin', () => { it('should hit an error for the first send and not inject headers in later sends', async () => { await assert.rejects(producer.send({ topic: testTopic, messages }), error) - expect(messages[0].headers).to.have.property('x-datadog-trace-id') + assert.ok(Object.hasOwn(messages[0].headers, 'x-datadog-trace-id')) // restore the stub to allow the next send to succeed sendRequestStub.restore() @@ -407,7 +406,7 @@ describe('Plugin', () => { let eachMessage = async ({ topic, partition, message }) => { setImmediate(() => { try { - expect(spy).to.have.been.calledOnceWith(undefined, beforeFinish.name) + sinon.assert.calledOnceWithExactly(spy, undefined, beforeFinish.name) done() } catch (e) { @@ -560,7 +559,7 @@ describe('Plugin', () => { await deferred.promise await consumer.disconnect() // Flush ongoing `eachMessage` calls for (const call of setOffsetSpy.getCalls()) { - expect(call.args[0]).to.not.have.property('type', 'kafka_commit') + assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') } /** @@ -576,18 +575,20 @@ describe('Plugin', () => { // Check our work const runArg = setOffsetSpy.lastCall.args[0] - expect(setOffsetSpy).to.be.calledOnce - expect(runArg).to.have.property('offset', commitMeta.offset) - expect(runArg).to.have.property('partition', commitMeta.partition) - expect(runArg).to.have.property('topic', commitMeta.topic) - expect(runArg).to.have.property('type', 'kafka_commit') - expect(runArg).to.have.property('consumer_group', 'test-group') + 
sinon.assert.calledOnce(setOffsetSpy) + assert.strictEqual(runArg?.offset, commitMeta.offset) + assert.strictEqual(runArg?.partition, commitMeta.partition) + assert.strictEqual(runArg?.topic, commitMeta.topic) + assertObjectContains(runArg, { + type: 'kafka_commit', + consumer_group: 'test-group' + }) }) } it('Should add backlog on producer response', async () => { await sendMessages(kafka, testTopic, messages) - expect(setOffsetSpy).to.be.calledOnce + sinon.assert.calledOnce(setOffsetSpy) const { topic } = setOffsetSpy.lastCall.args[0] assert.strictEqual(topic, testTopic) }) diff --git a/packages/datadog-plugin-koa/test/index.spec.js b/packages/datadog-plugin-koa/test/index.spec.js index 02d8f2572a2..fec6e2a85e4 100644 --- a/packages/datadog-plugin-koa/test/index.spec.js +++ b/packages/datadog-plugin-koa/test/index.spec.js @@ -4,7 +4,7 @@ const assert = require('node:assert/strict') const { AsyncLocalStorage } = require('node:async_hooks') const axios = require('axios') -const { expect } = require('chai') + const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const semver = require('semver') const sinon = require('sinon') @@ -200,7 +200,7 @@ describe('Plugin', () => { try { sinon.assert.called(childSpan.finish) sinon.assert.called(parentSpan.finish) - expect(parentSpan.finish).to.have.been.calledAfter(childSpan.finish) + assert.strictEqual(parentSpan.finish.calledAfter(childSpan.finish), true) assert.strictEqual(childSpan.context()._parentId.toString(10), parentSpan.context().toSpanId()) assert.notStrictEqual(parentSpan.context()._parentId, null) done() diff --git a/packages/datadog-plugin-langchain/test/index.spec.js b/packages/datadog-plugin-langchain/test/index.spec.js index 502a8f73c1e..5de9b36d29a 100644 --- a/packages/datadog-plugin-langchain/test/index.spec.js +++ b/packages/datadog-plugin-langchain/test/index.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { after, 
before, beforeEach, describe, it } = require('mocha') const { assertObjectContains, useEnv } = require('../../../integration-tests/helpers') @@ -176,7 +175,7 @@ describe('Plugin', () => { const result = await llm.generate(['what is 2 + 2?']) - assert.ok(result.generations[0][0].text != null) + assert.ok(result.generations[0][0].text) await checkTraces }) @@ -195,8 +194,8 @@ describe('Plugin', () => { const llm = getLangChainOpenAiClient('llm', { model: 'gpt-3.5-turbo-instruct' }) const result = await llm.generate(['what is 2 + 2?', 'what is the circumference of the earth?']) - assert.ok(result.generations[0][0].text != null) - assert.ok(result.generations[1][0].text != null) + assert.ok(result.generations[0][0].text) + assert.ok(result.generations[1][0].text) await checkTraces }) @@ -216,8 +215,8 @@ describe('Plugin', () => { const llm = getLangChainOpenAiClient('llm', { model: 'gpt-3.5-turbo-instruct', n: 2 }) const result = await llm.generate(['what is 2 + 2?']) - assert.ok(result.generations[0][0].text != null) - assert.ok(result.generations[0][1].text != null) + assert.ok(result.generations[0][0].text) + assert.ok(result.generations[0][1].text) await checkTraces }) @@ -268,7 +267,7 @@ describe('Plugin', () => { const chatModel = getLangChainOpenAiClient('chat', { model: 'gpt-4' }) const result = await chatModel.invoke('Hello!') - assert.ok(result.content != null) + assert.ok(result.content) await checkTraces }) @@ -292,7 +291,7 @@ describe('Plugin', () => { ] const result = await chatModel.invoke(messages) - assert.ok(result.content != null) + assert.ok(result.content) await checkTraces }) @@ -316,7 +315,7 @@ describe('Plugin', () => { ] const result = await chatModel.invoke(messages) - assert.ok(result.content != null) + assert.ok(result.content) await checkTraces }) @@ -382,7 +381,7 @@ describe('Plugin', () => { const chatModel = getLangChainAnthropicClient('chat', { modelName: 'claude-3-5-sonnet-20241022' }) const result = await chatModel.invoke('Hello!') - 
assert.ok(result.content != null) + assert.ok(result.content) await checkTraces }) @@ -448,7 +447,7 @@ describe('Plugin', () => { ] const result = await chain.invoke(messages) - assert.ok(result != null) + assert.ok(result) await checkTraces }) @@ -488,7 +487,7 @@ describe('Plugin', () => { const result = await chain.invoke({ topic: 'chickens', style: 'dad joke' }) - assert.ok(result != null) + assert.ok(result) await checkTraces }) @@ -524,8 +523,8 @@ describe('Plugin', () => { const result = await chain.batch(['chickens', 'dogs']) assert.strictEqual(result.length, 2) - assert.ok(result[0] != null) - assert.ok(result[1] != null) + assert.ok(result[0]) + assert.ok(result[1]) await checkTraces }) @@ -718,7 +717,7 @@ describe('Plugin', () => { try { await myTool.invoke() - expect.fail('Expected an error to be thrown') + assert.fail('Expected an error to be thrown') } catch {} await checkTraces @@ -759,7 +758,7 @@ describe('Plugin', () => { // we need the spanResourceMatch, otherwise we'll match from the beforeEach const result = await vectorstore.similaritySearch('The powerhouse of the cell is the mitochondria', 2) - assert.ok(result != null) + assert.ok(result) await checkTraces }) @@ -784,7 +783,7 @@ describe('Plugin', () => { const result = await vectorstore.similaritySearchWithScore( 'The powerhouse of the cell is the mitochondria', 2 ) - assert.ok(result != null) + assert.ok(result) await checkTraces }) diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index 87a69a703af..b46b9b868b9 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -434,8 +434,7 @@ describe('Plugin', function () { 'error.type': 'Error' } }) - - assert.ok(spans[1].meta['error.stack'] != null) + assert.ok(spans[1].meta['error.stack']) }) .then(done) .catch(done) @@ -603,7 +602,7 @@ describe('Plugin', function () { } }) - assert.ok(spans[1].meta['error.stack'] != null) + 
assert.ok(spans[1].meta['error.stack']) }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-openai/test/index.spec.js b/packages/datadog-plugin-openai/test/index.spec.js index 49ef23f434a..9f1ce739cc0 100644 --- a/packages/datadog-plugin-openai/test/index.spec.js +++ b/packages/datadog-plugin-openai/test/index.spec.js @@ -114,18 +114,12 @@ describe('Plugin', () => { }) it('should attach an error to the span', async () => { - const checkTraces = agent - .assertSomeTraces(traces => { - assertObjectContains(traces[0][0], { - error: 1, - meta: { - 'error.type': 'Error' - } - }) - // the message content differs on OpenAI version, even between patches - assert.ok(traces[0][0].meta['error.message'] != null) - assert.ok(traces[0][0].meta['error.stack'] != null) - }) + const checkTraces = agent.assertFirstTraceSpan({ + error: 1, + meta: { + 'error.type': 'Error' + } + }) const params = { model: 'gpt-3.5-turbo', // incorrect model @@ -178,10 +172,10 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.completions.create(params) - assert.ok(result.id != null) + assert.ok(result.id) } else { const result = await openai.createCompletion(params) - assert.ok(result.data.id != null) + assert.ok(result.data.id) } tracer.trace('child of outer', innerSpan => { @@ -263,10 +257,10 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.completions.create(params) - assert.ok(result.id != null) + assert.ok(result.id) } else { const result = await openai.createCompletion(params) - assert.ok(result.data.id != null) + assert.ok(result.data.id) } await checkTraces @@ -295,10 +289,10 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.completions.create(params) - assert.ok(result.id != null) + assert.ok(result.id) } else { const result = await openai.createCompletion(params) - assert.ok(result.data.id != null) + 
assert.ok(result.data.id) } await checkTraces @@ -423,10 +417,10 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.embeddings.create(params) - assert.ok(result.model != null) + assert.ok(result.model) } else { const result = await openai.createEmbedding(params) - assert.ok(result.data.model != null) + assert.ok(result.data.model) } await checkTraces @@ -458,11 +452,11 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.models.list() assert.deepStrictEqual(result.object, 'list') - assert.ok(result.data.length != null) + assert.ok(result.data.length) } else { const result = await openai.listModels() assert.deepStrictEqual(result.data.object, 'list') - assert.ok(result.data.data.length != null) + assert.ok(result.data.data.length) } await checkTraces @@ -565,13 +559,13 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.files.list() - assert.ok(result.data.length != null) - assert.ok(result.data[0].id != null) + assert.ok(result.data.length) + assert.ok(result.data[0].id) } else { const result = await openai.listFiles() - assert.ok(result.data.data.length != null) - assert.ok(result.data.data[0].id != null) + assert.ok(result.data.data.length) + assert.ok(result.data.data[0].id) } await checkTraces @@ -653,11 +647,11 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.files.retrieve('file-RpTpuvRVtnKpdKZb7DDGto') - assert.ok(result.filename != null) + assert.ok(result.filename) } else { const result = await openai.retrieveFile('file-RpTpuvRVtnKpdKZb7DDGto') - assert.ok(result.data.filename != null) + assert.ok(result.data.filename) } await checkTraces @@ -687,7 +681,7 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0 < 4.17.1')) { const result = await openai.files.retrieveContent('file-RpTpuvRVtnKpdKZb7DDGto') - assert.ok(result != 
null) + assert.ok(result) } else if (semver.satisfies(realVersion, '>=4.17.1')) { const result = await openai.files.content('file-RpTpuvRVtnKpdKZb7DDGto') @@ -695,7 +689,7 @@ describe('Plugin', () => { } else { const result = await openai.downloadFile('file-RpTpuvRVtnKpdKZb7DDGto') - assert.ok(result.data != null) + assert.ok(result.data) } await checkTraces @@ -772,7 +766,7 @@ describe('Plugin', () => { } const result = await openai.fineTuning.jobs.create(params) - assert.ok(result.id != null) + assert.ok(result.id) await checkTraces }) @@ -986,7 +980,7 @@ describe('Plugin', () => { if (responseFormat === 'url') { assert.strictEqual(result.data[0].url.startsWith('https://'), true) } else { - assert.ok(result.data[0].b64_json != null) + assert.ok(result.data[0].b64_json) } } else { const result = await openai.createImage({ @@ -1000,7 +994,7 @@ describe('Plugin', () => { if (responseFormat === 'url') { assert.strictEqual(result.data.data[0].url.startsWith('https://'), true) } else { - assert.ok(result.data.data[0].b64_json != null) + assert.ok(result.data.data[0].b64_json) } } @@ -1187,7 +1181,7 @@ describe('Plugin', () => { temperature: 0.5 }) - assert.ok(result.text != null) + assert.ok(result.text) } else { const result = await openai.createTranslation( fs.createReadStream(Path.join(__dirname, 'translation.m4a')), @@ -1197,7 +1191,7 @@ describe('Plugin', () => { 0.5 ) - assert.ok(result.data.text != null) + assert.ok(result.data.text) } await checkTraces @@ -1259,19 +1253,19 @@ describe('Plugin', () => { const result = await prom - assert.ok(result.id != null) - assert.ok(result.model != null) + assert.ok(result.id) + assert.ok(result.model) assert.deepStrictEqual(result.choices[0].message.role, 'assistant') - assert.ok(result.choices[0].message.content != null) - assert.ok(result.choices[0].finish_reason != null) + assert.ok(result.choices[0].message.content) + assert.ok(result.choices[0].finish_reason) } else { const result = await 
openai.createChatCompletion(params) - assert.ok(result.data.id != null) - assert.ok(result.data.model != null) + assert.ok(result.data.id) + assert.ok(result.data.model) assert.deepStrictEqual(result.data.choices[0].message.role, 'assistant') - assert.ok(result.data.choices[0].message.content != null) - assert.ok(result.data.choices[0].finish_reason != null) + assert.ok(result.data.choices[0].message.content) + assert.ok(result.data.choices[0].finish_reason) } await checkTraces @@ -1349,10 +1343,10 @@ describe('Plugin', () => { if (semver.satisfies(realVersion, '>=4.0.0')) { const result = await openai.chat.completions.create(params) - assert.ok(result.id != null) + assert.ok(result.id) } else { const result = await openai.createChatCompletion(params) - assert.ok(result.data.id != null) + assert.ok(result.data.id) } await checkTraces @@ -1669,7 +1663,7 @@ describe('Plugin', () => { assert.ok(!Object.hasOwn(prom, 'withResponse') && ('withResponse' in prom)) const response = await prom - assert.ok(response.choices[0].message.content != null) + assert.ok(response.choices[0].message.content) await checkTraces }) diff --git a/packages/datadog-plugin-prisma/test/index.spec.js b/packages/datadog-plugin-prisma/test/index.spec.js index 969ee6d5e34..276b2b3be43 100644 --- a/packages/datadog-plugin-prisma/test/index.spec.js +++ b/packages/datadog-plugin-prisma/test/index.spec.js @@ -173,14 +173,16 @@ describe('Plugin', () => { const tracingPromise = agent.assertSomeTraces(traces => { // Find the db_query span const dbQuerySpan = traces[0].find(span => span.meta['prisma.name'] === 'db_query') - assert.ok(dbQuerySpan != null) - // Verify database connection attributes are present - assert.strictEqual(dbQuerySpan.meta['db.name'], 'postgres') - assert.strictEqual(dbQuerySpan.meta['db.user'], 'foo') - assert.strictEqual(dbQuerySpan.meta['out.host'], 'localhost') - assert.strictEqual(dbQuerySpan.meta['network.destination.port'], '5432') - 
assert.strictEqual(dbQuerySpan.meta['db.type'], 'postgres') + assertObjectContains(dbQuerySpan, { + meta: { + 'db.name': 'postgres', + 'db.user': 'foo', + 'out.host': 'localhost', + 'network.destination.port': '5432', + 'db.type': 'postgres' + } + }) }) const engineSpans = [ diff --git a/packages/datadog-plugin-winston/test/index.spec.js b/packages/datadog-plugin-winston/test/index.spec.js index f29a54443df..ed1736791de 100644 --- a/packages/datadog-plugin-winston/test/index.spec.js +++ b/packages/datadog-plugin-winston/test/index.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const { assertObjectContains } = require('../../../integration-tests/helpers') @@ -291,7 +290,7 @@ describe('Plugin', () => { winston.log('info', 'test', meta) assert.strictEqual(meta.dd, undefined) - expect(spy).to.have.been.calledWith() + sinon.assert.calledWithMatch(spy) }) assert.strictEqual(await logServer.logPromise, undefined) }) diff --git a/packages/datadog-shimmer/test/shimmer.spec.js b/packages/datadog-shimmer/test/shimmer.spec.js index 8529c8f0d46..43521149b18 100644 --- a/packages/datadog-shimmer/test/shimmer.spec.js +++ b/packages/datadog-shimmer/test/shimmer.spec.js @@ -2,8 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') - const shimmer = require('../src/shimmer') describe('shimmer', () => { @@ -237,7 +235,7 @@ describe('shimmer', () => { shimmer.wrap(obj, 'count', () => () => {}) assert.strictEqual(obj.count.test, 'test') - expect(Object.getOwnPropertyNames(obj.count)).to.not.include('test') + assert.strictEqual(Object.hasOwn(obj.count, 'test'), false) }) it('should inherit from the original method prototype 2', () => { @@ -253,7 +251,7 @@ describe('shimmer', () => { assert.strictEqual(obj.count.test, 'test') assert.strictEqual(obj.count.foo, 42) - expect(Object.getOwnPropertyNames(obj.count)).to.not.include('test') 
+ assert.strictEqual(Object.hasOwn(obj.count, 'test'), false) }) it('should preserve the property descriptor of the original', () => { @@ -477,7 +475,7 @@ describe('shimmer', () => { const wrapped = shimmer.wrapFunction(count, count => () => {}) assert.strictEqual(wrapped.test, 'test') - expect(Object.getOwnPropertyNames(wrapped)).to.not.include('test') + assert.strictEqual(Object.hasOwn(wrapped, 'test'), false) }) it('should mass wrap methods on objects', () => { diff --git a/packages/dd-trace/test/appsec/blocking.spec.js b/packages/dd-trace/test/appsec/blocking.spec.js index 62af7130d64..43b2710f7b1 100644 --- a/packages/dd-trace/test/appsec/blocking.spec.js +++ b/packages/dd-trace/test/appsec/blocking.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -75,12 +74,14 @@ describe('blocking', () => { const blocked = block(req, res, rootSpan) assert.strictEqual(blocked, false) - expect(log.warn).to.have.been - .calledOnceWithExactly('[ASM] Cannot send blocking response when headers have already been sent') + sinon.assert.calledOnceWithExactly( + log.warn, + '[ASM] Cannot send blocking response when headers have already been sent' + ) sinon.assert.calledOnceWithExactly(rootSpan.setTag, '_dd.appsec.block.failed', 1) sinon.assert.notCalled(res.setHeader) sinon.assert.notCalled(res.constructor.prototype.end) - expect(telemetry.updateBlockFailureMetric).to.be.calledOnceWithExactly(req) + sinon.assert.calledOnceWithExactly(telemetry.updateBlockFailureMetric, req) }) it('should send blocking response with html type if present in the headers', () => { diff --git a/packages/dd-trace/test/appsec/graphql.spec.js b/packages/dd-trace/test/appsec/graphql.spec.js index 12e6e90b232..ac1cbeb4cb8 100644 --- a/packages/dd-trace/test/appsec/graphql.spec.js +++ b/packages/dd-trace/test/appsec/graphql.spec.js 
@@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -274,7 +273,7 @@ describe('GraphQL', () => { sinon.assert.calledOnceWithExactly(blocking.getBlockingData, req, 'graphql', blockParameters) sinon.assert.calledOnceWithExactly(rootSpan.setTag, '_dd.appsec.block.failed', 1) - expect(telemetry.updateBlockFailureMetric).to.be.calledOnceWithExactly(req) + sinon.assert.calledOnceWithExactly(telemetry.updateBlockFailureMetric, req) }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-password-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-password-analyzer.spec.js index d020d0e6cc8..705a87b6006 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-password-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-password-analyzer.spec.js @@ -6,7 +6,6 @@ const fs = require('node:fs') const os = require('node:os') const path = require('node:path') -const { expect } = require('chai') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -102,7 +101,12 @@ describe('Hardcoded Password Analyzer', () => { }) const evidence = { value: ident } - expect(reportEvidence).to.be.calledOnceWithExactly({ file: relFile, line, column, ident, data: ruleId }, undefined, evidence) + sinon.assert.calledOnceWithExactly( + reportEvidence, + { file: relFile, line, column, ident, data: ruleId }, + undefined, + evidence + ) }) }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js index 9a48dbd915c..0d90ebaa3bb 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js +++ 
b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js @@ -52,7 +52,7 @@ describe('Hardcoded Secret Analyzer', () => { }] }) - assert.ok([NameAndValue, ValueOnly].includes(testCase.type)) + assertObjectContains([NameAndValue, ValueOnly], [testCase.type]) sinon.assert.calledOnceWithExactly(report, { file: relFile, line, column, ident, data: testCase.id }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.spec.js index cf886e530db..20c96c0a54d 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -131,6 +130,6 @@ describe('ldap-injection-analyzer', () => { onLdapClientSearch({ base: 'base', filter: 'filter', name: 'datadog:ldapjs:client:search' }) - expect(analyzeAll.firstCall).to.be.calledWith('base', 'filter') + sinon.assert.calledWith(analyzeAll.firstCall, 'base', 'filter') }) }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js index 5cf042b3398..3a00d76fbd4 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/path-traversal-analyzer.spec.js @@ -6,7 +6,7 @@ const path = require('path') const { storage } = require('../../../../../datadog-core') const iastContextFunctions = require('../../../../src/appsec/iast/iast-context') -const expect = require('chai').expect + const sinon = require('sinon') const proxyquire = require('proxyquire') const pathTraversalAnalyzer = 
require('../../../../src/appsec/iast/analyzers/path-traversal-analyzer') @@ -183,7 +183,7 @@ describe('path-traversal-analyzer', () => { hasQuota.returns(true) proxyPathAnalyzer.analyze(['arg1']) - expect(addVulnerability).not.have.been.called + sinon.assert.notCalled(addVulnerability) }) }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js index 287631f9028..ad4764cd4f3 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const dc = require('dc-polyfill') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -214,7 +213,7 @@ describe('sql-injection-analyzer', () => { onMysqlQueryStart({ sql: 'SELECT 1' }) - expect(analyze).to.be.calledOnceWith('SELECT 1') + sinon.assert.calledOnceWithMatch(analyze, 'SELECT 1') }) it('should call analyze on apm:mysql2:query:start', () => { @@ -222,7 +221,7 @@ describe('sql-injection-analyzer', () => { onMysql2QueryStart({ sql: 'SELECT 1' }) - expect(analyze).to.be.calledOnceWith('SELECT 1') + sinon.assert.calledOnceWithMatch(analyze, 'SELECT 1') }) it('should call analyze on apm:pg:query:start', () => { @@ -230,7 +229,7 @@ describe('sql-injection-analyzer', () => { onPgQueryStart({ originalText: 'SELECT 1', query: { text: 'modified-query SELECT 1' } }) - expect(analyze).to.be.calledOnceWith('SELECT 1') + sinon.assert.calledOnceWithMatch(analyze, 'SELECT 1') }) }) diff --git a/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js b/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js index b75879f9ee5..4d96b8d925f 100644 --- a/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js +++ 
b/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -68,8 +67,8 @@ describe('IastContextPlugin', () => { it('should add a subscription to the channel', () => { plugin.startCtxOn(channelName, tag) - expect(addSub).to.be.calledOnceWith(channelName) - expect(getAndRegisterSubscription).to.be.calledOnceWith({ channelName, tag, tagKey: TagKey.SOURCE_TYPE }) + sinon.assert.calledOnceWithMatch(addSub, channelName) + sinon.assert.calledOnceWithExactly(getAndRegisterSubscription, { channelName, tag, tagKey: TagKey.SOURCE_TYPE }) }) it('should call startContext when event is published', () => { @@ -90,7 +89,7 @@ describe('IastContextPlugin', () => { it('should add a subscription to the channel', () => { plugin.finishCtxOn(channelName) - expect(addSub).to.be.calledOnceWith(channelName) + sinon.assert.calledOnceWithMatch(addSub, channelName) }) it('should call finishContext when event is published', () => { @@ -135,20 +134,20 @@ describe('IastContextPlugin', () => { plugin.startContext() sinon.assert.calledOnce(plugin.getTopContext) - expect(plugin.getRootSpan).to.be.calledWith(store) + sinon.assert.calledWith(plugin.getRootSpan, store) }) it('should call overheadController before starting iast context', () => { plugin.startContext() - expect(acquireRequest).to.be.calledOnceWith(rootSpan) + sinon.assert.calledOnceWithExactly(acquireRequest, rootSpan) }) it('should add _dd.iast.enabled:0 tag in the rootSpan', () => { const addTags = sinon.stub(rootSpan, 'addTags') plugin.startContext() - expect(addTags).to.be.calledOnceWith({ [IAST_ENABLED_TAG_KEY]: 0 }) + sinon.assert.calledOnceWithExactly(addTags, { [IAST_ENABLED_TAG_KEY]: 0 }) }) it('should not fail if store does not contain span', () => { @@ -156,7 +155,7 @@ describe('IastContextPlugin', () => { 
plugin.startContext() - expect(acquireRequest).to.be.calledOnceWith(undefined) + sinon.assert.calledOnceWithExactly(acquireRequest, undefined) }) describe('if acquireRequest', () => { @@ -175,26 +174,26 @@ describe('IastContextPlugin', () => { const addTags = sinon.stub(rootSpan, 'addTags') plugin.startContext() - expect(addTags).to.be.calledOnceWith({ [IAST_ENABLED_TAG_KEY]: 1 }) + sinon.assert.calledOnceWithExactly(addTags, { [IAST_ENABLED_TAG_KEY]: 1 }) }) it('should create and save new IAST context and store it', () => { plugin.startContext() - expect(newIastContext).to.be.calledOnceWith(rootSpan) - expect(saveIastContext).to.be.calledOnceWith(store, topContext, context) + sinon.assert.calledOnceWithExactly(newIastContext, rootSpan) + sinon.assert.calledOnceWithExactly(saveIastContext, store, topContext, context) }) it('should create new taint-tracking transaction', () => { plugin.startContext() - expect(createTransaction).to.be.calledOnceWith('span-id', context) + sinon.assert.calledOnceWithExactly(createTransaction, 'span-id', context) }) it('should obtain needed info from data before starting iast context', () => { plugin.startContext() - expect(initializeRequestContext).to.be.calledOnceWith(context) + sinon.assert.calledOnceWithExactly(initializeRequestContext, context) }) }) }) @@ -217,7 +216,7 @@ describe('IastContextPlugin', () => { plugin.finishContext() - expect(sendVulnerabilities).to.be.calledOnceWith(vulnerabilities, rootSpan) + sinon.assert.calledOnceWithExactly(sendVulnerabilities, vulnerabilities, rootSpan) }) it('should remove the taint-tracking transaction', () => { @@ -230,7 +229,7 @@ describe('IastContextPlugin', () => { plugin.finishContext() - expect(removeTransaction).to.be.calledOnceWith(iastContext) + sinon.assert.calledOnceWithExactly(removeTransaction, iastContext) }) it('should clear iastContext and releaseRequest from OCE', () => { diff --git a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js 
b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js index e90d8a6bb3b..e7c08b57151 100644 --- a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { channel } = require('dc-polyfill') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -113,12 +112,12 @@ describe('IAST Plugin', () => { iastPlugin._getTelemetryHandler = getTelemetryHandler iastPlugin.addSub({ channelName, tagKey: VULNERABILITY_TYPE }, handler) - expect(getTelemetryHandler).to.be.not.called + sinon.assert.notCalled(getTelemetryHandler) getTelemetryHandler.reset() iastPlugin.addSub({ channelName, tagKey: SOURCE_TYPE, tag: 'test-tag' }, handler) - expect(getTelemetryHandler).to.be.not.called + sinon.assert.notCalled(getTelemetryHandler) }) }) @@ -217,7 +216,7 @@ describe('IAST Plugin', () => { loadChannel.publish({ name: 'test' }) - expect(onInstrumentationLoadedMock).to.be.calledWith('test') + sinon.assert.calledWith(onInstrumentationLoadedMock, 'test') }) }) @@ -227,12 +226,12 @@ describe('IAST Plugin', () => { iastPlugin._getTelemetryHandler = getTelemetryHandler iastPlugin.addSub({ channelName, tagKey: VULNERABILITY_TYPE }, handler) - expect(getTelemetryHandler).to.be.calledOnceWith(iastPlugin.pluginSubs[0]) + sinon.assert.calledOnceWithExactly(getTelemetryHandler, iastPlugin.pluginSubs[0]) getTelemetryHandler.reset() iastPlugin.addSub({ channelName, tagKey: SOURCE_TYPE, tag: 'test-tag' }, handler) - expect(getTelemetryHandler).to.be.calledOnceWith(iastPlugin.pluginSubs[1]) + sinon.assert.calledOnceWithExactly(getTelemetryHandler, iastPlugin.pluginSubs[1]) }) it('should register a pluginSubscription and increment a sink metric when a sink module is loaded', () => { @@ -249,7 +248,7 @@ describe('IAST Plugin', () => { loadChannel.publish({ name: 'sink' }) - 
expect(metricInc).to.be.calledOnceWith(undefined, vulnTags) + sinon.assert.calledOnceWithExactly(metricInc, undefined, vulnTags) }) it('should register and increment a sink metric when a sink module is loaded using a tracingChannel', () => { @@ -265,7 +264,7 @@ describe('IAST Plugin', () => { loadChannel.publish({ name: 'sink' }) - expect(metricInc).to.be.calledOnceWith(undefined, vulnTags) + sinon.assert.calledOnceWithExactly(metricInc, undefined, vulnTags) }) it('should register an pluginSubscription and increment a source metric when a source module is loaded', () => { @@ -282,7 +281,7 @@ describe('IAST Plugin', () => { loadChannel.publish({ name: 'source' }) - expect(metricInc).to.be.calledOnceWith(undefined, sourceTags) + sinon.assert.calledOnceWithExactly(metricInc, undefined, sourceTags) }) it('should increment a sink metric when event is received', () => { @@ -300,7 +299,7 @@ describe('IAST Plugin', () => { const telemetryHandler = addSubMock.secondCall.args[1] telemetryHandler() - expect(metricInc).to.be.calledOnceWith(undefined, vulnTags) + sinon.assert.calledOnceWithExactly(metricInc, undefined, vulnTags) }) it('should increment a source metric when event is received', () => { @@ -318,7 +317,7 @@ describe('IAST Plugin', () => { const telemetryHandler = addSubMock.secondCall.args[1] telemetryHandler() - expect(metricInc).to.be.calledOnceWith(undefined, sourceTags) + sinon.assert.calledOnceWithExactly(metricInc, undefined, sourceTags) }) it('should increment a source metric when event is received for every tag', () => { @@ -336,10 +335,10 @@ describe('IAST Plugin', () => { const telemetryHandler = addSubMock.secondCall.args[1] telemetryHandler() - expect(metricInc).to.be.calledThrice - expect(metricInc.firstCall).to.be.calledWith(undefined, [`${SOURCE_TYPE}:http.source`]) - expect(metricInc.secondCall).to.be.calledWith(undefined, [`${SOURCE_TYPE}:http.source2`]) - expect(metricInc.thirdCall).to.be.calledWith(undefined, [`${SOURCE_TYPE}:http.source3`]) + 
sinon.assert.calledThrice(metricInc) + sinon.assert.calledWith(metricInc.firstCall, undefined, [`${SOURCE_TYPE}:http.source`]) + sinon.assert.calledWith(metricInc.secondCall, undefined, [`${SOURCE_TYPE}:http.source2`]) + sinon.assert.calledWith(metricInc.thirdCall, undefined, [`${SOURCE_TYPE}:http.source3`]) }) }) @@ -379,7 +378,7 @@ describe('IAST Plugin', () => { }) sinon.assert.calledOnce(handler) - expect(metric.inc).to.be.calledOnceWithExactly(iastContext, tags) + sinon.assert.calledOnceWithExactly(metric.inc, iastContext, tags) }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/index.spec.js b/packages/dd-trace/test/appsec/iast/index.spec.js index f56dac4a73d..c6c646d0810 100644 --- a/packages/dd-trace/test/appsec/iast/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/index.spec.js @@ -3,7 +3,7 @@ const assert = require('node:assert/strict') const axios = require('axios') -const { expect } = require('chai') + const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -170,7 +170,7 @@ describe('IAST Index', () => { it('should enable AppsecFsPlugin', () => { mockIast.enable(config) sinon.assert.calledOnceWithExactly(appsecFsPlugin.enable, IAST_MODULE) - expect(analyzers.enableAllAnalyzers).to.have.been.calledAfter(appsecFsPlugin.enable) + assert.strictEqual(analyzers.enableAllAnalyzers.calledAfter(appsecFsPlugin.enable), true) }) }) @@ -205,7 +205,7 @@ describe('IAST Index', () => { it('should not finish global context if not enabled before ', () => { mockIast.disable(config) - expect(mockOverheadController.finishGlobalContext).to.have.been.not.called + sinon.assert.notCalled(mockOverheadController.finishGlobalContext) }) }) diff --git a/packages/dd-trace/test/appsec/iast/overhead-controller.spec.js b/packages/dd-trace/test/appsec/iast/overhead-controller.spec.js index dfa4f518799..625770eaddd 100644 --- a/packages/dd-trace/test/appsec/iast/overhead-controller.spec.js 
+++ b/packages/dd-trace/test/appsec/iast/overhead-controller.spec.js @@ -4,7 +4,7 @@ const assert = require('node:assert/strict') const { EventEmitter } = require('node:events') const axios = require('axios') -const { expect } = require('chai') + const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -50,7 +50,7 @@ describe('Overhead controller', () => { it('should populate request context', () => { const iastContext = {} overheadController.initializeRequestContext(iastContext) - expect(iastContext).to.have.nested.property(overheadController.OVERHEAD_CONTROLLER_CONTEXT_KEY) + assert.ok(hasNestedProperty(iastContext, overheadController.OVERHEAD_CONTROLLER_CONTEXT_KEY)) }) }) }) @@ -670,3 +670,16 @@ describe('Overhead controller', () => { }) }) }) + +function hasNestedProperty (obj, path) { + if (obj == null || typeof path !== 'string') return false + + const parts = path.split('.') + let cur = obj + for (const part of parts) { + if (cur == null || !Object.hasOwn(cur, part)) return false + cur = cur[part] + } + + return true +} diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/index.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/index.spec.js index dbe2784bbc8..491829d0a38 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/index.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('mocha') const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -37,8 +36,7 @@ describe('IAST TaintTracking', () => { taintTracking.enableTaintTracking(config.iast) sinon.assert.calledOnce(taintTrackingOperations.enableTaintOperations) sinon.assert.calledOnce(taintTrackingPlugin.enable) - expect(taintTrackingOperations.setMaxTransactions) - 
.to.have.been.calledOnceWithExactly(config.iast.maxConcurrentRequests) + sinon.assert.calledOnceWithExactly(taintTrackingOperations.setMaxTransactions, config.iast.maxConcurrentRequests) }) it('Should disable both rewriter, taint tracking operations, plugin', () => { diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js index 4cd955e3ec6..54d51b46770 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const dc = require('dc-polyfill') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -104,7 +103,7 @@ describe('IAST Taint tracking plugin', () => { } taintTrackingPlugin._taintTrackingHandler(originType, objToBeTainted) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith(iastContext, objToBeTainted, originType) + sinon.assert.calledOnceWithExactly(taintTrackingOperations.taintObject, iastContext, objToBeTainted, originType) }) it('Should taint property in object', () => { @@ -116,7 +115,8 @@ describe('IAST Taint tracking plugin', () => { } taintTrackingPlugin._taintTrackingHandler(originType, objToBeTainted, propertyToBeTainted) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, objToBeTainted[propertyToBeTainted], originType @@ -134,7 +134,8 @@ describe('IAST Taint tracking plugin', () => { objToBeTainted[propertyToBeTainted].self = objToBeTainted taintTrackingPlugin._taintTrackingHandler(originType, objToBeTainted, propertyToBeTainted) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, 
objToBeTainted[propertyToBeTainted], originType @@ -160,7 +161,8 @@ describe('IAST Taint tracking plugin', () => { queryReadFinishChannel.publish({ query: req.query }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.query, 'http.request.parameter' @@ -176,7 +178,8 @@ describe('IAST Taint tracking plugin', () => { bodyParserFinishChannel.publish({ req }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.body, 'http.request.body' @@ -192,7 +195,8 @@ describe('IAST Taint tracking plugin', () => { middlewareNextChannel.publish({ req }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.body, 'http.request.body' @@ -210,7 +214,8 @@ describe('IAST Taint tracking plugin', () => { middlewareNextChannel.publish({ req }) bodyParserFinishChannel.publish({ req }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.body, 'http.request.body' @@ -224,7 +229,8 @@ describe('IAST Taint tracking plugin', () => { cookieParseFinishCh.publish({ cookies }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, cookies, HTTP_REQUEST_COOKIE_VALUE @@ -239,7 +245,8 @@ describe('IAST Taint tracking plugin', () => { } processParamsStartCh.publish({ req }) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.params, HTTP_REQUEST_PATH_PARAM @@ -254,7 +261,8 @@ describe('IAST Taint tracking plugin', () => { } routerParamStartCh.publish({ req }) - 
expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.params, HTTP_REQUEST_PATH_PARAM @@ -277,13 +285,15 @@ describe('IAST Taint tracking plugin', () => { } taintTrackingPlugin.taintRequest(req, iastContext) - expect(taintTrackingOperations.taintObject).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.taintObject, iastContext, req.headers, HTTP_REQUEST_HEADER_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.newTaintedString, iastContext, req.url, HTTP_REQUEST_URI, @@ -325,7 +335,8 @@ describe('IAST Taint tracking plugin', () => { }] sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.newTaintedString, iastContext, 'string value 1', '0.name', @@ -337,7 +348,8 @@ describe('IAST Taint tracking plugin', () => { const result = { id: 1, description: 'value' } sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.newTaintedString, iastContext, 'value', 'description', @@ -378,14 +390,16 @@ describe('IAST Taint tracking plugin', () => { ] sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.be.calledTwice - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledTwice(taintTrackingOperations.newTaintedString) + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'value', '0.description', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'child1', '0.children.0.name', @@ -418,14 +432,16 @@ 
describe('IAST Taint tracking plugin', () => { }] sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.be.calledTwice - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledTwice(taintTrackingOperations.newTaintedString) + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'string value 1', '0.name', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'string value 2', '1.name', @@ -437,7 +453,8 @@ describe('IAST Taint tracking plugin', () => { const result = { id: 1, description: 'value' } sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.be.calledOnceWith( + sinon.assert.calledOnceWithExactly( + taintTrackingOperations.newTaintedString, iastContext, 'value', 'description', @@ -504,38 +521,44 @@ describe('IAST Taint tracking plugin', () => { ] sequelizeFinish.publish({ result }) - expect(taintTrackingOperations.newTaintedString).to.callCount(6) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.callCount(taintTrackingOperations.newTaintedString, 6) + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'value', '0.description', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'child1', '0.children.0.name', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'child2', '0.children.1.name', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'value2', '1.description', SQL_ROW_VALUE ) - 
expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'child4', '1.children.0.name', SQL_ROW_VALUE ) - expect(taintTrackingOperations.newTaintedString).to.be.calledWith( + sinon.assert.calledWith( + taintTrackingOperations.newTaintedString, iastContext, 'child5', '1.children.1.name', diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/plugins/kafka.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/plugins/kafka.spec.js index 5306d30ead5..51e20e4fda9 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/plugins/kafka.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/plugins/kafka.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -46,7 +45,7 @@ describe('Kafka consumer plugin', () => { afterEach(sinon.restore) it('should subscribe to dd-trace:kafkajs:consumer:afterStart channel', () => { - expect(addSub).to.be.calledOnceWith({ + sinon.assert.calledOnceWithMatch(addSub, { channelName: 'dd-trace:kafkajs:consumer:afterStart', tag: [KAFKA_MESSAGE_KEY, KAFKA_MESSAGE_VALUE] }) @@ -60,10 +59,10 @@ describe('Kafka consumer plugin', () => { handler({ message }) - expect(newTaintedObject).to.be.calledTwice + sinon.assert.calledTwice(newTaintedObject) - expect(newTaintedObject.firstCall).to.be.calledWith(iastContext, message.key, undefined, KAFKA_MESSAGE_KEY) - expect(newTaintedObject.secondCall).to.be.calledWith(iastContext, message.value, undefined, KAFKA_MESSAGE_VALUE) + sinon.assert.calledWith(newTaintedObject.firstCall, iastContext, message.key, undefined, KAFKA_MESSAGE_KEY) + sinon.assert.calledWith(newTaintedObject.secondCall, iastContext, message.value, undefined, KAFKA_MESSAGE_VALUE) }) it('should taint key Buffer.toString method', () => { 
@@ -76,7 +75,7 @@ describe('Kafka consumer plugin', () => { const keyStr = message.key.toString() - expect(newTaintedString).to.be.calledOnceWith(iastContext, keyStr, undefined, KAFKA_MESSAGE_KEY) + sinon.assert.calledOnceWithExactly(newTaintedString, iastContext, keyStr, undefined, KAFKA_MESSAGE_KEY) }) it('should taint value Buffer.toString method', () => { @@ -89,7 +88,7 @@ describe('Kafka consumer plugin', () => { const valueStr = message.value.toString() - expect(newTaintedString).to.be.calledOnceWith(iastContext, valueStr, undefined, KAFKA_MESSAGE_VALUE) + sinon.assert.calledOnceWithExactly(newTaintedString, iastContext, valueStr, undefined, KAFKA_MESSAGE_VALUE) }) it('should not fail with an unknown kafka message', () => { diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter-telemetry.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter-telemetry.spec.js index 17ba5d9b00f..9d886a2972a 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter-telemetry.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter-telemetry.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('mocha') const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -45,7 +44,7 @@ describe('rewriter telemetry', () => { } incrementTelemetryIfNeeded(metrics) - expect(instrumentedPropagationInc).to.be.calledOnceWith(undefined, metrics.instrumentedPropagation) + sinon.assert.calledOnceWithExactly(instrumentedPropagationInc, undefined, metrics.instrumentedPropagation) }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter.spec.js index 85c99dda5f6..10cae4437f1 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/rewriter.spec.js @@ -2,7 +2,6 @@ const assert = 
require('node:assert/strict') -const { expect } = require('chai') const dc = require('dc-polyfill') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -277,7 +276,7 @@ describe('IAST Rewriter', () => { port.postMessage({ type: constants.REWRITTEN_MESSAGE, data }) waitUntilCheckSuccess(() => { - expect(cacheRewrittenSourceMap).to.be.calledOnceWith('file.js', content) + sinon.assert.calledOnceWithExactly(cacheRewrittenSourceMap, 'file.js', content) done() }) @@ -297,7 +296,7 @@ describe('IAST Rewriter', () => { port.postMessage({ type: constants.REWRITTEN_MESSAGE, data }) waitUntilCheckSuccess(() => { - expect(rewriterTelemetry.incrementTelemetryIfNeeded).to.be.calledOnceWith(metrics) + sinon.assert.calledOnceWithExactly(rewriterTelemetry.incrementTelemetryIfNeeded, metrics) done() }) @@ -339,13 +338,13 @@ describe('IAST Rewriter', () => { port.postMessage({ type: constants.LOG_MESSAGE, data }) waitUntilCheckSuccess(() => { - expect(log.error).to.be.calledOnceWith(...messages) + sinon.assert.calledOnceWithExactly(log.error, ...messages) done() }) }) it('should call port1.on before port1.unref', () => { - expect(port1On).to.be.calledBefore(port1Unref) + assert.strictEqual(port1On.calledBefore(port1Unref), true) }) }) }) @@ -377,7 +376,7 @@ describe('IAST Rewriter', () => { const location = { path: 'test', line: 42, column: 4 } rewriter.getOriginalPathAndLineFromSourceMap(location) - expect(getOriginalPathAndLineFromSourceMap).to.be.calledOnceWithExactly('test', 42, 4) + sinon.assert.calledOnceWithExactly(getOriginalPathAndLineFromSourceMap, 'test', 42, 4) }) it('should not call native getOriginalPathAndLineFromSourceMap if --enable-source-maps is present', () => { diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js index 40143c6bf29..e22a79021e9 100644 --- 
a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -156,10 +155,20 @@ describe('IAST TaintTracking Operations', () => { const result = taintTrackingOperations.taintObject(iastContext, obj, null) sinon.assert.calledTwice(taintedUtilsMock.newTaintedString) - expect(taintedUtilsMock.newTaintedString.firstCall).to.have.been - .calledWithExactly(transactionId, 'child', 'child.value', null) - expect(taintedUtilsMock.newTaintedString.secondCall).to.have.been - .calledWithExactly(transactionId, 'parent', 'value', null) + sinon.assert.calledWithExactly( + taintedUtilsMock.newTaintedString.firstCall, + transactionId, + 'child', + 'child.value', + null + ) + sinon.assert.calledWithExactly( + taintedUtilsMock.newTaintedString.secondCall, + transactionId, + 'parent', + 'value', + null + ) assert.deepStrictEqual(result, expected) }) @@ -227,9 +236,7 @@ describe('IAST TaintTracking Operations', () => { [taintTrackingOperations.IAST_TRANSACTION_ID]: transactionId } taintTrackingOperations.removeTransaction(iastContext) - expect(taintedUtils.removeTransaction).to.be.calledWithExactly( - transactionId - ) + sinon.assert.calledWithExactly(taintedUtils.removeTransaction, transactionId) assert.strictEqual(iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], undefined) }) @@ -258,7 +265,7 @@ describe('IAST TaintTracking Operations', () => { taintTrackingOperations.enableTaintOperations(iastTelemetry.verbosity) taintTrackingOperations.removeTransaction(iastContext) - expect(requestTaintedInc).to.be.calledOnceWith(iastContext, 5) + sinon.assert.calledOnceWithExactly(requestTaintedInc, iastContext, 5) }) }) @@ -325,7 +332,7 @@ 
describe('IAST TaintTracking Operations', () => { global._ddiast.plusOperator('helloworld', 'hello', 'world') sinon.assert.called(taintedUtils.concat) - expect(executedPropagationIncrease).to.be.calledOnceWith(context) + sinon.assert.calledOnceWithExactly(executedPropagationIncrease, context) }) }) @@ -339,8 +346,13 @@ describe('IAST TaintTracking Operations', () => { const type = 'REQUEST' taintTrackingOperations.newTaintedString(iastContext, value, param, type) sinon.assert.called(taintedUtils.newTaintedString) - expect(taintedUtils.newTaintedString).to.be - .calledWithExactly(iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], value, param, type) + sinon.assert.calledWithExactly( + taintedUtils.newTaintedString, + iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], + value, + param, + type + ) }) it('Given iastContext with undefined IAST_TRANSACTION_ID should not call TaintedUtils.newTaintedString', () => { @@ -373,8 +385,13 @@ describe('IAST TaintTracking Operations', () => { const type = 'REQUEST' taintTrackingOperations.newTaintedObject(iastContext, value, param, type) sinon.assert.called(taintedUtils.newTaintedObject) - expect(taintedUtils.newTaintedObject).to.be - .calledWithExactly(iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], value, param, type) + sinon.assert.calledWithExactly( + taintedUtils.newTaintedObject, + iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], + value, + param, + type + ) }) it('Given iastContext with undefined IAST_TRANSACTION_ID should not call TaintedUtils.newTaintedObject', () => { @@ -405,7 +422,8 @@ describe('IAST TaintTracking Operations', () => { const value = 'value' taintTrackingOperations.isTainted(iastContext, value) sinon.assert.called(taintedUtils.isTainted) - expect(taintedUtils.isTainted).to.be.calledWithExactly( + sinon.assert.calledWithExactly( + taintedUtils.isTainted, iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], value ) @@ -432,7 +450,8 @@ describe('IAST TaintTracking 
Operations', () => { const value = 'value' taintTrackingOperations.getRanges(iastContext, value) sinon.assert.called(taintedUtils.getRanges) - expect(taintedUtils.getRanges).to.be.calledWithExactly( + sinon.assert.calledWithExactly( + taintedUtils.getRanges, iastContext[taintTrackingOperations.IAST_TRANSACTION_ID], value ) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/iast-metric.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/iast-metric.spec.js index 3db8e17ac07..0d4f489622a 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/iast-metric.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/iast-metric.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -50,8 +49,8 @@ describe('Metrics', () => { metric.inc(context) - expect(reqNamespace.count).to.be.calledOnceWith(metric.name) - expect(inc).to.be.calledOnceWith(1) + sinon.assert.calledOnceWithMatch(reqNamespace.count, metric.name) + sinon.assert.calledOnceWithExactly(inc, 1) }) it('should add by 42 the metric value', () => { @@ -59,8 +58,8 @@ describe('Metrics', () => { metric.inc(context, 42) - expect(reqNamespace.count).to.be.calledOnceWith(metric.name) - expect(inc).to.be.calledOnceWith(42) + sinon.assert.calledOnceWithMatch(reqNamespace.count, metric.name) + sinon.assert.calledOnceWithExactly(inc, 42) }) it('should increase by one the metric tag value', () => { @@ -68,8 +67,8 @@ describe('Metrics', () => { metric.inc(context, 'tagKey:tag1') - expect(reqNamespace.count).to.be.calledOnceWith(metric.name, 'tagKey:tag1') - expect(inc).to.be.calledOnceWith(1) + sinon.assert.calledOnceWithExactly(reqNamespace.count, metric.name, 'tagKey:tag1') + sinon.assert.calledOnceWithExactly(inc, 1) }) it('should add by 42 the metric tag value', () => { @@ -77,8 +76,8 @@ describe('Metrics', () => { 
metric.inc(context, 'tagKey:tag1', 42) - expect(reqNamespace.count).to.be.calledOnceWith(metric.name, 'tagKey:tag1') - expect(inc).to.be.calledOnceWith(42) + sinon.assert.calledOnceWithExactly(reqNamespace.count, metric.name, 'tagKey:tag1') + sinon.assert.calledOnceWithExactly(inc, 42) }) it('should format tags according with its tagKey', () => { @@ -86,7 +85,7 @@ describe('Metrics', () => { metric.formatTags('tag1', 'tag2').forEach(tag => metric.inc(context, tag, 42)) - expect(reqNamespace.count).to.be.calledTwice + sinon.assert.calledTwice(reqNamespace.count) assert.deepStrictEqual(reqNamespace.count.firstCall.args, [metric.name, ['tagKey:tag1']]) assert.deepStrictEqual(reqNamespace.count.secondCall.args, [metric.name, ['tagKey:tag2']]) }) @@ -122,7 +121,7 @@ describe('Metrics', () => { const noTagged = new NoTaggedIastMetric('notagged', 'scope') noTagged.inc() - expect(inc).to.be.calledOnceWith(1) + sinon.assert.calledOnceWithExactly(inc, 1) }) it('should reuse previous metric when calling add multiple times', () => { @@ -134,7 +133,7 @@ describe('Metrics', () => { noTagged.inc(undefined, 42) noTagged.inc(undefined, 42) - expect(superCount).to.be.calledOnceWith('notagged') + sinon.assert.calledOnceWithMatch(superCount, 'notagged') }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js index be9949710d5..fe5b5543b58 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js @@ -3,7 +3,7 @@ const assert = require('node:assert/strict') const axios = require('axios') -const { expect } = require('chai') + const { after, afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -116,7 +116,7 @@ describe('Telemetry', () => { const iastContext = {} iastTelemetry.onRequestStart(iastContext) - 
expect(initRequestNamespace).to.be.calledOnceWith(iastContext) + sinon.assert.calledOnceWithExactly(initRequestNamespace, iastContext) }) it('should not call init if enabled and verbosity is Off', () => { @@ -133,7 +133,7 @@ describe('Telemetry', () => { const iastContext = {} iastTelemetry.onRequestStart(iastContext) - expect(initRequestNamespace).to.not.be.calledOnce + sinon.assert.notCalled(initRequestNamespace) }) }) @@ -144,7 +144,7 @@ describe('Telemetry', () => { const iastContext = {} iastTelemetry.onRequestEnd(iastContext) - expect(finalizeRequestNamespace).to.be.calledOnceWith(iastContext) + sinon.assert.calledOnceWithMatch(finalizeRequestNamespace, iastContext) }) it('should not call finalizeRequestNamespace if enabled and verbosity is Off', () => { @@ -161,7 +161,7 @@ describe('Telemetry', () => { const iastContext = {} iastTelemetry.onRequestEnd(iastContext) - expect(finalizeRequestNamespace).to.not.be.calledOnce + sinon.assert.notCalled(finalizeRequestNamespace) }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/logs.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/logs.spec.js index 1317d1d4975..a70e6721ec6 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/logs.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/logs.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, before, after } = require('mocha') const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -59,7 +58,7 @@ describe('Telemetry logs', () => { clock.tick(3000) - expect(start).to.be.calledOnceWith(config) - expect(send).to.be.calledOnceWith(config) + sinon.assert.calledOnceWithExactly(start, config) + sinon.assert.calledOnceWithMatch(send, config) }) }) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/namespaces.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/namespaces.spec.js index dce859d3d2c..7bf3530d5c6 100644 --- 
a/packages/dd-trace/test/appsec/iast/telemetry/namespaces.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/namespaces.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') const { @@ -87,9 +86,9 @@ describe('IAST metric namespaces', () => { finalizeRequestNamespace(context, rootSpan) - expect(count).to.be.calledTwice + sinon.assert.calledTwice(count) assert.deepStrictEqual(count.firstCall.args, [REQUEST_TAINTED, ['tag1:test']]) - expect(metric.inc).to.be.calledTwice + sinon.assert.calledTwice(metric.inc) assert.strictEqual(metric.inc.firstCall.args[0], 10) assert.deepStrictEqual(count.secondCall.args, [EXECUTED_SINK, undefined]) @@ -201,7 +200,7 @@ describe('IastNamespace', () => { namespace.getMetric('metric.name', tags) namespace.getMetric('metric.name', tags) - expect(count).to.be.calledOnceWith('metric.name', tags) + sinon.assert.calledOnceWithExactly(count, 'metric.name', tags) }) it('should reuse a previously created metric', () => { diff --git a/packages/dd-trace/test/appsec/iast/telemetry/span-tags.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/span-tags.spec.js index 51103445ce2..c6e5ad59fb0 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/span-tags.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/span-tags.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -35,7 +34,7 @@ describe('Telemetry Span tags', () => { addMetricsToSpan(rootSpan, metrics.series, tagPrefix) - expect(rootSpan.addTags).to.be.calledTwice + sinon.assert.calledTwice(rootSpan.addTags) assert.deepStrictEqual(rootSpan.addTags.firstCall.args[0], { '_dd.test.executed.source.source_type_1': 42 }) assert.deepStrictEqual(rootSpan.addTags.secondCall.args[0], { 
'_dd.test.executed.sink.sink_type_1': 3 }) }) @@ -49,7 +48,7 @@ describe('Telemetry Span tags', () => { addMetricsToSpan(rootSpan, metrics.series, tagPrefix) - expect(rootSpan.addTags).to.be.calledOnceWithExactly({ '_dd.test.executed.source.source_type_1': 74 }) + sinon.assert.calledOnceWithExactly(rootSpan.addTags, { '_dd.test.executed.source.source_type_1': 74 }) }) it('should add span tags with tag name like \'tagPrefix.metricName.tagKey\' for different tagged metrics', () => { @@ -63,7 +62,7 @@ describe('Telemetry Span tags', () => { addMetricsToSpan(rootSpan, metrics.series, tagPrefix) - expect(rootSpan.addTags).to.be.calledTwice + sinon.assert.calledTwice(rootSpan.addTags) assert.deepStrictEqual(rootSpan.addTags.firstCall.args[0], { '_dd.test.executed.source.source_type_1': 74 }) assert.deepStrictEqual(rootSpan.addTags.secondCall.args[0], { '_dd.test.executed.source.source_type_2': 2 }) }) @@ -75,6 +74,6 @@ describe('Telemetry Span tags', () => { addMetricsToSpan(rootSpan, metrics.series, tagPrefix) - expect(rootSpan.addTags).to.be.calledOnceWithExactly({ '_dd.test.request.tainted': 42 }) + sinon.assert.calledOnceWithExactly(rootSpan.addTags, { '_dd.test.request.tainted': 42 }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/utils.js b/packages/dd-trace/test/appsec/iast/utils.js index 93b376b1c5d..66e179a5af4 100644 --- a/packages/dd-trace/test/appsec/iast/utils.js +++ b/packages/dd-trace/test/appsec/iast/utils.js @@ -7,7 +7,7 @@ const path = require('node:path') const msgpack = require('@msgpack/msgpack') const axios = require('axios') -const { expect } = require('chai') + const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const iast = require('../../../src/appsec/iast') @@ -163,7 +163,7 @@ function checkNoVulnerabilityInRequest (vulnerability, config, done, makeRequest if (traces[0][0].type !== 'web') throw new Error('Not a web span') // iastJson == undefiend is valid const iastJson = traces[0][0].meta['_dd.iast.json'] || 
'' - expect(iastJson).to.not.include(`"${vulnerability}"`) + assert.ok(!(iastJson).includes(`"${vulnerability}"`)) }) .then(done) .catch(done) diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js index 10abfad787c..d7821bf8797 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const sinon = require('sinon') @@ -145,8 +144,11 @@ describe('Vulnerability formatter', () => { it('should set custom redaction patterns', () => { vulnerabilityFormatter.setRedactVulnerabilities(true, 'customNamePattern', 'customValuePattern') - expect(sensitiveHandler.setRedactionPatterns) - .to.have.been.calledOnceWithExactly('customNamePattern', 'customValuePattern') + sinon.assert.calledOnceWithExactly( + sensitiveHandler.setRedactionPatterns, + 'customNamePattern', + 'customValuePattern' + ) }) }) }) diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js index 141942f16e8..796b755efdb 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const sinon = require('sinon') const { addVulnerability, sendVulnerabilities, clearCache, start, stop } = require('../../../src/appsec/iast/vulnerability-reporter') @@ -144,8 +143,7 @@ describe('vulnerability-reporter', () => { '_dd.iast.json': '{"sources":[],"vulnerabilities":[{"type":"INSECURE_HASHING","hash":3410512655,' + 
'"evidence":{"value":"sha1"},"location":{"spanId":42,"stackId":"1","path":"filename.js","line":73}}]}' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(onTheFlySpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, onTheFlySpan, USER_KEEP, ASM) sinon.assert.calledOnce(onTheFlySpan.finish) }) @@ -385,10 +383,8 @@ describe('vulnerability-reporter', () => { }) sinon.assert.calledTwice(prioritySampler.setPriority) - expect(prioritySampler.setPriority.firstCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) - expect(prioritySampler.setPriority.secondCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.firstCall, span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.secondCall, span, USER_KEEP, ASM) }) it('should send multiple vulnerabilities with same tainted source', () => { @@ -445,10 +441,8 @@ describe('vulnerability-reporter', () => { }) sinon.assert.calledTwice(prioritySampler.setPriority) - expect(prioritySampler.setPriority.firstCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) - expect(prioritySampler.setPriority.secondCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.firstCall, span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.secondCall, span, USER_KEEP, ASM) }) it('should send once with multiple vulnerabilities', () => { @@ -470,12 +464,9 @@ describe('vulnerability-reporter', () => { '"location":{"spanId":-5,"path":"/path/to/file3.js","line":3}}]}' }) sinon.assert.calledThrice(prioritySampler.setPriority) - expect(prioritySampler.setPriority.firstCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) - expect(prioritySampler.setPriority.secondCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) - expect(prioritySampler.setPriority.thirdCall) - 
.to.have.been.calledWithExactly(span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.firstCall, span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.secondCall, span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.thirdCall, span, USER_KEEP, ASM) }) it('should send once vulnerability with one vulnerability', () => { @@ -536,10 +527,8 @@ describe('vulnerability-reporter', () => { '{"spanId":888,"path":"filename.js","line":88}}]}' }) sinon.assert.calledTwice(prioritySampler.setPriority) - expect(prioritySampler.setPriority.firstCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) - expect(prioritySampler.setPriority.secondCall) - .to.have.been.calledWithExactly(span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.firstCall, span, USER_KEEP, ASM) + sinon.assert.calledWithExactly(prioritySampler.setPriority.secondCall, span, USER_KEEP, ASM) }) }) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 800e44f10f3..efd4763ae70 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -4,7 +4,7 @@ const assert = require('node:assert/strict') const fs = require('node:fs') const axios = require('axios') -const { expect } = require('chai') + const { after, afterEach, before, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -178,8 +178,7 @@ describe('AppSec Index', function () { sinon.assert.calledOnceWithExactly(RuleManager.loadRules, config.appsec) sinon.assert.calledOnceWithExactly(Reporter.init, config.appsec) sinon.assert.calledOnceWithExactly(UserTracking.setCollectionMode, 'anon', false) - expect(incomingHttpRequestStart.subscribe) - .to.have.been.calledOnceWithExactly(AppSec.incomingHttpStartTranslator) + 
sinon.assert.calledOnceWithExactly(incomingHttpRequestStart.subscribe, AppSec.incomingHttpStartTranslator) sinon.assert.calledOnceWithExactly(incomingHttpRequestEnd.subscribe, AppSec.incomingHttpEndTranslator) sinon.assert.calledOnce(graphql.enable) }) @@ -257,13 +256,13 @@ describe('AppSec Index', function () { } AppSec.enable(config) - expect(appsecTelemetry.enable).to.be.calledOnceWithExactly(config) + sinon.assert.calledOnceWithExactly(appsecTelemetry.enable, config) }) it('should call rasp enable', () => { AppSec.enable(config) - expect(rasp.enable).to.be.calledOnceWithExactly(config) + sinon.assert.calledOnceWithExactly(rasp.enable, config) }) it('should not call rasp enable when rasp is disabled', () => { @@ -289,8 +288,7 @@ describe('AppSec Index', function () { AppSec.disable() sinon.assert.calledOnce(RuleManager.clearAllRules) - expect(incomingHttpRequestStart.unsubscribe) - .to.have.been.calledOnceWithExactly(AppSec.incomingHttpStartTranslator) + sinon.assert.calledOnceWithExactly(incomingHttpRequestStart.unsubscribe, AppSec.incomingHttpStartTranslator) sinon.assert.calledOnceWithExactly(incomingHttpRequestEnd.unsubscribe, AppSec.incomingHttpEndTranslator) sinon.assert.calledOnce(graphql.disable) sinon.assert.calledOnce(rasp.disable) @@ -422,7 +420,7 @@ describe('AppSec Index', function () { AppSec.incomingHttpEndTranslator({ req, res }) - expect(waf.run).to.have.not.been.called + sinon.assert.notCalled(waf.run) sinon.assert.calledOnceWithExactly(Reporter.finishRequest, req, res, {}, undefined) }) @@ -505,7 +503,7 @@ describe('AppSec Index', function () { AppSec.incomingHttpEndTranslator({ req, res }) - expect(waf.run).to.have.not.been.called + sinon.assert.notCalled(waf.run) sinon.assert.calledOnceWithExactly(Reporter.finishRequest, req, res, {}, undefined) }) @@ -696,8 +694,8 @@ describe('AppSec Index', function () { responseBody.publish({ req: {}, body: 'string' }) responseBody.publish({ req: {}, body: null }) - 
expect(apiSecuritySampler.sampleRequest).to.not.been.called - expect(waf.run).to.not.been.called + sinon.assert.notCalled(apiSecuritySampler.sampleRequest) + sinon.assert.notCalled(waf.run) }) it('should not call to the waf if it is not a sampled request', () => { @@ -708,7 +706,7 @@ describe('AppSec Index', function () { responseBody.publish({ req, res, body: {} }) sinon.assert.calledOnceWithMatch(apiSecuritySampler.sampleRequest, req, res) - expect(waf.run).to.not.been.called + sinon.assert.notCalled(waf.run) }) it('should call to the waf if it is a sampled request', () => { @@ -915,7 +913,7 @@ describe('AppSec Index', function () { passportVerify.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(web.root, req) sinon.assert.calledOnceWithExactly(UserTracking.trackLogin, payload.framework, @@ -942,7 +940,7 @@ describe('AppSec Index', function () { passportVerify.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(web.root, req) sinon.assert.calledOnceWithExactly(UserTracking.trackLogin, payload.framework, @@ -969,7 +967,7 @@ describe('AppSec Index', function () { passportVerify.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(log.warn, '[ASM] No rootSpan found in onPassportVerify') sinon.assert.notCalled(UserTracking.trackLogin) assert.strictEqual(abortController.signal.aborted, false) @@ -993,7 +991,7 @@ describe('AppSec Index', function () { passportUser.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(web.root, req) sinon.assert.calledOnceWithExactly(UserTracking.trackUser, payload.user, @@ -1014,7 
+1012,7 @@ describe('AppSec Index', function () { passportUser.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(web.root, req) sinon.assert.calledOnceWithExactly(UserTracking.trackUser, payload.user, @@ -1035,7 +1033,7 @@ describe('AppSec Index', function () { passportUser.publish(payload) - expect(storage('legacy').getStore).to.have.been.calledOnce + sinon.assert.calledOnce(storage('legacy').getStore) sinon.assert.calledOnceWithExactly(log.warn, '[ASM] No rootSpan found in onPassportDeserializeUser') sinon.assert.notCalled(UserTracking.trackUser) assert.strictEqual(abortController.signal.aborted, false) @@ -1164,14 +1162,14 @@ describe('AppSec Index', function () { } } }, req) - expect(abortController.abort).to.have.not.been.called - expect(res.constructor.prototype.end).to.have.not.been.called + sinon.assert.notCalled(abortController.abort) + sinon.assert.notCalled(res.constructor.prototype.end) responseWriteHead.publish({ req, res, abortController, statusCode: 404, responseHeaders }) sinon.assert.calledOnce(waf.run) - expect(abortController.abort).to.have.not.been.called - expect(res.constructor.prototype.end).to.have.not.been.called + sinon.assert.notCalled(abortController.abort) + sinon.assert.notCalled(res.constructor.prototype.end) }) it('should not do anything without a root span', () => { @@ -1186,9 +1184,9 @@ describe('AppSec Index', function () { responseWriteHead.publish({ req, res, abortController, statusCode: 404, responseHeaders }) - expect(waf.run).to.have.not.been.called - expect(abortController.abort).to.have.not.been.called - expect(res.constructor.prototype.end).to.have.not.been.called + sinon.assert.notCalled(waf.run) + sinon.assert.notCalled(abortController.abort) + sinon.assert.notCalled(res.constructor.prototype.end) }) it('should call the WAF with responde code and headers', () => { @@ -1240,7 +1238,7 @@ describe('AppSec 
Index', function () { it('should not call abortController if response was not blocked', () => { responseSetHeader.publish({ res, abortController }) - expect(abortController.abort).to.have.not.been.calledOnce + sinon.assert.notCalled(abortController.abort) }) }) }) diff --git a/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js b/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js index ced35eff30a..09b58f936f2 100644 --- a/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js +++ b/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js @@ -71,12 +71,12 @@ describe('RASP - command_injection - integration', () => { const evalSerie = series.find(s => s.metric === 'rasp.rule.eval') const matchSerie = series.find(s => s.metric === 'rasp.rule.match') - assert.ok(evalSerie != null) + assert.ok(evalSerie) assert.ok(evalSerie.tags.includes('rule_type:command_injection')) assert.ok(evalSerie.tags.includes(`rule_variant:${variant}`)) assert.strictEqual(evalSerie.type, 'count') - assert.ok(matchSerie != null) + assert.ok(matchSerie) assert.ok(matchSerie.tags.includes('rule_type:command_injection')) assert.ok(matchSerie.tags.includes(`rule_variant:${variant}`)) assert.strictEqual(matchSerie.type, 'count') diff --git a/packages/dd-trace/test/appsec/rasp/rasp-metrics.integration.spec.js b/packages/dd-trace/test/appsec/rasp/rasp-metrics.integration.spec.js index 744b035323e..943a3d434a7 100644 --- a/packages/dd-trace/test/appsec/rasp/rasp-metrics.integration.spec.js +++ b/packages/dd-trace/test/appsec/rasp/rasp-metrics.integration.spec.js @@ -30,9 +30,9 @@ describe('RASP metrics', () => { DD_TRACE_AGENT_PORT: agent.port, DD_APPSEC_ENABLED: 'true', DD_APPSEC_RASP_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1, + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', DD_APPSEC_RULES: path.join(cwd, 'resources', 'rasp_rules.json'), - DD_APPSEC_WAF_TIMEOUT: 0.1 + DD_APPSEC_WAF_TIMEOUT: '0.1' } }) axios = 
Axios.create({ baseURL: proc.url }) @@ -62,7 +62,7 @@ describe('RASP metrics', () => { const series = payload.payload.series const errorSerie = series.find(s => s.metric === 'rasp.error') - assert.ok(errorSerie != null) + assert.ok(errorSerie) assert.ok(errorSerie.tags.includes('waf_error:-127')) assert.strictEqual(errorSerie.type, 'count') } @@ -83,8 +83,8 @@ describe('RASP metrics', () => { DD_TRACE_AGENT_PORT: agent.port, DD_APPSEC_ENABLED: 'true', DD_APPSEC_RASP_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1, - DD_APPSEC_WAF_TIMEOUT: 1 + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', + DD_APPSEC_WAF_TIMEOUT: '1' } }) axios = Axios.create({ baseURL: proc.url }) @@ -112,7 +112,7 @@ describe('RASP metrics', () => { const series = payload.payload.series const timeoutSerie = series.find(s => s.metric === 'rasp.timeout') - assert.ok(timeoutSerie != null) + assert.ok(timeoutSerie) assert.ok(timeoutSerie.tags.includes('rule_type:command_injection')) assert.ok(timeoutSerie.tags.includes('rule_variant:shell')) assert.strictEqual(timeoutSerie.type, 'count') diff --git a/packages/dd-trace/test/appsec/rasp/utils.spec.js b/packages/dd-trace/test/appsec/rasp/utils.spec.js index 897c478aa69..ed745f40e25 100644 --- a/packages/dd-trace/test/appsec/rasp/utils.spec.js +++ b/packages/dd-trace/test/appsec/rasp/utils.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -215,9 +214,8 @@ describe('RASP - utils.js', () => { assert.strictEqual(error.message, 'DatadogRaspAbortError') assert.strictEqual(error.blockingAction, blockingAction) assert.strictEqual(error.raspRule, raspRule) - assert.ok(Object.hasOwn(error, 'req')) - assert.ok(Object.hasOwn(error, 'res')) - expect(Object.keys(error)).to.not.include.members(['req', 'res']) + assert.strictEqual(Object.getOwnPropertyDescriptor(error, 'req')?.enumerable, 
false) + assert.strictEqual(Object.getOwnPropertyDescriptor(error, 'res')?.enumerable, false) }) }) }) diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index 8ebe06ad9e6..7b758264105 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -3,7 +3,6 @@ const assert = require('node:assert/strict') const zlib = require('node:zlib') -const { expect } = require('chai') const dc = require('dc-polyfill') const { after, afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -342,28 +341,28 @@ describe('reporter', () => { Reporter.reportWafConfigUpdate(product, rcConfigId, diagnostics) sinon.assert.calledThrice(telemetryLogHandlerAssert) - expect(telemetryLogHandlerAssert.getCall(0)).to.have.been.calledWithExactly({ + assert.strictEqual(telemetryLogHandlerAssert.getCall(0).calledWithExactly({ message: '"missing key operator": ["blk-001-001"]', level: 'ERROR', tags: 'log_type:rc::asm_dd::diagnostic,appsec_config_key:rules,rc_config_id:1' - }, 'datadog:telemetry:log') - expect(telemetryLogHandlerAssert.getCall(1)).to.have.been.calledWithExactly({ + }, 'datadog:telemetry:log'), true) + assert.strictEqual(telemetryLogHandlerAssert.getCall(1).calledWithExactly({ message: '"invalid tag": ["blk-001-001"]', level: 'WARN', tags: 'log_type:rc::asm_dd::diagnostic,appsec_config_key:rules,rc_config_id:1' - }, 'datadog:telemetry:log') - expect(telemetryLogHandlerAssert.getCall(2)).to.have.been.calledWithExactly({ + }, 'datadog:telemetry:log'), true) + assert.strictEqual(telemetryLogHandlerAssert.getCall(2).calledWithExactly({ message: '"no mappings defined": ["http-endpoint-fingerprint"]', level: 'ERROR', tags: 'log_type:rc::asm_dd::diagnostic,appsec_config_key:processors,rc_config_id:1' - }, 'datadog:telemetry:log') + }, 'datadog:telemetry:log'), true) }) it('should increment waf.config_errors metric', () => { 
Reporter.reportWafConfigUpdate(product, rcConfigId, diagnostics, '1.24.1') sinon.assert.calledTwice(telemetry.incrementWafConfigErrorsMetric) - expect(telemetry.incrementWafConfigErrorsMetric).to.always.have.been.calledWithExactly('1.24.1', '1.42.11') + sinon.assert.calledWithExactly(telemetry.incrementWafConfigErrorsMetric, '1.24.1', '1.42.11') }) }) @@ -718,7 +717,7 @@ describe('reporter', () => { it('should call addTags with an empty array', () => { Reporter.reportAttributes([]) - expect(span.addTags).to.be.calledOnceWithExactly({}) + sinon.assert.calledOnceWithExactly(span.addTags, {}) }) it('should call addTags', () => { @@ -740,7 +739,7 @@ describe('reporter', () => { Reporter.reportAttributes(attributes) const schemaEncoded = zlib.gzipSync(JSON.stringify(schemaValue)).toString('base64') - expect(span.addTags).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(span.addTags, { '_dd.appsec.fp.http.endpoint': 'endpoint_fingerprint', '_dd.appsec.fp.http.header': 'header_fingerprint', '_dd.appsec.fp.http.network': 'network_fingerprint', @@ -1014,7 +1013,7 @@ describe('reporter', () => { const res = {} Reporter.finishRequest(req, res) - expect(telemetry.incrementWafRequestsMetric).to.be.calledOnceWithExactly(req) + sinon.assert.calledOnceWithExactly(telemetry.incrementWafRequestsMetric, req) }) it('should set waf.duration tags if there are metrics stored', () => { diff --git a/packages/dd-trace/test/appsec/rule_manager.spec.js b/packages/dd-trace/test/appsec/rule_manager.spec.js index 871e5fc8e3d..07f4854c12a 100644 --- a/packages/dd-trace/test/appsec/rule_manager.spec.js +++ b/packages/dd-trace/test/appsec/rule_manager.spec.js @@ -235,9 +235,11 @@ describe('AppSec Rule Manager', () => { ) assert.strictEqual(waf.wafManager.ddwaf.configPaths.length, 3) - assert.ok(waf.wafManager.ddwaf.configPaths.includes(waf.wafManager.constructor.defaultWafConfigPath)) - assert.ok(waf.wafManager.ddwaf.configPaths.includes(rcConfigs.toApply[0].path)) - 
assert.ok(waf.wafManager.ddwaf.configPaths.includes(rcConfigs.toModify[0].path)) + assert.deepStrictEqual(waf.wafManager.ddwaf.configPaths.sort(), [ + waf.wafManager.constructor.defaultWafConfigPath, + rcConfigs.toApply[0].path, + rcConfigs.toModify[0].path + ].sort()) }) it('should update apply_state and apply_error on successful apply', () => { diff --git a/packages/dd-trace/test/appsec/sdk/set_user.spec.js b/packages/dd-trace/test/appsec/sdk/set_user.spec.js index c2d30631577..394a574282c 100644 --- a/packages/dd-trace/test/appsec/sdk/set_user.spec.js +++ b/packages/dd-trace/test/appsec/sdk/set_user.spec.js @@ -4,7 +4,7 @@ const assert = require('node:assert/strict') const path = require('node:path') const axios = require('axios') -const { expect } = require('chai') + const { after, before, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -63,7 +63,7 @@ describe('set_user', () => { getRootSpan.returns(undefined) setUser(tracer, { id: 'user' }) - expect(getRootSpan).to.be.calledOnceWithExactly(tracer) + sinon.assert.calledOnceWithExactly(getRootSpan, tracer) sinon.assert.calledOnceWithExactly(log.warn, '[ASM] Root span not available in setUser') sinon.assert.notCalled(rootSpan.setTag) sinon.assert.notCalled(waf.run) @@ -80,11 +80,11 @@ describe('set_user', () => { setUser(tracer, user) sinon.assert.notCalled(log.warn) assert.strictEqual(rootSpan.setTag.callCount, 5) - expect(rootSpan.setTag.getCall(0)).to.have.been.calledWithExactly('usr.id', '123') - expect(rootSpan.setTag.getCall(1)).to.have.been.calledWithExactly('usr.email', 'a@b.c') - expect(rootSpan.setTag.getCall(2)).to.have.been.calledWithExactly('usr.custom', 'hello') - expect(rootSpan.setTag.getCall(3)).to.have.been.calledWithExactly('usr.session_id', '133769') - expect(rootSpan.setTag.getCall(4)).to.have.been.calledWithExactly('_dd.appsec.user.collection_mode', 'sdk') + 
assert.strictEqual(rootSpan.setTag.getCall(0).calledWithExactly('usr.id', '123'), true) + assert.strictEqual(rootSpan.setTag.getCall(1).calledWithExactly('usr.email', 'a@b.c'), true) + assert.strictEqual(rootSpan.setTag.getCall(2).calledWithExactly('usr.custom', 'hello'), true) + assert.strictEqual(rootSpan.setTag.getCall(3).calledWithExactly('usr.session_id', '133769'), true) + assert.strictEqual(rootSpan.setTag.getCall(4).calledWithExactly('_dd.appsec.user.collection_mode', 'sdk'), true) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { 'usr.id': '123', diff --git a/packages/dd-trace/test/appsec/sdk/track_event.spec.js b/packages/dd-trace/test/appsec/sdk/track_event.spec.js index 0b7387f6117..e266267d7a2 100644 --- a/packages/dd-trace/test/appsec/sdk/track_event.spec.js +++ b/packages/dd-trace/test/appsec/sdk/track_event.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach } = require('mocha') const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -72,10 +71,8 @@ describe('track_event - Internal API', () => { trackUserLoginSuccessEvent(tracer, {}, { key: 'value' }) sinon.assert.calledTwice(log.warn) - expect(log.warn.firstCall) - .to.have.been.calledWithExactly('[ASM] Invalid user provided to trackUserLoginSuccessEvent') - expect(log.warn.secondCall) - .to.have.been.calledWithExactly('[ASM] Invalid user provided to trackUserLoginSuccessEvent') + sinon.assert.calledWithExactly(log.warn.firstCall, '[ASM] Invalid user provided to trackUserLoginSuccessEvent') + sinon.assert.calledWithExactly(log.warn.secondCall, '[ASM] Invalid user provided to trackUserLoginSuccessEvent') sinon.assert.notCalled(setUserTags) sinon.assert.notCalled(rootSpan.addTags) sinon.assert.notCalled(telemetry.incrementSdkEventMetric) @@ -86,8 +83,7 @@ describe('track_event - Internal API', () => { trackUserLoginSuccessEvent(tracer, { id: 'user_id' }, { key: 'value' }) - expect(log.warn) - 
.to.have.been.calledOnceWithExactly('[ASM] Root span not available in trackUserLoginSuccessEvent') + sinon.assert.calledOnceWithExactly(log.warn, '[ASM] Root span not available in trackUserLoginSuccessEvent') sinon.assert.notCalled(setUserTags) sinon.assert.calledWithExactly(telemetry.incrementSdkEventMetric, 'login_success', 'v1') }) @@ -111,8 +107,7 @@ describe('track_event - Internal API', () => { 'appsec.events.users.login.success.metakey2': 'metaValue2', 'appsec.events.users.login.success.metakey3': 'metaValue3' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_SUCCESS]: null, @@ -135,8 +130,7 @@ describe('track_event - Internal API', () => { '_dd.appsec.events.users.login.success.sdk': 'true', 'appsec.events.users.login.success.usr.login': 'user_id' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_SUCCESS]: null, @@ -159,8 +153,7 @@ describe('track_event - Internal API', () => { '_dd.appsec.events.users.login.success.sdk': 'true', 'appsec.events.users.login.success.usr.login': 'user_login' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_SUCCESS]: null, @@ -178,10 +171,8 @@ describe('track_event - Internal API', () => { trackUserLoginFailureEvent(tracer, [], false, { key: 'value' }) sinon.assert.calledTwice(log.warn) - expect(log.warn.firstCall) - .to.have.been.calledWithExactly('[ASM] Invalid userId provided to 
trackUserLoginFailureEvent') - expect(log.warn.secondCall) - .to.have.been.calledWithExactly('[ASM] Invalid userId provided to trackUserLoginFailureEvent') + sinon.assert.calledWithExactly(log.warn.firstCall, '[ASM] Invalid userId provided to trackUserLoginFailureEvent') + sinon.assert.calledWithExactly(log.warn.secondCall, '[ASM] Invalid userId provided to trackUserLoginFailureEvent') sinon.assert.notCalled(setUserTags) sinon.assert.notCalled(rootSpan.addTags) sinon.assert.notCalled(telemetry.incrementSdkEventMetric) @@ -192,8 +183,7 @@ describe('track_event - Internal API', () => { trackUserLoginFailureEvent(tracer, 'user_id', false, { key: 'value' }) - expect(log.warn) - .to.have.been.calledOnceWithExactly('[ASM] Root span not available in %s', 'trackUserLoginFailureEvent') + sinon.assert.calledOnceWithExactly(log.warn, '[ASM] Root span not available in %s', 'trackUserLoginFailureEvent') sinon.assert.notCalled(setUserTags) sinon.assert.calledWithExactly(telemetry.incrementSdkEventMetric, 'login_failure', 'v1') }) @@ -217,8 +207,7 @@ describe('track_event - Internal API', () => { 'appsec.events.users.login.failure.metakey2': 'metaValue2', 'appsec.events.users.login.failure.metakey3': 'metaValue3' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_FAILURE]: null, @@ -247,8 +236,7 @@ describe('track_event - Internal API', () => { 'appsec.events.users.login.failure.metakey2': 'metaValue2', 'appsec.events.users.login.failure.metakey3': 'metaValue3' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_FAILURE]: null, @@ -270,8 +258,7 @@ 
describe('track_event - Internal API', () => { 'appsec.events.users.login.failure.usr.login': 'user_id', 'appsec.events.users.login.failure.usr.exists': 'true' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledOnceWithExactly(waf.run, { persistent: { [LOGIN_FAILURE]: null, @@ -288,10 +275,8 @@ describe('track_event - Internal API', () => { trackCustomEvent(tracer, { name: 'name' }) sinon.assert.calledTwice(log.warn) - expect(log.warn.firstCall) - .to.have.been.calledWithExactly('[ASM] Invalid eventName provided to trackCustomEvent') - expect(log.warn.secondCall) - .to.have.been.calledWithExactly('[ASM] Invalid eventName provided to trackCustomEvent') + sinon.assert.calledWithExactly(log.warn.firstCall, '[ASM] Invalid eventName provided to trackCustomEvent') + sinon.assert.calledWithExactly(log.warn.secondCall, '[ASM] Invalid eventName provided to trackCustomEvent') sinon.assert.notCalled(setUserTags) sinon.assert.notCalled(rootSpan.addTags) sinon.assert.notCalled(telemetry.incrementSdkEventMetric) @@ -302,8 +287,7 @@ describe('track_event - Internal API', () => { trackCustomEvent(tracer, 'custom_event') - expect(log.warn) - .to.have.been.calledOnceWithExactly('[ASM] Root span not available in %s', 'trackCustomEvent') + sinon.assert.calledOnceWithExactly(log.warn, '[ASM] Root span not available in %s', 'trackCustomEvent') sinon.assert.notCalled(setUserTags) sinon.assert.calledWithExactly(telemetry.incrementSdkEventMetric, 'custom', 'v1') }) @@ -322,8 +306,7 @@ describe('track_event - Internal API', () => { 'appsec.events.custom_event.metaKey1': 'metaValue1', 'appsec.events.custom_event.metakey2': 'metaValue2' }) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) 
sinon.assert.notCalled(waf.run) sinon.assert.calledWithExactly(telemetry.incrementSdkEventMetric, 'custom', 'v1') }) @@ -338,8 +321,7 @@ describe('track_event - Internal API', () => { '_dd.appsec.events.custom_event.sdk': 'true' }) sinon.assert.notCalled(waf.run) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) sinon.assert.calledWithExactly(telemetry.incrementSdkEventMetric, 'custom', 'v1') }) @@ -371,8 +353,10 @@ describe('track_event - Internal API', () => { trackUserLoginSuccessV2(tracer, 'login') - expect(log.warn) - .to.have.been.calledOnceWithExactly('[ASM] Root span not available in eventTrackingV2.trackUserLoginSuccess') + sinon.assert.calledOnceWithExactly( + log.warn, + '[ASM] Root span not available in eventTrackingV2.trackUserLoginSuccess' + ) sinon.assert.notCalled(setUserTags) }) @@ -381,10 +365,14 @@ describe('track_event - Internal API', () => { trackUserLoginSuccessV2(tracer, {}) sinon.assert.calledTwice(log.warn) - expect(log.warn.firstCall) - .to.have.been.calledWithExactly('[ASM] Invalid login provided to eventTrackingV2.trackUserLoginSuccess') - expect(log.warn.secondCall) - .to.have.been.calledWithExactly('[ASM] Invalid login provided to eventTrackingV2.trackUserLoginSuccess') + sinon.assert.calledWithExactly( + log.warn.firstCall, + '[ASM] Invalid login provided to eventTrackingV2.trackUserLoginSuccess' + ) + sinon.assert.calledWithExactly( + log.warn.secondCall, + '[ASM] Invalid login provided to eventTrackingV2.trackUserLoginSuccess' + ) sinon.assert.notCalled(setUserTags) sinon.assert.notCalled(rootSpan.addTags) sinon.assert.notCalled(waf.run) @@ -590,8 +578,7 @@ describe('track_event - Internal API', () => { it('should keep the trace', () => { trackUserLoginSuccessV2(tracer, 'login') - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + 
sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) }) it('should update the metrics', () => { @@ -607,8 +594,10 @@ describe('track_event - Internal API', () => { trackUserLoginFailureV2(tracer, 'login', false) - expect(log.warn) - .to.have.been.calledOnceWithExactly('[ASM] Root span not available in eventTrackingV2.trackUserLoginFailure') + sinon.assert.calledOnceWithExactly( + log.warn, + '[ASM] Root span not available in eventTrackingV2.trackUserLoginFailure' + ) sinon.assert.notCalled(setUserTags) }) @@ -617,10 +606,14 @@ describe('track_event - Internal API', () => { trackUserLoginFailureV2(tracer, {}, false) sinon.assert.calledTwice(log.warn) - expect(log.warn.firstCall) - .to.have.been.calledWithExactly('[ASM] Invalid login provided to eventTrackingV2.trackUserLoginFailure') - expect(log.warn.secondCall) - .to.have.been.calledWithExactly('[ASM] Invalid login provided to eventTrackingV2.trackUserLoginFailure') + sinon.assert.calledWithExactly( + log.warn.firstCall, + '[ASM] Invalid login provided to eventTrackingV2.trackUserLoginFailure' + ) + sinon.assert.calledWithExactly( + log.warn.secondCall, + '[ASM] Invalid login provided to eventTrackingV2.trackUserLoginFailure' + ) sinon.assert.notCalled(setUserTags) sinon.assert.notCalled(rootSpan.addTags) sinon.assert.notCalled(waf.run) @@ -815,8 +808,7 @@ describe('track_event - Internal API', () => { it('should keep the trace', () => { trackUserLoginFailureV2(tracer, 'login', true) - expect(prioritySampler.setPriority) - .to.have.been.calledOnceWithExactly(rootSpan, USER_KEEP, ASM) + sinon.assert.calledOnceWithExactly(prioritySampler.setPriority, rootSpan, USER_KEEP, ASM) }) it('should update the metrics', () => { diff --git a/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js b/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js index 5b1ae6e6567..9fb9ffe718b 100644 --- a/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js +++ 
b/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { before, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -114,7 +113,7 @@ describe('user_blocking - Internal API', () => { const ret = userBlocking.blockRequest(tracer) assert.strictEqual(ret, true) sinon.assert.calledOnce(legacyStorage.getStore) - expect(block).to.be.calledOnceWithExactly(req, res, rootSpan) + sinon.assert.calledOnceWithExactly(block, req, res, rootSpan) }) it('should log warning when req or res is not available', () => { @@ -123,8 +122,7 @@ describe('user_blocking - Internal API', () => { const ret = userBlocking.blockRequest(tracer) assert.strictEqual(ret, false) sinon.assert.calledOnce(legacyStorage.getStore) - expect(log.warn) - .to.have.been.calledOnceWithExactly('[ASM] Requests or response object not available in blockRequest') + sinon.assert.calledOnceWithExactly(log.warn, '[ASM] Requests or response object not available in blockRequest') sinon.assert.notCalled(block) }) diff --git a/packages/dd-trace/test/appsec/waf-metrics.integration.spec.js b/packages/dd-trace/test/appsec/waf-metrics.integration.spec.js index c6b086256e4..ab20b01339d 100644 --- a/packages/dd-trace/test/appsec/waf-metrics.integration.spec.js +++ b/packages/dd-trace/test/appsec/waf-metrics.integration.spec.js @@ -29,8 +29,8 @@ describe('WAF Metrics', () => { env: { DD_TRACE_AGENT_PORT: agent.port, DD_APPSEC_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1, - DD_APPSEC_WAF_TIMEOUT: 0.1 + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', + DD_APPSEC_WAF_TIMEOUT: '0.1' } }) axios = Axios.create({ baseURL: proc.url }) @@ -63,13 +63,13 @@ describe('WAF Metrics', () => { const series = payload.payload.series const wafRequests = series.find(s => s.metric === 'waf.requests') - assert.ok(wafRequests != null) + assert.ok(wafRequests) 
assert.strictEqual(wafRequests.type, 'count') assert.ok(wafRequests.tags.includes('waf_error:true')) assert.ok(wafRequests.tags.includes('rate_limited:false')) const wafError = series.find(s => s.metric === 'waf.error') - assert.ok(wafError != null) + assert.ok(wafError) assert.strictEqual(wafError.type, 'count') assert.ok(wafError.tags.includes('waf_error:-127')) } @@ -91,8 +91,8 @@ describe('WAF Metrics', () => { env: { DD_TRACE_AGENT_PORT: agent.port, DD_APPSEC_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1, - DD_APPSEC_WAF_TIMEOUT: 1 + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', + DD_APPSEC_WAF_TIMEOUT: '1' } }) axios = Axios.create({ baseURL: proc.url }) @@ -122,7 +122,7 @@ describe('WAF Metrics', () => { const series = payload.payload.series const wafRequests = series.find(s => s.metric === 'waf.requests') - assert.ok(wafRequests != null) + assert.ok(wafRequests) assert.strictEqual(wafRequests.type, 'count') assert.ok(wafRequests.tags.includes('waf_timeout:true')) } @@ -144,7 +144,7 @@ describe('WAF Metrics', () => { env: { DD_TRACE_AGENT_PORT: agent.port, DD_APPSEC_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1 + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1' } }) axios = Axios.create({ baseURL: proc.url }) @@ -176,12 +176,12 @@ describe('WAF Metrics', () => { const series = payload.payload.series const inputTruncated = series.find(s => s.metric === 'waf.input_truncated') - assert.ok(inputTruncated != null) + assert.ok(inputTruncated) assert.strictEqual(inputTruncated.type, 'count') assert.ok(inputTruncated.tags.includes('truncation_reason:7')) const wafRequests = series.find(s => s.metric === 'waf.requests') - assert.ok(wafRequests != null) + assert.ok(wafRequests) assert.ok(wafRequests.tags.includes('input_truncated:true')) } }, 'generate-metrics', 30_000, 2) diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index ed6629e6674..f585f1c817d 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js 
+++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -1,8 +1,7 @@ 'use strict' -const assert = require('node:assert') +const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -107,7 +106,7 @@ describe('WAF Manager', () => { try { waf.init(rules, config.appsec) - expect.fail('waf init should have thrown an error') + assert.fail('waf init should have thrown an error') } catch (err) { assert.strictEqual(err, error) sinon.assert.calledWith(Reporter.reportWafInit, '1.2.3', 'unknown') @@ -131,7 +130,7 @@ describe('WAF Manager', () => { const req = {} waf.run(payload, req, 'ssrf') - expect(run).to.be.calledOnceWithExactly(payload, 'ssrf') + sinon.assert.calledOnceWithExactly(run, payload, 'ssrf') }) it('should call wafManager.run without raspRuleType', () => { @@ -143,7 +142,7 @@ describe('WAF Manager', () => { const req = {} waf.run(payload, req) - expect(run).to.be.calledOnceWithExactly(payload, undefined) + sinon.assert.calledOnceWithExactly(run, payload, undefined) }) describe('sampling priority', () => { @@ -436,7 +435,7 @@ describe('WAF Manager', () => { } }) - expect(ddwafContext.run).to.be.calledOnceWithExactly({ + sinon.assert.calledOnceWithExactly(ddwafContext.run, { persistent: { 'server.request.headers.no_cookies': { header: 'value' }, 'server.request.uri.raw': 'https://testurl', @@ -461,7 +460,7 @@ describe('WAF Manager', () => { wafContextWrapper.run(params) - expect(Reporter.reportAttack).to.be.calledOnceWith(match({ events: ['ATTACK DATA'] })) + sinon.assert.calledOnceWithExactly(Reporter.reportAttack, match({ events: ['ATTACK DATA'] })) }) it('should report if rule is triggered', () => { @@ -585,7 +584,7 @@ describe('WAF Manager', () => { ddwafContext.run.returns(result) wafContextWrapper.run(params) - expect(Reporter.reportAttributes).to.be.calledOnceWithExactly(result.attributes) + 
sinon.assert.calledOnceWithExactly(Reporter.reportAttributes, result.attributes) }) it('should report fingerprints when ddwafContext returns fingerprints in results attributes', () => { diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js index e09022c74ef..962703aa3f6 100644 --- a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js +++ b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -66,7 +65,7 @@ describe('WAFContextWrapper', () => { wafContextWrapper.run(payload) sinon.assert.calledTwice(ddwafContext.run) - expect(ddwafContext.run).to.always.have.been.calledWithExactly(payload, 1000) + sinon.assert.calledWithExactly(ddwafContext.run, payload, 1000) const firstCall = Reporter.reportMetrics.getCall(0).args[0] assert.strictEqual(firstCall.errorCode, -127) @@ -147,7 +146,7 @@ describe('WAFContextWrapper', () => { wafContextWrapper.run(payload) wafRunFinished.unsubscribe(finishedCallback) - expect(finishedCallback).to.be.calledOnceWith({ payload }) + sinon.assert.calledOnceWithMatch(finishedCallback, { payload }) }) it('should report error code when the waf run fails', () => { diff --git a/packages/dd-trace/test/ci-visibility/dynamic-instrumentation/dynamic-instrumentation.spec.js b/packages/dd-trace/test/ci-visibility/dynamic-instrumentation/dynamic-instrumentation.spec.js index 5b142146dd7..262fe3ffbf4 100644 --- a/packages/dd-trace/test/ci-visibility/dynamic-instrumentation/dynamic-instrumentation.spec.js +++ b/packages/dd-trace/test/ci-visibility/dynamic-instrumentation/dynamic-instrumentation.spec.js @@ -24,9 +24,9 @@ describe('test visibility with dynamic instrumentation', () => { childProcess = 
fork(path.join(__dirname, 'target-app', 'test-visibility-dynamic-instrumentation-script.js')) childProcess.on('message', ({ snapshot: { language, stack, probe, captures }, probeId }) => { - assert.ok(probeId != null) - assert.ok(probe != null) - assert.ok(stack != null) + assert.ok(probeId) + assert.ok(probe) + assert.ok(stack) assert.strictEqual(language, 'javascript') assert.deepStrictEqual(captures, { diff --git a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js index 417ef63c1b0..79521c49c76 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { assertObjectContains } = require('../../../../../../integration-tests/helpers') const { describe, it, beforeEach, context } = require('tap').mocha @@ -64,8 +63,8 @@ describe('AgentProxyCiVisibilityExporter', () => { await agentProxyCiVisibilityExporter._canUseCiVisProtocolPromise - expect(agentProxyCiVisibilityExporter.getUncodedTraces()).not.to.include(trace) - expect(agentProxyCiVisibilityExporter._coverageBuffer).not.to.include(coverage) + assert.ok(!(agentProxyCiVisibilityExporter.getUncodedTraces()).includes(trace)) + assert.ok(!(agentProxyCiVisibilityExporter._coverageBuffer).includes(coverage)) // old traces and coverages are exported at once sinon.assert.calledWith(agentProxyCiVisibilityExporter.export, trace) sinon.assert.calledWith(agentProxyCiVisibilityExporter.exportCoverage, coverage) diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index c63f6000c6b..14b4844da09 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ 
b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { assertObjectContains } = require('../../../../../integration-tests/helpers') const { describe, it, beforeEach, afterEach, context } = require('tap').mocha @@ -948,7 +947,7 @@ describe('CI Visibility Exporter', () => { }, diLog ) - expect(ciVisibilityExporter._logsWriter.append).to.be.calledWith(sinon.match({ + sinon.assert.calledWith(ciVisibilityExporter._logsWriter.append, sinon.match({ ddtags: 'git.repository_url:https://github.com/datadog/dd-trace-js.git,git.commit.sha:1234', level: 'error', ddsource: 'dd_debugger', diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 83d6b6da389..6735277678f 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const sinon = require('sinon') const { it, describe, beforeEach, afterEach, context } = require('tap').mocha const proxyquire = require('proxyquire') @@ -38,6 +37,8 @@ describe('Config', () => { const BLOCKED_TEMPLATE_GRAPHQL_PATH = require.resolve('./fixtures/config/appsec-blocked-graphql-template.json') const BLOCKED_TEMPLATE_GRAPHQL = readFileSync(BLOCKED_TEMPLATE_GRAPHQL_PATH, { encoding: 'utf8' }) + const comparator = (a, b) => a.name.localeCompare(b.name) || a.origin.localeCompare(b.origin) + function reloadLoggerAndConfig () { log = proxyquire('../src/log', {}) log.use = sinon.spy() @@ -130,7 +131,7 @@ describe('Config', () => { process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED = 'true' assert.strictEqual(process.env.DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED, undefined) const config = getConfig() - expect(config).to.have.property('runtimeMetricsRuntimeId', true) + assert.strictEqual(config.runtimeMetricsRuntimeId, true) 
assert.strictEqual(getEnvironmentVariable('DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED'), 'true') delete process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED @@ -159,9 +160,11 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('debug', true) - expect(config).to.have.property('logger', undefined) - expect(config).to.have.property('logLevel', 'error') + assertObjectContains(config, { + debug: true, + logger: undefined, + logLevel: 'error' + }) }) it('should initialize from environment variables with DD env vars taking precedence OTEL env vars', () => { @@ -188,19 +191,28 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('debug', false) - expect(config).to.have.property('service', 'service') - expect(config).to.have.property('logLevel', 'error') - expect(config).to.have.property('sampleRate', 0.5) - expect(config).to.have.nested.property('runtimeMetrics.enabled', true) - expect(config.tags).to.include({ foo: 'bar', baz: 'qux' }) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['b3', 'tracecontext']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['b3', 'tracecontext']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.otelPropagators', false) + assertObjectContains(config, { + debug: false, + service: 'service', + logLevel: 'error', + sampleRate: 0.5, + runtimeMetrics: { + enabled: true + }, + tags: { + foo: 'bar', + baz: 'qux' + }, + tracePropagationStyle: { + inject: ['b3', 'tracecontext'], + extract: ['b3', 'tracecontext'], + otelPropagators: false + } + }) const indexFile = require('../src/index') const proxy = require('../src/proxy') - expect(indexFile).to.equal(proxy) + assert.strictEqual(indexFile, proxy) }) it('should initialize with OTEL environment variables when DD env vars are not set', () => { @@ -218,20 +230,29 @@ describe('Config', () => { const config = getConfig() - 
expect(config).to.have.property('debug', true) - expect(config).to.have.property('service', 'otel_service') - expect(config).to.have.property('logLevel', 'debug') - expect(config).to.have.property('sampleRate', 0.1) - expect(config).to.have.nested.property('runtimeMetrics.enabled', false) - expect(config.tags).to.include({ foo: 'bar1', baz: 'qux1' }) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['b3', 'datadog']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['b3', 'datadog']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.otelPropagators', true) + assertObjectContains(config, { + debug: true, + service: 'otel_service', + logLevel: 'debug', + sampleRate: 0.1, + runtimeMetrics: { + enabled: false + }, + tags: { + foo: 'bar1', + baz: 'qux1' + }, + tracePropagationStyle: { + inject: ['b3', 'datadog'], + extract: ['b3', 'datadog'], + otelPropagators: true + } + }) delete require.cache[require.resolve('../src/index')] const indexFile = require('../src/index') const noop = require('../src/noop/proxy') - expect(indexFile).to.equal(noop) + assert.strictEqual(indexFile, noop) }) it('should correctly map OTEL_RESOURCE_ATTRIBUTES', () => { @@ -239,142 +260,195 @@ describe('Config', () => { 'deployment.environment=test1,service.name=test2,service.version=5,foo=bar1,baz=qux1' const config = getConfig() - expect(config).to.have.property('env', 'test1') - expect(config).to.have.property('service', 'test2') - expect(config).to.have.property('version', '5') - expect(config.tags).to.include({ foo: 'bar1', baz: 'qux1' }) + assertObjectContains(config, { + env: 'test1', + service: 'test2', + version: '5', + tags: { + foo: 'bar1', + baz: 'qux1' + } + }) }) it('should correctly map OTEL_TRACES_SAMPLER and OTEL_TRACES_SAMPLER_ARG', () => { process.env.OTEL_TRACES_SAMPLER = 'always_on' process.env.OTEL_TRACES_SAMPLER_ARG = '0.1' let config = getConfig() - expect(config).to.have.property('sampleRate', 
1.0) + assert.strictEqual(config.sampleRate, 1.0) process.env.OTEL_TRACES_SAMPLER = 'always_off' config = getConfig() - expect(config).to.have.property('sampleRate', 0.0) + assert.strictEqual(config.sampleRate, 0.0) process.env.OTEL_TRACES_SAMPLER = 'traceidratio' config = getConfig() - expect(config).to.have.property('sampleRate', 0.1) + assert.strictEqual(config.sampleRate, 0.1) process.env.OTEL_TRACES_SAMPLER = 'parentbased_always_on' config = getConfig() - expect(config).to.have.property('sampleRate', 1.0) + assert.strictEqual(config.sampleRate, 1.0) process.env.OTEL_TRACES_SAMPLER = 'parentbased_always_off' config = getConfig() - expect(config).to.have.property('sampleRate', 0.0) + assert.strictEqual(config.sampleRate, 0.0) process.env.OTEL_TRACES_SAMPLER = 'parentbased_traceidratio' config = getConfig() - expect(config).to.have.property('sampleRate', 0.1) + assert.strictEqual(config.sampleRate, 0.1) }) it('should initialize with the correct defaults', () => { const config = getConfig() - expect(config).to.have.nested.property('apmTracingEnabled', true) - expect(config).to.have.property('appKey', undefined) - expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.sampleDelay', 30) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionEnabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionMessageLimit', 300) - expect(config).to.have.nested.property('appsec.blockedTemplateHtml', undefined) - expect(config).to.have.nested.property('appsec.blockedTemplateJson', undefined) - expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', undefined) - expect(config).to.have.nested.property('appsec.enabled', undefined) - expect(config).to.have.nested.property('appsec.eventTracking.mode', 'identification') - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.enabled', false) - 
expect(config).to.have.nested.property('appsec.extendedHeadersCollection.maxHeaders', 50) - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.redaction', true) - expect(config).to.have.nested.property('appsec.obfuscatorKeyRegex').with.length(190) - expect(config).to.have.nested.property('appsec.obfuscatorValueRegex').with.length(578) - expect(config).to.have.nested.property('appsec.rules', undefined) - expect(config).to.have.nested.property('appsec.rasp.bodyCollection', false) - expect(config).to.have.nested.property('appsec.rasp.enabled', true) - expect(config).to.have.nested.property('appsec.rateLimit', 100) - expect(config).to.have.nested.property('appsec.sca.enabled', null) - expect(config).to.have.nested.property('appsec.stackTrace.enabled', true) - expect(config).to.have.nested.property('appsec.stackTrace.maxDepth', 32) - expect(config).to.have.nested.property('appsec.stackTrace.maxStackTraces', 2) - expect(config).to.have.nested.property('appsec.wafTimeout', 5e3) - expect(config).to.have.property('clientIpEnabled', false) - expect(config).to.have.property('clientIpHeader', null) - expect(config).to.have.nested.property('codeOriginForSpans.enabled', true) - expect(config).to.have.nested.property('codeOriginForSpans.experimental.exit_spans.enabled', false) - expect(config).to.have.nested.property('crashtracking.enabled', true) - expect(config).to.have.property('debug', false) - expect(config).to.have.nested.property('dogstatsd.hostname', '127.0.0.1') - expect(config).to.have.nested.property('dogstatsd.port', '8125') - expect(config).to.have.nested.property('dynamicInstrumentation.enabled', false) - expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', undefined) - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', []) - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', []) - 
expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 1) - expect(config).to.have.property('env', undefined) - expect(config).to.have.nested.property('experimental.aiguard.enabled', false) - expect(config).to.have.nested.property('experimental.aiguard.endpoint', undefined) - expect(config).to.have.nested.property('experimental.aiguard.maxContentSize', 512 * 1024) - expect(config).to.have.nested.property('experimental.aiguard.maxMessagesLength', 16) - expect(config).to.have.nested.property('experimental.aiguard.timeout', 10_000) - expect(config).to.have.nested.property('experimental.exporter', undefined) - expect(config).to.have.nested.property('experimental.enableGetRumData', false) - expect(config).to.have.property('flushInterval', 2000) - expect(config).to.have.property('flushMinSpans', 1000) - expect(config.grpc.client.error.statuses).to.deep.equal(GRPC_CLIENT_ERROR_STATUSES) - expect(config.grpc.server.error.statuses).to.deep.equal(GRPC_SERVER_ERROR_STATUSES) - expect(config).to.have.nested.property('heapSnapshot.count', 0) - expect(config).to.have.nested.property('heapSnapshot.destination', '') - expect(config).to.have.nested.property('heapSnapshot.interval', 3600) - expect(config).to.have.nested.property('iast.enabled', false) - expect(config).to.have.nested.property('iast.redactionEnabled', true) - expect(config).to.have.nested.property('iast.redactionNamePattern', null) - expect(config).to.have.nested.property('iast.redactionValuePattern', null) - expect(config).to.have.nested.property('iast.telemetryVerbosity', 'INFORMATION') - expect(config).to.have.nested.property('iast.stackTrace.enabled', true) - expect(config).to.have.nested.property('injectForce', null) - expect(config).to.have.nested.deep.property('injectionEnabled', []) - expect(config).to.have.nested.property('installSignature.id', null) - expect(config).to.have.nested.property('installSignature.time', null) - 
expect(config).to.have.nested.property('installSignature.type', null) - expect(config).to.have.nested.property('instrumentationSource', 'manual') - expect(config).to.have.property('instrumentation_config_id', undefined) - expect(config).to.have.nested.property('llmobs.agentlessEnabled', undefined) - expect(config).to.have.nested.property('llmobs.enabled', false) - expect(config).to.have.nested.property('llmobs.mlApp', undefined) - expect(config).to.have.property('logLevel', 'debug') - expect(config).to.have.property('middlewareTracingEnabled', true) - expect(config).to.have.property('plugins', true) - expect(config).to.have.property('protocolVersion', '0.4') - expect(config).to.have.property('queryStringObfuscation').with.length(626) - expect(config).to.have.nested.property('remoteConfig.enabled', true) - expect(config).to.have.nested.property('remoteConfig.pollInterval', 5) - expect(config).to.have.property('reportHostname', false) - expect(config).to.have.nested.property('runtimeMetrics.enabled', false) - expect(config).to.have.nested.property('runtimeMetrics.eventLoop', true) - expect(config).to.have.nested.property('runtimeMetrics.gc', true) - expect(config).to.have.property('runtimeMetricsRuntimeId', false) - expect(config).to.have.property('sampleRate', undefined) - expect(config).to.have.property('scope', undefined) - expect(config).to.have.property('service', 'node') - expect(config).to.have.deep.property('serviceMapping', {}) - expect(config).to.have.property('spanAttributeSchema', 'v0') - expect(config).to.have.property('spanComputePeerService', false) - expect(config).to.have.property('spanRemoveIntegrationFromService', false) - expect(config.tags).to.have.property('service', 'node') - expect(config).to.have.property('traceEnabled', true) - expect(config).to.have.property('traceId128BitGenerationEnabled', true) - expect(config).to.have.property('traceId128BitLoggingEnabled', true) - 
expect(config).to.have.nested.property('tracePropagationBehaviorExtract', 'continue') - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['datadog', 'tracecontext', 'baggage']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['datadog', 'tracecontext', 'baggage']) - expect(config).to.have.property('tracing', true) - - expect(updateConfig).to.be.calledOnce - - expect(updateConfig.getCall(0).args[0]).to.deep.include.members([ + assertObjectContains(config, { + apmTracingEnabled: true, + appKey: undefined, + appsec: { + apiSecurity: { + enabled: true, + sampleDelay: 30, + endpointCollectionEnabled: true, + endpointCollectionMessageLimit: 300 + }, + blockedTemplateHtml: undefined, + blockedTemplateJson: undefined, + blockedTemplateGraphql: undefined, + enabled: undefined, + eventTracking: { + mode: 'identification' + }, + extendedHeadersCollection: { + enabled: false, + maxHeaders: 50, + redaction: true + }, + rules: undefined, + rasp: { + bodyCollection: false, + enabled: true + }, + rateLimit: 100, + sca: { + enabled: null + }, + stackTrace: { + enabled: true, + maxDepth: 32, + maxStackTraces: 2 + }, + wafTimeout: 5e3 + }, + clientIpEnabled: false, + clientIpHeader: null, + codeOriginForSpans: { + enabled: true, + experimental: { + exit_spans: { + enabled: false + } + } + }, + crashtracking: { + enabled: true + }, + debug: false, + dogstatsd: { + hostname: '127.0.0.1', + port: '8125' + }, + dynamicInstrumentation: { + enabled: false, + probeFile: undefined, + uploadIntervalSeconds: 1 + }, + env: undefined, + experimental: { + aiguard: { + enabled: false, + endpoint: undefined, + maxMessagesLength: 16, + timeout: 10_000, + maxContentSize: 512 * 1024, + }, + exporter: undefined, + enableGetRumData: false + }, + flushInterval: 2000, + flushMinSpans: 1000, + heapSnapshot: { + count: 0, + destination: '', + interval: 3600 + }, + iast: { + enabled: false, + redactionEnabled: true, + redactionNamePattern: null, + 
redactionValuePattern: null, + telemetryVerbosity: 'INFORMATION', + stackTrace: { + enabled: true + } + }, + injectForce: null, + installSignature: { + id: null, + time: null, + type: null + }, + instrumentationSource: 'manual', + instrumentation_config_id: undefined, + llmobs: { + agentlessEnabled: undefined, + enabled: false, + mlApp: undefined + }, + logLevel: 'debug', + middlewareTracingEnabled: true, + plugins: true, + protocolVersion: '0.4', + tracing: true, + tags: { + service: 'node' + }, + remoteConfig: { + enabled: true, + pollInterval: 5 + }, + reportHostname: false, + runtimeMetrics: { + enabled: false, + eventLoop: true, + gc: true + }, + runtimeMetricsRuntimeId: false, + sampleRate: undefined, + scope: undefined, + service: 'node', + spanAttributeSchema: 'v0', + spanComputePeerService: false, + spanRemoveIntegrationFromService: false, + traceEnabled: true, + traceId128BitGenerationEnabled: true, + traceId128BitLoggingEnabled: true, + tracePropagationBehaviorExtract: 'continue' + }) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, []) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, []) + assert.deepStrictEqual(config.grpc.client.error.statuses, GRPC_CLIENT_ERROR_STATUSES) + assert.deepStrictEqual(config.grpc.server.error.statuses, GRPC_SERVER_ERROR_STATUSES) + assert.deepStrictEqual(config.injectionEnabled, []) + assert.deepStrictEqual(config.serviceMapping, {}) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog', 'tracecontext', 'baggage']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog', 'tracecontext', 'baggage']) + assert.strictEqual(config.queryStringObfuscation?.length, 626) + assert.strictEqual(config.appsec?.obfuscatorKeyRegex?.length, 190) + assert.strictEqual(config.appsec?.obfuscatorValueRegex?.length, 578) + + sinon.assert.calledOnce(updateConfig) + + assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ { 
name: 'apmTracingEnabled', value: true, origin: 'default' }, { name: 'appsec.apiSecurity.enabled', value: true, origin: 'default' }, { name: 'appsec.apiSecurity.sampleDelay', value: 30, origin: 'default' }, @@ -459,13 +533,10 @@ describe('Config', () => { { name: 'isGitUploadEnabled', value: false, origin: 'default' }, { name: 'isIntelligentTestRunnerEnabled', value: false, origin: 'default' }, { name: 'isManualApiEnabled', value: false, origin: 'default' }, - { name: 'isTestDynamicInstrumentationEnabled', value: false, origin: 'default' }, { name: 'langchain.spanCharLimit', value: 128, origin: 'default' }, { name: 'langchain.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' }, { name: 'llmobs.agentlessEnabled', value: undefined, origin: 'default' }, { name: 'llmobs.mlApp', value: undefined, origin: 'default' }, - { name: 'ciVisibilityTestSessionName', value: '', origin: 'default' }, - { name: 'ciVisAgentlessLogSubmissionEnabled', value: false, origin: 'default' }, { name: 'isTestDynamicInstrumentationEnabled', value: false, origin: 'default' }, { name: 'logInjection', value: true, origin: 'default' }, { name: 'lookup', value: undefined, origin: 'default' }, @@ -488,7 +559,6 @@ describe('Config', () => { { name: 'remoteConfig.enabled', value: true, origin: 'default' }, { name: 'remoteConfig.pollInterval', value: 5, origin: 'default' }, { name: 'reportHostname', value: false, origin: 'default' }, - { name: 'reportHostname', value: false, origin: 'default' }, { name: 'runtimeMetrics.enabled', value: false, origin: 'default' }, { name: 'runtimeMetricsRuntimeId', value: false, origin: 'default' }, { name: 'sampleRate', value: undefined, origin: 'default' }, @@ -517,7 +587,7 @@ describe('Config', () => { { name: 'version', value: '', origin: 'default' }, { name: 'vertexai.spanCharLimit', value: 128, origin: 'default' }, { name: 'vertexai.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' } - ]) + ].sort(comparator)) }) it('should support 
logging', () => { @@ -526,8 +596,8 @@ describe('Config', () => { debug: true }) - expect(log.use).to.have.been.calledWith(config.logger) - expect(log.toggle).to.have.been.calledWith(config.debug) + sinon.assert.calledWith(log.use, config.logger) + sinon.assert.calledWith(log.toggle, config.debug) }) it('should not warn on undefined DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', () => { @@ -535,8 +605,8 @@ describe('Config', () => { logger: {}, debug: true }) - expect(log.warn).not.to.be.called - expect(config).to.have.property('spanAttributeSchema', 'v0') + sinon.assert.notCalled(log.warn) + assert.strictEqual(config.spanAttributeSchema, 'v0') }) it('should initialize from the default service', () => { @@ -545,8 +615,8 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('service', 'test') - expect(config.tags).to.have.property('service', 'test') + assert.strictEqual(config.service, 'test') + assert.strictEqual(config.tags?.service, 'test') }) it('should initialize from the default version', () => { @@ -555,16 +625,16 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('version', '1.2.3') - expect(config.tags).to.have.property('version', '1.2.3') + assert.strictEqual(config.version, '1.2.3') + assert.strictEqual(config.tags?.version, '1.2.3') }) it('should initialize from environment variables', () => { process.env.DD_AI_GUARD_ENABLED = 'true' process.env.DD_AI_GUARD_ENDPOINT = 'https://dd.datad0g.com/api/unstable/ai-guard' - process.env.DD_AI_GUARD_MAX_CONTENT_SIZE = 1024 * 1024 - process.env.DD_AI_GUARD_MAX_MESSAGES_LENGTH = 32 - process.env.DD_AI_GUARD_TIMEOUT = 2000 + process.env.DD_AI_GUARD_MAX_CONTENT_SIZE = String(1024 * 1024) + process.env.DD_AI_GUARD_MAX_MESSAGES_LENGTH = '32' + process.env.DD_AI_GUARD_TIMEOUT = '2000' process.env.DD_API_SECURITY_ENABLED = 'true' process.env.DD_API_SECURITY_SAMPLE_DELAY = '25' process.env.DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED = 'false' @@ -684,89 +754,151 
@@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.property('apmTracingEnabled', false) - expect(config).to.have.property('appKey', 'myAppKey') - expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.sampleDelay', 25) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionEnabled', false) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionMessageLimit', 500) - expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', BLOCKED_TEMPLATE_GRAPHQL) - expect(config).to.have.nested.property('appsec.blockedTemplateHtml', BLOCKED_TEMPLATE_HTML) - expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) - expect(config).to.have.nested.property('appsec.enabled', true) - expect(config).to.have.nested.property('appsec.eventTracking.mode', 'extended') - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.enabled', true) - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.maxHeaders', 42) - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.redaction', false) - expect(config).to.have.nested.property('appsec.obfuscatorKeyRegex', '.*') - expect(config).to.have.nested.property('appsec.obfuscatorValueRegex', '.*') - expect(config).to.have.nested.property('appsec.rasp.bodyCollection', true) - expect(config).to.have.nested.property('appsec.rasp.enabled', false) - expect(config).to.have.nested.property('appsec.rateLimit', 42) - expect(config).to.have.nested.property('appsec.rules', RULES_JSON_PATH) - expect(config).to.have.nested.property('appsec.sca.enabled', true) - expect(config).to.have.nested.property('appsec.stackTrace.enabled', false) - expect(config).to.have.nested.property('appsec.stackTrace.maxDepth', 42) - expect(config).to.have.nested.property('appsec.stackTrace.maxStackTraces', 5) - 
expect(config).to.have.nested.property('appsec.wafTimeout', 42) - expect(config).to.have.property('clientIpEnabled', true) - expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') - expect(config).to.have.nested.property('codeOriginForSpans.enabled', false) - expect(config).to.have.nested.property('codeOriginForSpans.experimental.exit_spans.enabled', true) - expect(config).to.have.nested.property('crashtracking.enabled', false) - expect(config).to.have.property('debug', true) - expect(config).to.have.nested.property('dogstatsd.hostname', 'dsd-agent') - expect(config).to.have.nested.property('dogstatsd.port', '5218') - expect(config).to.have.nested.property('dynamicInstrumentation.enabled', true) - expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes.json') - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo', 'bar']) - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a', 'b', 'c']) - expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.1) - expect(config).to.have.property('env', 'test') - expect(config).to.have.nested.property('experimental.aiguard.enabled', true) - expect(config).to.have.nested.property('experimental.aiguard.endpoint', 'https://dd.datad0g.com/api/unstable/ai-guard') - expect(config).to.have.nested.property('experimental.aiguard.maxContentSize', 1024 * 1024) - expect(config).to.have.nested.property('experimental.aiguard.maxMessagesLength', 32) - expect(config).to.have.nested.property('experimental.aiguard.timeout', 2000) - expect(config).to.have.nested.property('experimental.enableGetRumData', true) - expect(config).to.have.nested.property('experimental.exporter', 'log') - expect(config.grpc.client.error.statuses).to.deep.equal([3, 13, 400, 401, 402, 403]) - expect(config.grpc.server.error.statuses).to.deep.equal([3, 13, 400, 401, 402, 403]) - 
expect(config).to.have.property('hostname', 'agent') - expect(config).to.have.nested.property('heapSnapshot.count', 1) - expect(config).to.have.nested.property('heapSnapshot.destination', '/tmp') - expect(config).to.have.nested.property('heapSnapshot.interval', 1800) - expect(config).to.have.nested.property('iast.dbRowsToTaint', 2) - expect(config).to.have.nested.property('iast.deduplicationEnabled', false) - expect(config).to.have.nested.property('iast.enabled', true) - expect(config).to.have.nested.property('iast.maxConcurrentRequests', 3) - expect(config).to.have.nested.property('iast.maxContextOperations', 4) - expect(config).to.have.nested.property('iast.redactionEnabled', false) - expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') - expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') - expect(config).to.have.nested.property('iast.requestSampling', 40) - expect(config).to.have.nested.property('iast.securityControlsConfiguration', - 'SANITIZER:CODE_INJECTION:sanitizer.js:method') - expect(config).to.have.nested.property('iast.stackTrace.enabled', false) - expect(config).to.have.nested.property('iast.telemetryVerbosity', 'DEBUG') - expect(config).to.have.deep.property('installSignature', - { id: '68e75c48-57ca-4a12-adfc-575c4b05fcbe', type: 'k8s_single_step', time: '1703188212' }) - expect(config).to.have.property('instrumentation_config_id', 'abcdef123') - expect(config).to.have.nested.property('llmobs.agentlessEnabled', true) - expect(config).to.have.nested.property('llmobs.mlApp', 'myMlApp') - expect(config).to.have.property('middlewareTracingEnabled', false) - expect(config).to.have.deep.property('peerServiceMapping', { c: 'cc', d: 'dd' }) - expect(config).to.have.property('protocolVersion', '0.5') - expect(config).to.have.property('queryStringObfuscation', '.*') - expect(config).to.have.nested.property('remoteConfig.enabled', false) - 
expect(config).to.have.nested.property('remoteConfig.pollInterval', 42) - expect(config).to.have.property('reportHostname', true) - expect(config).to.have.nested.property('runtimeMetrics.enabled', true) - expect(config).to.have.nested.property('runtimeMetrics.eventLoop', false) - expect(config).to.have.nested.property('runtimeMetrics.gc', false) - expect(config).to.have.property('runtimeMetricsRuntimeId', true) - expect(config).to.have.property('sampleRate', 0.5) - expect(config).to.have.deep.nested.property('sampler', { + assertObjectContains(config, { + apmTracingEnabled: false, + appKey: 'myAppKey', + appsec: { + apiSecurity: { + enabled: true, + sampleDelay: 25, + endpointCollectionEnabled: false, + endpointCollectionMessageLimit: 500 + }, + blockedTemplateGraphql: BLOCKED_TEMPLATE_GRAPHQL, + blockedTemplateHtml: BLOCKED_TEMPLATE_HTML, + blockedTemplateJson: BLOCKED_TEMPLATE_JSON, + enabled: true, + eventTracking: { + mode: 'extended' + }, + extendedHeadersCollection: { + enabled: true, + maxHeaders: 42, + redaction: false + }, + obfuscatorKeyRegex: '.*', + obfuscatorValueRegex: '.*', + rasp: { + bodyCollection: true, + enabled: false + }, + rateLimit: 42, + rules: RULES_JSON_PATH, + sca: { + enabled: true + }, + stackTrace: { + enabled: false, + maxDepth: 42, + maxStackTraces: 5 + }, + wafTimeout: 42 + }, + clientIpEnabled: true, + clientIpHeader: 'x-true-client-ip', + codeOriginForSpans: { + enabled: false, + experimental: { + exit_spans: { + enabled: true + } + } + }, + crashtracking: { + enabled: false + }, + debug: true, + dogstatsd: { + hostname: 'dsd-agent', + port: '5218' + }, + dynamicInstrumentation: { + enabled: true, + probeFile: 'probes.json', + redactedIdentifiers: ['foo', 'bar'], + redactionExcludedIdentifiers: ['a', 'b', 'c'], + uploadIntervalSeconds: 0.1 + }, + env: 'test', + experimental: { + aiguard: { + enabled: true, + endpoint: 'https://dd.datad0g.com/api/unstable/ai-guard', + maxContentSize: 1024 * 1024, + maxMessagesLength: 32, + 
timeout: 2000 + }, + enableGetRumData: true, + exporter: 'log' + }, + hostname: 'agent', + heapSnapshot: { + count: 1, + destination: '/tmp', + interval: 1800 + }, + iast: { + dbRowsToTaint: 2, + deduplicationEnabled: false, + enabled: true, + maxConcurrentRequests: 3, + maxContextOperations: 4, + redactionEnabled: false, + redactionNamePattern: 'REDACTION_NAME_PATTERN', + redactionValuePattern: 'REDACTION_VALUE_PATTERN', + requestSampling: 40, + securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method', + stackTrace: { + enabled: false + }, + telemetryVerbosity: 'DEBUG' + }, + instrumentation_config_id: 'abcdef123', + llmobs: { + agentlessEnabled: true, + mlApp: 'myMlApp' + }, + middlewareTracingEnabled: false, + protocolVersion: '0.5', + queryStringObfuscation: '.*', + remoteConfig: { + enabled: false, + pollInterval: 42 + }, + reportHostname: true, + runtimeMetrics: { + enabled: true, + eventLoop: false, + gc: false + }, + runtimeMetricsRuntimeId: true, + sampleRate: 0.5, + service: 'service', + spanAttributeSchema: 'v1', + spanComputePeerService: true, + spanRemoveIntegrationFromService: true, + tags: { + foo: 'bar', + baz: 'qux', + service: 'service', + version: '1.0.0', + env: 'test' + }, + traceEnabled: true, + traceId128BitGenerationEnabled: true, + traceId128BitLoggingEnabled: true, + tracePropagationBehaviorExtract: 'restart', + tracing: false, + version: '1.0.0' + }) + assert.deepStrictEqual(config.grpc.client.error.statuses, [3, 13, 400, 401, 402, 403]) + assert.deepStrictEqual(config.grpc.server.error.statuses, [3, 13, 400, 401, 402, 403]) + assert.deepStrictEqual( + config.installSignature, + { id: '68e75c48-57ca-4a12-adfc-575c4b05fcbe', type: 'k8s_single_step', time: '1703188212' } + ) + assert.deepStrictEqual(config.peerServiceMapping, { c: 'cc', d: 'dd' }) + assert.deepStrictEqual(config.sampler, { sampleRate: 0.5, rateLimit: '-1', rules: [ @@ -782,25 +914,13 @@ describe('Config', () => { { sampleRate: 0.1 } ] }) - 
expect(config).to.have.property('service', 'service') - expect(config).to.have.deep.property('serviceMapping', { a: 'aa', b: 'bb' }) - expect(config).to.have.property('spanAttributeSchema', 'v1') - expect(config).to.have.property('spanComputePeerService', true) - expect(config).to.have.property('spanRemoveIntegrationFromService', true) - expect(config.tags).to.include({ foo: 'bar', baz: 'qux' }) - expect(config.tags).to.include({ service: 'service', version: '1.0.0', env: 'test' }) - expect(config).to.have.property('traceEnabled', true) - expect(config).to.have.property('traceId128BitGenerationEnabled', true) - expect(config).to.have.property('traceId128BitLoggingEnabled', true) - expect(config).to.have.nested.property('tracePropagationBehaviorExtract', 'restart') - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['b3', 'tracecontext']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['b3', 'tracecontext']) - expect(config).to.have.property('tracing', false) - expect(config).to.have.property('version', '1.0.0') - - expect(updateConfig).to.be.calledOnce - - expect(updateConfig.getCall(0).args[0]).to.deep.include.members([ + assert.deepStrictEqual(config.serviceMapping, { a: 'aa', b: 'bb' }) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['b3', 'tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['b3', 'tracecontext']) + + sinon.assert.calledOnce(updateConfig) + + assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ { name: 'apmTracingEnabled', value: false, origin: 'env_var' }, { name: 'appsec.apiSecurity.enabled', value: true, origin: 'env_var' }, { name: 'appsec.apiSecurity.sampleDelay', value: 25, origin: 'env_var' }, @@ -893,7 +1013,7 @@ describe('Config', () => { { name: 'version', value: '1.0.0', origin: 'env_var' }, { name: 'vertexai.spanCharLimit', value: 50, origin: 'env_var' }, { name: 'vertexai.spanPromptCompletionSampleRate', value: 
0.5, origin: 'env_var' } - ]) + ].sort(comparator)) }) it('should ignore empty strings', () => { @@ -901,17 +1021,21 @@ describe('Config', () => { let config = getConfig() - expect(config).to.have.property('service', 'node') - expect(config).to.have.property('env', undefined) - expect(config).to.have.property('version', '') + assertObjectContains(config, { + service: 'node', + env: undefined, + version: '' + }) process.env.DD_TAGS = 'service: env: version:' config = getConfig() - expect(config).to.have.property('service', 'node') - expect(config).to.have.property('env', undefined) - expect(config).to.have.property('version', '') + assertObjectContains(config, { + service: 'node', + env: undefined, + version: '' + }) }) it('should support space separated tags when experimental mode enabled', () => { @@ -919,36 +1043,42 @@ describe('Config', () => { let config = getConfig() - expect(config.tags).to.include({ key1: 'value1', key2: 'value2' }) + assertObjectContains(config.tags, { key1: 'value1', key2: 'value2' }) process.env.DD_TAGS = 'env:test aKey:aVal bKey:bVal cKey:' config = getConfig() - expect(config.tags).to.have.property('env', 'test') - expect(config.tags).to.have.property('aKey', 'aVal') - expect(config.tags).to.have.property('bKey', 'bVal') - expect(config.tags).to.have.property('cKey', '') + assertObjectContains(config.tags, { + env: 'test', + aKey: 'aVal', + bKey: 'bVal', + cKey: '' + }) process.env.DD_TAGS = 'env:test,aKey:aVal bKey:bVal cKey:' config = getConfig() - expect(config.tags).to.have.property('env', 'test') - expect(config.tags).to.have.property('aKey', 'aVal bKey:bVal cKey:') + assertObjectContains(config.tags, { + env: 'test', + aKey: 'aVal bKey:bVal cKey:' + }) process.env.DD_TAGS = 'a:b:c:d' config = getConfig() - expect(config.tags).to.have.property('a', 'b:c:d') + assert.strictEqual(config.tags?.a, 'b:c:d') process.env.DD_TAGS = 'a,1' config = getConfig() - expect(config.tags).to.have.property('a', '') - 
expect(config.tags).to.have.property('1', '') + assertObjectContains(config.tags, { + a: '', + 1: '' + }) }) it('should read case-insensitive booleans from environment variables', () => { @@ -958,9 +1088,13 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('tracing', false) - expect(config).to.have.property('tracePropagationExtractFirst', true) - expect(config).to.have.nested.property('runtimeMetrics.enabled', false) + assertObjectContains(config, { + tracing: false, + tracePropagationExtractFirst: true, + runtimeMetrics: { + enabled: false + } + }) }) it('should initialize from environment variables with url taking precedence', () => { @@ -974,14 +1108,17 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('tracing', false) - expect(config).to.have.nested.property('dogstatsd.hostname', 'agent') - expect(config).to.have.nested.property('url.protocol', 'https:') - expect(config).to.have.nested.property('url.hostname', 'agent2') - expect(config).to.have.nested.property('url.port', '7777') - expect(config).to.have.property('site', 'datadoghq.eu') - expect(config).to.have.property('service', 'service') - expect(config).to.have.property('env', 'test') + assert.strictEqual(config.url.toString(), 'https://agent2:7777/') + + assertObjectContains(config, { + tracing: false, + dogstatsd: { + hostname: 'agent' + }, + site: 'datadoghq.eu', + service: 'service', + env: 'test' + }) }) it('should initialize from environment variables with inject/extract taking precedence', () => { @@ -991,8 +1128,8 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['tracecontext']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, 
['tracecontext']) }) it('should enable crash tracking for SSI by default', () => { @@ -1000,7 +1137,7 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.deep.property('crashtracking.enabled', true) + assert.deepStrictEqual(config.crashtracking?.enabled, true) }) it('should disable crash tracking for SSI when configured', () => { @@ -1009,7 +1146,7 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.deep.property('crashtracking.enabled', false) + assert.deepStrictEqual(config.crashtracking?.enabled, false) }) it('should prioritize DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE over DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING', () => { @@ -1018,7 +1155,7 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.property('appsec.eventTracking.mode', 'anonymous') + assert.strictEqual(config.appsec?.eventTracking?.mode, 'anonymous') }) it('should initialize from the options', () => { @@ -1143,63 +1280,106 @@ describe('Config', () => { version: '0.1.0' }) - expect(config).to.have.nested.property('appsec.enabled', false) - expect(config).to.have.property('clientIpEnabled', true) - expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') - expect(config).to.have.nested.property('codeOriginForSpans.enabled', false) - expect(config).to.have.nested.property('codeOriginForSpans.experimental.exit_spans.enabled', true) - expect(config).to.have.nested.property('dogstatsd.hostname', 'agent-dsd') - expect(config).to.have.nested.property('dogstatsd.port', '5218') - expect(config).to.have.nested.property('dynamicInstrumentation.enabled', true) - expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes.json') - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo', 'bar']) - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a', 'b', 'c']) - 
expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.1) - expect(config).to.have.property('env', 'test') - expect(config).to.have.nested.property('experimental.aiguard.enabled', true) - expect(config).to.have.nested.property('experimental.aiguard.endpoint', 'https://dd.datad0g.com/api/unstable/ai-guard') - expect(config).to.have.nested.property('experimental.aiguard.maxContentSize', 1024 * 1024) - expect(config).to.have.nested.property('experimental.aiguard.maxMessagesLength', 32) - expect(config).to.have.nested.property('experimental.aiguard.timeout', 2000) - expect(config).to.have.nested.property('experimental.enableGetRumData', true) - expect(config).to.have.nested.property('experimental.exporter', 'log') - expect(config).to.have.property('flushInterval', 5000) - expect(config).to.have.property('flushMinSpans', 500) - expect(config).to.have.property('hostname', 'agent') - expect(config).to.have.nested.property('iast.dbRowsToTaint', 2) - expect(config).to.have.nested.property('iast.deduplicationEnabled', false) - expect(config).to.have.nested.property('iast.enabled', true) - expect(config).to.have.nested.property('iast.maxConcurrentRequests', 4) - expect(config).to.have.nested.property('iast.maxContextOperations', 5) - expect(config).to.have.nested.property('iast.redactionEnabled', false) - expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') - expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') - expect(config).to.have.nested.property('iast.requestSampling', 50) + assertObjectContains(config, { + appsec: { + enabled: false + }, + clientIpEnabled: true, + clientIpHeader: 'x-true-client-ip', + codeOriginForSpans: { + enabled: false, + experimental: { + exit_spans: { + enabled: true + } + } + }, + dogstatsd: { + hostname: 'agent-dsd', + port: '5218' + }, + dynamicInstrumentation: { + enabled: true, + probeFile: 'probes.json' + } + }) + 
assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, ['foo', 'bar']) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, ['a', 'b', 'c']) + assertObjectContains(config, { + dynamicInstrumentation: { + uploadIntervalSeconds: 0.1 + }, + env: 'test', + experimental: { + aiguard: { + enabled: true, + endpoint: 'https://dd.datad0g.com/api/unstable/ai-guard' + } + } + }) + assert.strictEqual(config.experimental?.aiguard?.maxContentSize, 1024 * 1024) + assertObjectContains(config, { + experimental: { + aiguard: { + maxMessagesLength: 32, + timeout: 2000 + }, + enableGetRumData: true, + exporter: 'log' + }, + flushInterval: 5000, + flushMinSpans: 500, + hostname: 'agent', + iast: { + dbRowsToTaint: 2, + deduplicationEnabled: false, + enabled: true, + maxConcurrentRequests: 4, + maxContextOperations: 5, + redactionEnabled: false, + redactionNamePattern: 'REDACTION_NAME_PATTERN', + redactionValuePattern: 'REDACTION_VALUE_PATTERN', + requestSampling: 50 + } + }) if (DD_MAJOR < 6) { - expect(config).to.have.nested.property('iast.securityControlsConfiguration', - 'SANITIZER:CODE_INJECTION:sanitizer.js:method') + assert.strictEqual(config.iast?.securityControlsConfiguration, 'SANITIZER:CODE_INJECTION:sanitizer.js:method') } else { - expect(config).to.not.have.property('iast.securityControlsConfiguration') + assert.ok(!('iast.securityControlsConfiguration' in config)) } - expect(config).to.have.nested.property('iast.stackTrace.enabled', false) - expect(config).to.have.nested.property('iast.telemetryVerbosity', 'DEBUG') - expect(config).to.have.nested.property('llmobs.agentlessEnabled', true) - expect(config).to.have.nested.property('llmobs.mlApp', 'myMlApp') - expect(config).to.have.property('logLevel', logLevel) - expect(config).to.have.property('logger', logger) - expect(config).to.have.property('middlewareTracingEnabled', false) - expect(config).to.have.deep.property('peerServiceMapping', { d: 'dd' }) - 
expect(config).to.have.property('plugins', false) - expect(config).to.have.property('port', '6218') - expect(config).to.have.property('protocolVersion', '0.5') - expect(config).to.have.nested.property('remoteConfig.pollInterval', 42) - expect(config).to.have.property('reportHostname', true) - expect(config).to.have.nested.property('runtimeMetrics.enabled', true) - expect(config).to.have.nested.property('runtimeMetrics.eventLoop', false) - expect(config).to.have.nested.property('runtimeMetrics.gc', false) - expect(config).to.have.property('runtimeMetricsRuntimeId', true) - expect(config).to.have.property('sampleRate', 0.5) - expect(config).to.have.deep.nested.property('sampler', { + assertObjectContains(config, { + iast: { + stackTrace: { + enabled: false + }, + telemetryVerbosity: 'DEBUG' + }, + llmobs: { + agentlessEnabled: true, + mlApp: 'myMlApp' + } + }) + assert.strictEqual(config.logLevel, logLevel) + assert.strictEqual(config.logger, logger) + assert.strictEqual(config.middlewareTracingEnabled, false) + assert.deepStrictEqual(config.peerServiceMapping, { d: 'dd' }) + assertObjectContains(config, { + plugins: false, + port: '6218', + protocolVersion: '0.5', + remoteConfig: { + pollInterval: 42 + }, + reportHostname: true, + runtimeMetrics: { + enabled: true, + eventLoop: false, + gc: false + }, + runtimeMetricsRuntimeId: true, + sampleRate: 0.5 + }) + assert.deepStrictEqual(config.sampler, { rateLimit: 1000, rules: [ { service: 'usersvc', name: 'healthcheck', sampleRate: 0.0 }, @@ -1215,27 +1395,35 @@ describe('Config', () => { { sampleRate: 0.1 } ] }) - expect(config).to.have.property('service', 'service') - expect(config).to.have.deep.property('serviceMapping', { a: 'aa', b: 'bb' }) - expect(config).to.have.property('site', 'datadoghq.eu') - expect(config).to.have.property('spanComputePeerService', true) - expect(config).to.have.property('spanRemoveIntegrationFromService', true) - expect(config).to.have.property('tags') - 
expect(config.tags).to.have.property('env', 'test') - expect(config.tags).to.have.property('foo', 'bar') - expect(config.tags).to.have.property('runtime-id') - expect(config.tags['runtime-id']).to.match(/^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$/) - expect(config.tags).to.have.property('service', 'service') - expect(config.tags).to.have.property('version', '0.1.0') - expect(config).to.have.property('traceId128BitGenerationEnabled', true) - expect(config).to.have.property('traceId128BitLoggingEnabled', true) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['datadog']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['datadog']) - expect(config).to.have.property('version', '0.1.0') - - expect(updateConfig).to.be.calledOnce - - expect(updateConfig.getCall(0).args[0]).to.deep.include.members([ + assert.strictEqual(config.service, 'service') + assert.deepStrictEqual(config.serviceMapping, { a: 'aa', b: 'bb' }) + assertObjectContains(config, { + site: 'datadoghq.eu', + spanComputePeerService: true, + spanRemoveIntegrationFromService: true + }) + assert.ok(Object.hasOwn(config, 'tags')) + assertObjectContains(config.tags, { + env: 'test', + foo: 'bar' + }) + assert.ok(Object.hasOwn(config.tags, 'runtime-id')) + assert.match(config.tags['runtime-id'], /^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$/) + assertObjectContains(config.tags, { + service: 'service', + version: '0.1.0' + }) + assertObjectContains(config, { + traceId128BitGenerationEnabled: true, + traceId128BitLoggingEnabled: true + }) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog']) + assert.strictEqual(config.version, '0.1.0') + + sinon.assert.calledOnce(updateConfig) + + assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ { name: 'appsec.enabled', value: false, origin: 'code' }, { name: 'clientIpEnabled', value: true, origin: 
'code' }, { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, @@ -1298,7 +1486,7 @@ describe('Config', () => { { name: 'traceId128BitGenerationEnabled', value: true, origin: 'code' }, { name: 'traceId128BitLoggingEnabled', value: true, origin: 'code' }, { name: 'version', value: '0.1.0', origin: 'code' } - ].filter(v => v)) + ].filter(v => v).sort(comparator)) }) it('should initialize from the options with url taking precedence', () => { @@ -1319,18 +1507,21 @@ describe('Config', () => { plugins: false }) - expect(config).to.have.nested.property('url.protocol', 'https:') - expect(config).to.have.nested.property('url.hostname', 'agent2') - expect(config).to.have.nested.property('url.port', '7777') - expect(config).to.have.property('site', 'datadoghq.eu') - expect(config).to.have.property('service', 'service') - expect(config).to.have.property('env', 'test') - expect(config).to.have.property('sampleRate', 0.5) - expect(config).to.have.property('logger', logger) - expect(config.tags).to.have.property('foo', 'bar') - expect(config).to.have.property('flushInterval', 5000) - expect(config).to.have.property('flushMinSpans', 500) - expect(config).to.have.property('plugins', false) + assert.strictEqual(config.url.toString(), 'https://agent2:7777/') + + assertObjectContains(config, { + site: 'datadoghq.eu', + service: 'service', + env: 'test', + sampleRate: 0.5 + }) + assert.strictEqual(config.logger, logger) + assert.strictEqual(config.tags?.foo, 'bar') + assertObjectContains(config, { + flushInterval: 5000, + flushMinSpans: 500, + plugins: false + }) }) it('should warn if mixing shared and extract propagation style env vars', () => { @@ -1339,7 +1530,7 @@ describe('Config', () => { getConfig() - expect(log.warn).to.have.been.calledWith('Use either the DD_TRACE_PROPAGATION_STYLE ' + + sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + 
'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) @@ -1350,7 +1541,7 @@ describe('Config', () => { getConfig() - expect(log.warn).to.have.been.calledWith('Use either the DD_TRACE_PROPAGATION_STYLE ' + + sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + 'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) @@ -1360,8 +1551,8 @@ describe('Config', () => { const config = getConfig() - expect(log.warn).to.have.been.calledWith('Unexpected input for config.spanAttributeSchema, picked default', 'v0') - expect(config).to.have.property('spanAttributeSchema', 'v0') + sinon.assert.calledWith(log.warn, 'Unexpected input for config.spanAttributeSchema, picked default', 'v0') + assert.strictEqual(config.spanAttributeSchema, 'v0') }) it('should parse integer range sets', () => { @@ -1370,24 +1561,24 @@ describe('Config', () => { let config = getConfig() - expect(config.grpc.client.error.statuses).to.deep.equal([3, 13, 400, 401, 402, 403]) - expect(config.grpc.server.error.statuses).to.deep.equal([3, 13, 400, 401, 402, 403]) + assert.deepStrictEqual(config.grpc.client.error.statuses, [3, 13, 400, 401, 402, 403]) + assert.deepStrictEqual(config.grpc.server.error.statuses, [3, 13, 400, 401, 402, 403]) process.env.DD_GRPC_CLIENT_ERROR_STATUSES = '1' process.env.DD_GRPC_SERVER_ERROR_STATUSES = '1' config = getConfig() - expect(config.grpc.client.error.statuses).to.deep.equal([1]) - expect(config.grpc.server.error.statuses).to.deep.equal([1]) + assert.deepStrictEqual(config.grpc.client.error.statuses, [1]) + assert.deepStrictEqual(config.grpc.server.error.statuses, [1]) process.env.DD_GRPC_CLIENT_ERROR_STATUSES = '2,10,13-15' process.env.DD_GRPC_SERVER_ERROR_STATUSES = '2,10,13-15' config = getConfig() - expect(config.grpc.client.error.statuses).to.deep.equal([2, 10, 13, 14, 15]) - expect(config.grpc.server.error.statuses).to.deep.equal([2, 10, 13, 14, 15]) + 
assert.deepStrictEqual(config.grpc.client.error.statuses, [2, 10, 13, 14, 15]) + assert.deepStrictEqual(config.grpc.server.error.statuses, [2, 10, 13, 14, 15]) }) context('peer service tagging', () => { @@ -1395,38 +1586,38 @@ describe('Config', () => { process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = 'v0' process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'true' let config = getConfig() - expect(config).to.have.property('spanComputePeerService', true) + assert.strictEqual(config.spanComputePeerService, true) process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'foo' config = getConfig() - expect(config).to.have.property('spanComputePeerService', false) + assert.strictEqual(config.spanComputePeerService, false) process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'false' config = getConfig() - expect(config).to.have.property('spanComputePeerService', false) + assert.strictEqual(config.spanComputePeerService, false) delete process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED config = getConfig() - expect(config).to.have.property('spanComputePeerService', false) + assert.strictEqual(config.spanComputePeerService, false) }) it('should activate peer service in v1 unless explicitly false', () => { process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = 'v1' process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'false' let config = getConfig() - expect(config).to.have.property('spanComputePeerService', false) + assert.strictEqual(config.spanComputePeerService, false) process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'foo' config = getConfig() - expect(config).to.have.property('spanComputePeerService', true) + assert.strictEqual(config.spanComputePeerService, true) process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'true' config = getConfig() - expect(config).to.have.property('spanComputePeerService', true) + assert.strictEqual(config.spanComputePeerService, true) delete process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED config = getConfig() - 
expect(config).to.have.property('spanComputePeerService', true) + assert.strictEqual(config.spanComputePeerService, true) }) }) @@ -1438,16 +1629,16 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.property('hostname', 'agent') - expect(config.tags).to.include({ foo: 'foo', baz: 'qux' }) + assert.strictEqual(config.hostname, 'agent') + assertObjectContains(config.tags, { foo: 'foo', baz: 'qux' }) }) it('should give priority to the options', () => { process.env.DD_AI_GUARD_ENABLED = 'false' process.env.DD_AI_GUARD_ENDPOINT = 'https://dd.datadog.com/api/unstable/ai-guard' - process.env.DD_AI_GUARD_MAX_CONTENT_SIZE = 512 * 1024 - process.env.DD_AI_GUARD_MAX_MESSAGES_LENGTH = 16 - process.env.DD_AI_GUARD_TIMEOUT = 1_000 + process.env.DD_AI_GUARD_MAX_CONTENT_SIZE = String(512 * 1024) + process.env.DD_AI_GUARD_MAX_MESSAGES_LENGTH = '16' + process.env.DD_AI_GUARD_TIMEOUT = '1000' process.env.DD_API_KEY = '123' process.env.DD_API_SECURITY_ENABLED = 'false' process.env.DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED = 'false' @@ -1639,91 +1830,127 @@ describe('Config', () => { version: '1.0.0' }) - expect(config).to.have.nested.property('apmTracingEnabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionEnabled', true) - expect(config).to.have.nested.property('appsec.apiSecurity.endpointCollectionMessageLimit', 150) - expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', BLOCKED_TEMPLATE_GRAPHQL) - expect(config).to.have.nested.property('appsec.blockedTemplateHtml', BLOCKED_TEMPLATE_HTML) - expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) - expect(config).to.have.nested.property('appsec.enabled', true) - expect(config).to.have.nested.property('appsec.eventTracking.mode', 'anonymous') - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.enabled', true) 
- expect(config).to.have.nested.property('appsec.extendedHeadersCollection.maxHeaders', 42) - expect(config).to.have.nested.property('appsec.extendedHeadersCollection.redaction', true) - expect(config).to.have.nested.property('appsec.obfuscatorKeyRegex', '.*') - expect(config).to.have.nested.property('appsec.obfuscatorValueRegex', '.*') - expect(config).to.have.nested.property('appsec.rasp.bodyCollection', true) - expect(config).to.have.nested.property('appsec.rasp.enabled', false) - expect(config).to.have.nested.property('appsec.rateLimit', 42) - expect(config).to.have.nested.property('appsec.rules', RULES_JSON_PATH) - expect(config).to.have.nested.property('appsec.stackTrace.enabled', false) - expect(config).to.have.nested.property('appsec.stackTrace.maxDepth', 42) - expect(config).to.have.nested.property('appsec.stackTrace.maxStackTraces', 5) - expect(config).to.have.nested.property('appsec.wafTimeout', 42) - expect(config).to.have.property('clientIpEnabled', true) - expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') - expect(config).to.have.nested.property('codeOriginForSpans.enabled', true) - expect(config).to.have.nested.property('codeOriginForSpans.experimental.exit_spans.enabled', false) - expect(config).to.have.nested.property('dogstatsd.hostname', 'server') - expect(config).to.have.nested.property('dogstatsd.port', '8888') - expect(config).to.have.nested.property('dynamicInstrumentation.enabled', false) - expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', 'probes2.json') - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactedIdentifiers', ['foo2', 'bar2']) - expect(config).to.have.nested.deep.property('dynamicInstrumentation.redactionExcludedIdentifiers', ['a2', 'b2']) - expect(config).to.have.nested.property('dynamicInstrumentation.uploadIntervalSeconds', 0.2) - expect(config).to.have.property('env', 'development') - expect(config).to.have.nested.property('experimental.aiguard.enabled', 
true) - expect(config).to.have.nested.property('experimental.aiguard.endpoint', 'https://dd.datad0g.com/api/unstable/ai-guard') - expect(config).to.have.nested.property('experimental.aiguard.maxContentSize', 1024 * 1024) - expect(config).to.have.nested.property('experimental.aiguard.maxMessagesLength', 32) - expect(config).to.have.nested.property('experimental.aiguard.timeout', 2000) - expect(config).to.have.nested.property('experimental.enableGetRumData', false) - expect(config).to.have.nested.property('experimental.exporter', 'agent') - expect(config).to.have.property('flushMinSpans', 500) - expect(config).to.have.property('flushInterval', 500) - expect(config).to.have.nested.property('iast.dbRowsToTaint', 3) - expect(config).to.have.nested.property('iast.deduplicationEnabled', true) - expect(config).to.have.nested.property('iast.enabled', true) - expect(config).to.have.nested.property('iast.maxConcurrentRequests', 2) - expect(config).to.have.nested.property('iast.maxContextOperations', 2) - expect(config).to.have.nested.property('iast.redactionEnabled', true) - expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') - expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') - expect(config).to.have.nested.property('iast.requestSampling', 30) - if (DD_MAJOR < 6) { - expect(config).to.have.nested.property('iast.securityControlsConfiguration', - 'SANITIZER:CODE_INJECTION:sanitizer.js:method2') - } else { - expect(config).to.have.nested.property('iast.securityControlsConfiguration', - 'SANITIZER:CODE_INJECTION:sanitizer.js:method1') - } - expect(config).to.have.nested.property('iast.stackTrace.enabled', false) - expect(config).to.have.nested.property('llmobs.agentlessEnabled', false) - expect(config).to.have.nested.property('llmobs.mlApp', 'myOtherMlApp') - expect(config).to.have.property('middlewareTracingEnabled', true) - expect(config).to.have.deep.property('peerServiceMapping', { d: 
'dd' }) - expect(config).to.have.property('protocolVersion', '0.5') - expect(config).to.have.nested.property('remoteConfig.pollInterval', 42) - expect(config).to.have.property('reportHostname', false) - expect(config).to.have.nested.property('runtimeMetrics.enabled', false) - expect(config).to.have.property('runtimeMetricsRuntimeId', false) - expect(config).to.have.property('service', 'test') - expect(config).to.have.deep.property('serviceMapping', { b: 'bb' }) - expect(config).to.have.property('site', 'datadoghq.com') - expect(config).to.have.property('spanAttributeSchema', 'v1') - expect(config).to.have.property('spanComputePeerService', true) - expect(config).to.have.property('spanRemoveIntegrationFromService', true) - expect(config.tags).to.include({ foo: 'foo' }) - expect(config.tags).to.include({ service: 'test', version: '1.0.0', env: 'development' }) - expect(config).to.have.property('traceId128BitGenerationEnabled', false) - expect(config).to.have.property('traceId128BitLoggingEnabled', false) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', []) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', []) - expect(config).to.have.nested.property('url.hostname', 'agent2') - expect(config).to.have.nested.property('url.port', '6218') - expect(config).to.have.nested.property('url.protocol', 'https:') - expect(config).to.have.property('version', '1.0.0') + assertObjectContains(config, { + apmTracingEnabled: true, + appsec: { + apiSecurity: { + enabled: true, + endpointCollectionEnabled: true, + endpointCollectionMessageLimit: 150 + }, + blockedTemplateGraphql: BLOCKED_TEMPLATE_GRAPHQL, + blockedTemplateHtml: BLOCKED_TEMPLATE_HTML, + blockedTemplateJson: BLOCKED_TEMPLATE_JSON, + rules: RULES_JSON_PATH, + enabled: true, + eventTracking: { + mode: 'anonymous' + }, + extendedHeadersCollection: { + enabled: true, + maxHeaders: 42, + redaction: true + }, + obfuscatorKeyRegex: '.*', + obfuscatorValueRegex: '.*', + 
rasp: { + bodyCollection: true, + enabled: false + }, + rateLimit: 42, + stackTrace: { + enabled: false, + maxDepth: 42, + maxStackTraces: 5 + }, + wafTimeout: 42 + }, + clientIpEnabled: true, + clientIpHeader: 'x-true-client-ip', + codeOriginForSpans: { + enabled: true, + experimental: { + exit_spans: { + enabled: false + } + } + }, + dogstatsd: { + hostname: 'server', + port: '8888' + }, + dynamicInstrumentation: { + enabled: false, + probeFile: 'probes2.json', + redactedIdentifiers: ['foo2', 'bar2'], + redactionExcludedIdentifiers: ['a2', 'b2'], + uploadIntervalSeconds: 0.2 + }, + env: 'development', + experimental: { + aiguard: { + enabled: true, + endpoint: 'https://dd.datad0g.com/api/unstable/ai-guard', + maxContentSize: 1024 * 1024, + maxMessagesLength: 32, + timeout: 2000 + }, + enableGetRumData: false, + exporter: 'agent' + }, + flushMinSpans: 500, + flushInterval: 500, + iast: { + dbRowsToTaint: 3, + deduplicationEnabled: true, + enabled: true, + maxConcurrentRequests: 2, + maxContextOperations: 2, + redactionEnabled: true, + redactionNamePattern: 'REDACTION_NAME_PATTERN', + redactionValuePattern: 'REDACTION_VALUE_PATTERN', + requestSampling: 30, + securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method' + (DD_MAJOR < 6 ? 
'2' : '1'), + stackTrace: { + enabled: false + } + }, + llmobs: { + agentlessEnabled: false, + mlApp: 'myOtherMlApp' + }, + middlewareTracingEnabled: true, + peerServiceMapping: { d: 'dd' }, + protocolVersion: '0.5', + remoteConfig: { + pollInterval: 42 + }, + reportHostname: false, + runtimeMetrics: { + enabled: false + }, + runtimeMetricsRuntimeId: false, + service: 'test', + site: 'datadoghq.com', + spanAttributeSchema: 'v1', + spanComputePeerService: true, + spanRemoveIntegrationFromService: true, + traceId128BitGenerationEnabled: false, + traceId128BitLoggingEnabled: false, + version: '1.0.0', + serviceMapping: { b: 'bb' }, + tags: { + foo: 'foo', + service: 'test', + version: '1.0.0', + env: 'development' + }, + tracePropagationStyle: { + extract: [], + inject: [] + } + }) + assert.strictEqual(config.url.toString(), 'https://agent2:6218/') }) it('should give priority to non-experimental options', () => { @@ -1818,7 +2045,7 @@ describe('Config', () => { } }) - expect(config).to.have.deep.property('appsec', { + assert.deepStrictEqual(config.appsec, { apiSecurity: { enabled: true, sampleDelay: 30, @@ -1856,7 +2083,7 @@ describe('Config', () => { wafTimeout: 42 }) - expect(config).to.have.deep.property('iast', { + assert.deepStrictEqual(config.iast, { dbRowsToTaint: 3, deduplicationEnabled: false, enabled: true, @@ -1890,11 +2117,12 @@ describe('Config', () => { env: 'development' }) - expect(config).to.have.nested.property('url.protocol', 'https:') - expect(config).to.have.nested.property('url.hostname', 'agent3') - expect(config).to.have.nested.property('url.port', '7778') - expect(config).to.have.property('service', 'test') - expect(config).to.have.property('env', 'development') + assert.strictEqual(config.url.toString(), 'https://agent3:7778/') + + assertObjectContains(config, { + service: 'test', + env: 'development' + }) }) it('should give priority to individual options over tags', () => { @@ -1905,7 +2133,7 @@ describe('Config', () => { const config = 
getConfig() - expect(config.tags).to.include({ + assertObjectContains(config.tags, { service: 'test', env: 'dev', version: '1.0.0' @@ -1913,9 +2141,9 @@ describe('Config', () => { }) it('should sanitize the sample rate to be between 0 and 1', () => { - expect(getConfig({ sampleRate: -1 })).to.have.property('sampleRate', 0) - expect(getConfig({ sampleRate: 2 })).to.have.property('sampleRate', 1) - expect(getConfig({ sampleRate: NaN })).to.have.property('sampleRate', undefined) + assert.strictEqual(getConfig({ sampleRate: -1 })?.sampleRate, 0) + assert.strictEqual(getConfig({ sampleRate: 2 })?.sampleRate, 1) + assert.strictEqual(getConfig({ sampleRate: NaN })?.sampleRate, undefined) }) it('should ignore empty service names', () => { @@ -1923,7 +2151,7 @@ describe('Config', () => { const config = getConfig() - expect(config.tags).to.include({ + assertObjectContains(config.tags, { service: 'node' }) }) @@ -1937,9 +2165,11 @@ describe('Config', () => { } }) - expect(config).to.have.property('service', 'service') - expect(config).to.have.property('version', '0.1.0') - expect(config).to.have.property('env', 'test') + assertObjectContains(config, { + service: 'service', + version: '0.1.0', + env: 'test' + }) }) it('should trim whitespace characters around keys', () => { @@ -1947,14 +2177,14 @@ describe('Config', () => { const config = getConfig() - expect(config.tags).to.include({ foo: 'bar', baz: 'qux' }) + assertObjectContains(config.tags, { foo: 'bar', baz: 'qux' }) }) it('should not transform the lookup parameter', () => { const lookup = () => 'test' const config = getConfig({ lookup }) - expect(config.lookup).to.equal(lookup) + assert.strictEqual(config.lookup, lookup) }) it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if AWS_LAMBDA_FUNCTION_NAME is present', () => { @@ -1962,7 +2192,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) }) it('should not 
set DD_INSTRUMENTATION_TELEMETRY_ENABLED if FUNCTION_NAME and GCP_PROJECT are present', () => { @@ -1972,7 +2202,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) }) it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if K_SERVICE and FUNCTION_TARGET are present', () => { @@ -1982,7 +2212,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) }) it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if Azure Consumption Plan Function', () => { @@ -1993,18 +2223,18 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) }) it('should set telemetry default values', () => { const config = getConfig() - expect(config.telemetry).to.not.be.undefined - expect(config.telemetry.enabled).to.be.true - expect(config.telemetry.heartbeatInterval).to.eq(60000) - expect(config.telemetry.logCollection).to.be.true - expect(config.telemetry.debug).to.be.false - expect(config.telemetry.metrics).to.be.true + assert.notStrictEqual(config.telemetry, undefined) + assert.strictEqual(config.telemetry.enabled, true) + assert.strictEqual(config.telemetry.heartbeatInterval, 60000) + assert.strictEqual(config.telemetry.logCollection, true) + assert.strictEqual(config.telemetry.debug, false) + assert.strictEqual(config.telemetry.metrics, true) }) it('should set DD_TELEMETRY_HEARTBEAT_INTERVAL', () => { @@ -2013,7 +2243,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.heartbeatInterval).to.eq(42000) + assert.strictEqual(config.telemetry.heartbeatInterval, 42000) process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL = origTelemetryHeartbeatIntervalValue }) @@ -2024,7 +2254,7 @@ describe('Config', () => { const config = getConfig() - 
expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = origTraceTelemetryValue }) @@ -2035,7 +2265,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.metrics).to.be.false + assert.strictEqual(config.telemetry.metrics, false) process.env.DD_TELEMETRY_METRICS_ENABLED = origTelemetryMetricsEnabledValue }) @@ -2046,7 +2276,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.logCollection).to.be.false + assert.strictEqual(config.telemetry.logCollection, false) process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED = origLogsValue }) @@ -2057,7 +2287,7 @@ describe('Config', () => { const config = getConfig() - expect(config.telemetry.debug).to.be.true + assert.strictEqual(config.telemetry.debug, true) process.env.DD_TELEMETRY_DEBUG = origTelemetryDebugValue }) @@ -2067,7 +2297,7 @@ describe('Config', () => { const config = getConfig() - expect(config.remoteConfig.enabled).to.be.false + assert.strictEqual(config.remoteConfig.enabled, false) }) it('should not set DD_REMOTE_CONFIGURATION_ENABLED if FUNCTION_NAME and GCP_PROJECT are present', () => { @@ -2076,7 +2306,7 @@ describe('Config', () => { const config = getConfig() - expect(config.remoteConfig.enabled).to.be.false + assert.strictEqual(config.remoteConfig.enabled, false) }) it('should not set DD_REMOTE_CONFIGURATION_ENABLED if K_SERVICE and FUNCTION_TARGET are present', () => { @@ -2085,7 +2315,7 @@ describe('Config', () => { const config = getConfig() - expect(config.remoteConfig.enabled).to.be.false + assert.strictEqual(config.remoteConfig.enabled, false) }) it('should not set DD_REMOTE_CONFIGURATION_ENABLED if Azure Functions env vars are present', () => { @@ -2095,7 +2325,7 @@ describe('Config', () => { const config = getConfig() - expect(config.remoteConfig.enabled).to.be.false + assert.strictEqual(config.remoteConfig.enabled, false) }) it('should send empty 
array when remote config is called on empty options', () => { @@ -2103,8 +2333,8 @@ describe('Config', () => { config.configure({}, true) - expect(updateConfig).to.be.calledTwice - expect(updateConfig.getCall(1).args[0]).to.deep.equal([]) + sinon.assert.calledTwice(updateConfig) + assert.deepStrictEqual(updateConfig.getCall(1).args[0], []) }) it('should send remote config changes to telemetry', () => { @@ -2114,7 +2344,7 @@ describe('Config', () => { tracing_sampling_rate: 0 }, true) - expect(updateConfig.getCall(1).args[0]).to.deep.equal([ + assert.deepStrictEqual(updateConfig.getCall(1).args[0], [ { name: 'sampleRate', value: 0, origin: 'remote_config' } ]) }) @@ -2134,7 +2364,7 @@ describe('Config', () => { } ] }, true) - expect(config).to.have.deep.nested.property('sampler', { + assert.deepStrictEqual(config.sampler, { spanSamplingRules: [], rateLimit: 100, rules: [ @@ -2155,8 +2385,8 @@ describe('Config', () => { tracing_tags: { foo: 'bar' } }, true) - expect(config.tags).to.have.property('foo', 'bar') - expect(config.tags).to.have.property('runtime-id', runtimeId) + assert.strictEqual(config.tags?.foo, 'bar') + assert.strictEqual(config.tags?.['runtime-id'], runtimeId) }) it('should ignore invalid iast.requestSampling', () => { @@ -2167,7 +2397,7 @@ describe('Config', () => { } } }) - expect(config.iast.requestSampling).to.be.equals(30) + assert.strictEqual(config.iast.requestSampling, 30) }) it('should load span sampling rules from json file', () => { @@ -2176,7 +2406,7 @@ describe('Config', () => { const config = getConfig() - expect(config.sampler).to.have.deep.nested.property('spanSamplingRules', [ + assert.deepStrictEqual(config.sampler?.spanSamplingRules, [ { service: 'mysql', name: 'mysql.query', sampleRate: 0.0, maxPerSecond: 1 }, { service: 'mysql', sampleRate: 0.5 }, { service: 'mysql', sampleRate: 1.0 }, @@ -2198,19 +2428,16 @@ describe('Config', () => { } }) - expect(log.error).to.be.callCount(3) - expect(log.error.firstCall) - 
.to.have.been.calledWithExactly('Error reading file %s', 'DOES_NOT_EXIST.json', error) - expect(log.error.secondCall) - .to.have.been.calledWithExactly('Error reading file %s', 'DOES_NOT_EXIST.html', error) - expect(log.error.thirdCall) - .to.have.been.calledWithExactly('Error reading file %s', 'DOES_NOT_EXIST.json', error) + sinon.assert.callCount(log.error, 3) + sinon.assert.calledWithExactly(log.error.firstCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) + sinon.assert.calledWithExactly(log.error.secondCall, 'Error reading file %s', 'DOES_NOT_EXIST.html', error) + sinon.assert.calledWithExactly(log.error.thirdCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) - expect(config.appsec.enabled).to.be.true - expect(config.appsec.rules).to.eq('path/to/rules.json') - expect(config.appsec.blockedTemplateHtml).to.be.undefined - expect(config.appsec.blockedTemplateJson).to.be.undefined - expect(config.appsec.blockedTemplateGraphql).to.be.undefined + assert.strictEqual(config.appsec.enabled, true) + assert.strictEqual(config.appsec.rules, 'path/to/rules.json') + assert.strictEqual(config.appsec.blockedTemplateHtml, undefined) + assert.strictEqual(config.appsec.blockedTemplateJson, undefined) + assert.strictEqual(config.appsec.blockedTemplateGraphql, undefined) }) it('should enable api security with DD_EXPERIMENTAL_API_SECURITY_ENABLED', () => { @@ -2218,7 +2445,7 @@ describe('Config', () => { const config = getConfig() - expect(config.appsec.apiSecurity.enabled).to.be.true + assert.strictEqual(config.appsec.apiSecurity.enabled, true) }) it('should disable api security with DD_EXPERIMENTAL_API_SECURITY_ENABLED', () => { @@ -2226,7 +2453,7 @@ describe('Config', () => { const config = getConfig() - expect(config.appsec.apiSecurity.enabled).to.be.false + assert.strictEqual(config.appsec.apiSecurity.enabled, false) }) it('should ignore DD_EXPERIMENTAL_API_SECURITY_ENABLED with DD_API_SECURITY_ENABLED=true', () => { @@ -2235,7 +2462,7 @@ describe('Config', 
() => { const config = getConfig() - expect(config.appsec.apiSecurity.enabled).to.be.true + assert.strictEqual(config.appsec.apiSecurity.enabled, true) }) it('should prioritize DD_DOGSTATSD_HOST over DD_DOGSTATSD_HOSTNAME', () => { @@ -2244,7 +2471,7 @@ describe('Config', () => { const config = getConfig() - expect(config).to.have.nested.property('dogstatsd.hostname', 'localhost') + assert.strictEqual(config.dogstatsd?.hostname, 'localhost') }) context('auto configuration w/ unix domain sockets', () => { @@ -2253,14 +2480,14 @@ describe('Config', () => { osType = 'Windows_NT' const config = getConfig() - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) }) context('socket does not exist', () => { it('should not be used', () => { const config = getConfig() - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) }) context('socket exists', () => { @@ -2271,8 +2498,8 @@ describe('Config', () => { it('should be used when no options and no env vars', () => { const config = getConfig() - expect(existsSyncParam).to.equal('/var/run/datadog/apm.socket') - expect(config.url.toString()).to.equal('unix:///var/run/datadog/apm.socket') + assert.strictEqual(existsSyncParam, '/var/run/datadog/apm.socket') + assert.strictEqual(config.url.toString(), 'unix:///var/run/datadog/apm.socket') }) it('should not be used when DD_TRACE_AGENT_URL provided', () => { @@ -2280,7 +2507,7 @@ describe('Config', () => { const config = getConfig() - expect(config.url.toString()).to.equal('https://example.com/') + assert.strictEqual(config.url.toString(), 'https://example.com/') }) it('should not be used when DD_TRACE_URL provided', () => { @@ -2288,13 +2515,13 @@ describe('Config', () => { const config = getConfig() - expect(config.url.toString()).to.equal('https://example.com/') + assert.strictEqual(config.url.toString(), 'https://example.com/') }) it('should not be used when options.url provided', () => { const config = getConfig({ url: 
'https://example.com/' }) - expect(config.url.toString()).to.equal('https://example.com/') + assert.strictEqual(config.url.toString(), 'https://example.com/') }) it('should not be used when DD_TRACE_AGENT_PORT provided', () => { @@ -2302,13 +2529,13 @@ describe('Config', () => { const config = getConfig() - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) it('should not be used when options.port provided', () => { const config = getConfig({ port: 12345 }) - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) it('should not be used when DD_TRACE_AGENT_HOSTNAME provided', () => { @@ -2316,7 +2543,7 @@ describe('Config', () => { const config = getConfig() - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) it('should not be used when DD_AGENT_HOST provided', () => { @@ -2324,13 +2551,13 @@ describe('Config', () => { const config = getConfig() - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) it('should not be used when options.hostname provided', () => { const config = getConfig({ hostname: 'example.com' }) - expect(config.url).to.be.undefined + assert.strictEqual(config.url, undefined) }) }) }) @@ -2356,104 +2583,104 @@ describe('Config', () => { }) it('should activate git upload by default', () => { const config = getConfig(options) - expect(config).to.have.property('isGitUploadEnabled', true) + assert.strictEqual(config.isGitUploadEnabled, true) }) it('should disable git upload if the DD_CIVISIBILITY_GIT_UPLOAD_ENABLED is set to false', () => { process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isGitUploadEnabled', false) + assert.strictEqual(config.isGitUploadEnabled, false) }) it('should activate ITR by default', () => { const config = getConfig(options) - expect(config).to.have.property('isIntelligentTestRunnerEnabled', true) + 
assert.strictEqual(config.isIntelligentTestRunnerEnabled, true) }) it('should disable ITR if DD_CIVISIBILITY_ITR_ENABLED is set to false', () => { process.env.DD_CIVISIBILITY_ITR_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isIntelligentTestRunnerEnabled', false) + assert.strictEqual(config.isIntelligentTestRunnerEnabled, false) }) it('should enable manual testing API by default', () => { const config = getConfig(options) - expect(config).to.have.property('isManualApiEnabled', true) + assert.strictEqual(config.isManualApiEnabled, true) }) it('should disable manual testing API if DD_CIVISIBILITY_MANUAL_API_ENABLED is set to false', () => { process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isManualApiEnabled', false) + assert.strictEqual(config.isManualApiEnabled, false) }) it('should disable memcached command tagging by default', () => { const config = getConfig(options) - expect(config).to.have.property('memcachedCommandEnabled', false) + assert.strictEqual(config.memcachedCommandEnabled, false) }) it('should enable memcached command tagging if DD_TRACE_MEMCACHED_COMMAND_ENABLED is enabled', () => { process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED = 'true' const config = getConfig(options) - expect(config).to.have.property('memcachedCommandEnabled', true) + assert.strictEqual(config.memcachedCommandEnabled, true) }) it('should enable telemetry', () => { const config = getConfig(options) - expect(config).to.nested.property('telemetry.enabled', true) + assert.strictEqual(config.telemetry?.enabled, true) }) it('should enable early flake detection by default', () => { const config = getConfig(options) - expect(config).to.have.property('isEarlyFlakeDetectionEnabled', true) + assert.strictEqual(config.isEarlyFlakeDetectionEnabled, true) }) it('should disable early flake detection if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', () => { 
process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isEarlyFlakeDetectionEnabled', false) + assert.strictEqual(config.isEarlyFlakeDetectionEnabled, false) }) it('should enable flaky test retries by default', () => { const config = getConfig(options) - expect(config).to.have.property('isFlakyTestRetriesEnabled', true) + assert.strictEqual(config.isFlakyTestRetriesEnabled, true) }) it('should disable flaky test retries if isFlakyTestRetriesEnabled is false', () => { process.env.DD_CIVISIBILITY_FLAKY_RETRY_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isFlakyTestRetriesEnabled', false) + assert.strictEqual(config.isFlakyTestRetriesEnabled, false) }) it('should read DD_CIVISIBILITY_FLAKY_RETRY_COUNT if present', () => { process.env.DD_CIVISIBILITY_FLAKY_RETRY_COUNT = '4' const config = getConfig(options) - expect(config).to.have.property('flakyTestRetriesCount', 4) + assert.strictEqual(config.flakyTestRetriesCount, 4) }) it('should default DD_CIVISIBILITY_FLAKY_RETRY_COUNT to 5', () => { const config = getConfig(options) - expect(config).to.have.property('flakyTestRetriesCount', 5) + assert.strictEqual(config.flakyTestRetriesCount, 5) }) it('should round non integer values of DD_CIVISIBILITY_FLAKY_RETRY_COUNT', () => { process.env.DD_CIVISIBILITY_FLAKY_RETRY_COUNT = '4.1' const config = getConfig(options) - expect(config).to.have.property('flakyTestRetriesCount', 4) + assert.strictEqual(config.flakyTestRetriesCount, 4) }) it('should set the default to DD_CIVISIBILITY_FLAKY_RETRY_COUNT if it is not a number', () => { process.env.DD_CIVISIBILITY_FLAKY_RETRY_COUNT = 'a' const config = getConfig(options) - expect(config).to.have.property('flakyTestRetriesCount', 5) + assert.strictEqual(config.flakyTestRetriesCount, 5) }) it('should set the session name if DD_TEST_SESSION_NAME is set', () => { process.env.DD_TEST_SESSION_NAME = 'my-test-session' 
const config = getConfig(options) - expect(config).to.have.property('ciVisibilityTestSessionName', 'my-test-session') + assert.strictEqual(config.ciVisibilityTestSessionName, 'my-test-session') }) it('should not enable agentless log submission by default', () => { const config = getConfig(options) - expect(config).to.have.property('ciVisAgentlessLogSubmissionEnabled', false) + assert.strictEqual(config.ciVisAgentlessLogSubmissionEnabled, false) }) it('should enable agentless log submission if DD_AGENTLESS_LOG_SUBMISSION_ENABLED is true', () => { process.env.DD_AGENTLESS_LOG_SUBMISSION_ENABLED = 'true' const config = getConfig(options) - expect(config).to.have.property('ciVisAgentlessLogSubmissionEnabled', true) + assert.strictEqual(config.ciVisAgentlessLogSubmissionEnabled, true) }) it('should set isTestDynamicInstrumentationEnabled by default', () => { const config = getConfig(options) - expect(config).to.have.property('isTestDynamicInstrumentationEnabled', true) + assert.strictEqual(config.isTestDynamicInstrumentationEnabled, true) }) it('should set isTestDynamicInstrumentationEnabled to false if DD_TEST_FAILED_TEST_REPLAY_ENABLED is false', () => { process.env.DD_TEST_FAILED_TEST_REPLAY_ENABLED = 'false' const config = getConfig(options) - expect(config).to.have.property('isTestDynamicInstrumentationEnabled', false) + assert.strictEqual(config.isTestDynamicInstrumentationEnabled, false) }) }) context('ci visibility mode is not enabled', () => { @@ -2461,14 +2688,16 @@ describe('Config', () => { process.env.DD_CIVISIBILITY_ITR_ENABLED = 'true' process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = 'true' const config = getConfig(options) - expect(config).to.have.property('isIntelligentTestRunnerEnabled', false) - expect(config).to.have.property('isGitUploadEnabled', false) + assertObjectContains(config, { + isIntelligentTestRunnerEnabled: false, + isGitUploadEnabled: false + }) }) }) it('disables telemetry if inside a jest worker', () => { process.env.JEST_WORKER_ID = 
'1' const config = getConfig(options) - expect(config.telemetry.enabled).to.be.false + assert.strictEqual(config.telemetry.enabled, false) }) }) @@ -2493,143 +2722,149 @@ describe('Config', () => { process.env.DD_GIT_COMMIT_SHA = DUMMY_COMMIT_SHA process.env.DD_GIT_REPOSITORY_URL = DUMMY_REPOSITORY_URL const config = getConfig({}) - expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) - expect(config).to.have.property('repositoryUrl', DUMMY_REPOSITORY_URL) + assert.strictEqual(config.commitSHA, DUMMY_COMMIT_SHA) + assert.strictEqual(config.repositoryUrl, DUMMY_REPOSITORY_URL) }) it('reads DD_GIT_* env vars and filters out user data', () => { process.env.DD_GIT_REPOSITORY_URL = 'https://user:password@github.com/DataDog/dd-trace-js.git' const config = getConfig({}) - expect(config).to.have.property('repositoryUrl', 'https://github.com/DataDog/dd-trace-js.git') + assert.strictEqual(config.repositoryUrl, 'https://github.com/DataDog/dd-trace-js.git') }) it('reads DD_TAGS env var', () => { process.env.DD_TAGS = `git.commit.sha:${DUMMY_COMMIT_SHA},git.repository_url:${DUMMY_REPOSITORY_URL}` process.env.DD_GIT_REPOSITORY_URL = DUMMY_REPOSITORY_URL const config = getConfig({}) - expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) - expect(config).to.have.property('repositoryUrl', DUMMY_REPOSITORY_URL) + assert.strictEqual(config.commitSHA, DUMMY_COMMIT_SHA) + assert.strictEqual(config.repositoryUrl, DUMMY_REPOSITORY_URL) }) it('reads git.properties if it is available', () => { process.env.DD_GIT_PROPERTIES_FILE = DD_GIT_PROPERTIES_FILE const config = getConfig({}) - expect(config).to.have.property('commitSHA', '4e7da8069bcf5ffc8023603b95653e2dc99d1c7d') - expect(config).to.have.property('repositoryUrl', DUMMY_REPOSITORY_URL) + assert.strictEqual(config.commitSHA, '4e7da8069bcf5ffc8023603b95653e2dc99d1c7d') + assert.strictEqual(config.repositoryUrl, DUMMY_REPOSITORY_URL) }) it('does not crash if git.properties is not available', () => { 
process.env.DD_GIT_PROPERTIES_FILE = '/does/not/exist' // Should not throw const config = getConfig({}) - expect(config).to.be.an('object') + assert.ok(config !== null && typeof config === 'object' && !Array.isArray(config)) }) it('does not read git.properties if env vars are passed', () => { process.env.DD_GIT_PROPERTIES_FILE = DD_GIT_PROPERTIES_FILE process.env.DD_GIT_COMMIT_SHA = DUMMY_COMMIT_SHA process.env.DD_GIT_REPOSITORY_URL = 'https://github.com:DataDog/dd-trace-js.git' const config = getConfig({}) - expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) - expect(config).to.have.property('repositoryUrl', 'https://github.com:DataDog/dd-trace-js.git') + assert.strictEqual(config.commitSHA, DUMMY_COMMIT_SHA) + assert.strictEqual(config.repositoryUrl, 'https://github.com:DataDog/dd-trace-js.git') }) it('still reads git.properties if one of the env vars is missing', () => { process.env.DD_GIT_PROPERTIES_FILE = DD_GIT_PROPERTIES_FILE process.env.DD_GIT_COMMIT_SHA = DUMMY_COMMIT_SHA const config = getConfig({}) - expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) - expect(config).to.have.property('repositoryUrl', DUMMY_REPOSITORY_URL) + assert.strictEqual(config.commitSHA, DUMMY_COMMIT_SHA) + assert.strictEqual(config.repositoryUrl, DUMMY_REPOSITORY_URL) }) it('reads git.properties and filters out credentials', () => { process.env.DD_GIT_PROPERTIES_FILE = require.resolve('./fixtures/config/git.properties.credentials') const config = getConfig({}) - expect(config).to.have.property('commitSHA', '4e7da8069bcf5ffc8023603b95653e2dc99d1c7d') - expect(config).to.have.property('repositoryUrl', 'https://github.com/datadog/dd-trace-js') + assertObjectContains(config, { + commitSHA: '4e7da8069bcf5ffc8023603b95653e2dc99d1c7d', + repositoryUrl: 'https://github.com/datadog/dd-trace-js' + }) }) it('does not read git metadata if DD_TRACE_GIT_METADATA_ENABLED is false', () => { process.env.DD_TRACE_GIT_METADATA_ENABLED = 'false' const config = getConfig({}) - 
expect(config).not.to.have.property('commitSHA') - expect(config).not.to.have.property('repositoryUrl') + assert.ok(!(Object.hasOwn(config, 'commitSHA'))) + assert.ok(!(Object.hasOwn(config, 'repositoryUrl'))) }) it('reads .git/ folder if it is available', () => { process.env.DD_GIT_FOLDER_PATH = DD_GIT_FOLDER_PATH const config = getConfig({}) - expect(config).to.have.property('repositoryUrl', 'git@github.com:DataDog/dd-trace-js.git') - expect(config).to.have.property('commitSHA', '964886d9ec0c9fc68778e4abb0aab4d9982ce2b5') + assertObjectContains(config, { + repositoryUrl: 'git@github.com:DataDog/dd-trace-js.git', + commitSHA: '964886d9ec0c9fc68778e4abb0aab4d9982ce2b5' + }) }) it('does not crash if .git/ folder is not available', () => { process.env.DD_GIT_FOLDER_PATH = '/does/not/exist/' // Should not throw const config = getConfig({}) - expect(config).to.be.an('object') + assert.ok(config !== null && typeof config === 'object' && !Array.isArray(config)) }) it('does not read .git/ folder if env vars are passed', () => { process.env.DD_GIT_FOLDER_PATH = DD_GIT_FOLDER_PATH process.env.DD_GIT_COMMIT_SHA = DUMMY_COMMIT_SHA process.env.DD_GIT_REPOSITORY_URL = 'https://github.com:DataDog/dd-trace-js.git' const config = getConfig({}) - expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) - expect(config).to.have.property('repositoryUrl', 'https://github.com:DataDog/dd-trace-js.git') + assert.strictEqual(config.commitSHA, DUMMY_COMMIT_SHA) + assert.strictEqual(config.repositoryUrl, 'https://github.com:DataDog/dd-trace-js.git') }) it('still reads .git/ if one of the env vars is missing', () => { process.env.DD_GIT_FOLDER_PATH = DD_GIT_FOLDER_PATH process.env.DD_GIT_REPOSITORY_URL = 'git@github.com:DataDog/dummy-dd-trace-js.git' const config = getConfig({}) - expect(config).to.have.property('commitSHA', '964886d9ec0c9fc68778e4abb0aab4d9982ce2b5') - expect(config).to.have.property('repositoryUrl', 'git@github.com:DataDog/dummy-dd-trace-js.git') + 
assertObjectContains(config, { + commitSHA: '964886d9ec0c9fc68778e4abb0aab4d9982ce2b5', + repositoryUrl: 'git@github.com:DataDog/dummy-dd-trace-js.git' + }) }) }) context('llmobs config', () => { it('should disable llmobs by default', () => { const config = getConfig() - expect(config.llmobs.enabled).to.be.false + assert.strictEqual(config.llmobs.enabled, false) // check origin computation - expect(updateConfig.getCall(0).args[0]).to.deep.include({ + assertObjectContains(updateConfig.getCall(0).args[0], [{ name: 'llmobs.enabled', value: false, origin: 'default' - }) + }]) }) it('should enable llmobs if DD_LLMOBS_ENABLED is set to true', () => { process.env.DD_LLMOBS_ENABLED = 'true' const config = getConfig() - expect(config.llmobs.enabled).to.be.true + assert.strictEqual(config.llmobs.enabled, true) // check origin computation - expect(updateConfig.getCall(0).args[0]).to.deep.include({ + assertObjectContains(updateConfig.getCall(0).args[0], [{ name: 'llmobs.enabled', value: true, origin: 'env_var' - }) + }]) }) it('should disable llmobs if DD_LLMOBS_ENABLED is set to false', () => { process.env.DD_LLMOBS_ENABLED = 'false' const config = getConfig() - expect(config.llmobs.enabled).to.be.false + assert.strictEqual(config.llmobs.enabled, false) // check origin computation - expect(updateConfig.getCall(0).args[0]).to.deep.include({ + assertObjectContains(updateConfig.getCall(0).args[0], [{ name: 'llmobs.enabled', value: false, origin: 'env_var' - }) + }]) }) it('should enable llmobs with options and DD_LLMOBS_ENABLED is not set', () => { const config = getConfig({ llmobs: {} }) - expect(config.llmobs.enabled).to.be.true + assert.strictEqual(config.llmobs.enabled, true) // check origin computation - expect(updateConfig.getCall(0).args[0]).to.deep.include({ + assertObjectContains(updateConfig.getCall(0).args[0], [{ name: 'llmobs.enabled', value: true, origin: 'code' - }) + }]) }) it('should have DD_LLMOBS_ENABLED take priority over options', () => { 
process.env.DD_LLMOBS_ENABLED = 'false' const config = getConfig({ llmobs: {} }) - expect(config.llmobs.enabled).to.be.false + assert.strictEqual(config.llmobs.enabled, false) // check origin computation - expect(updateConfig.getCall(0).args[0]).to.deep.include({ + assertObjectContains(updateConfig.getCall(0).args[0], [{ name: 'llmobs.enabled', value: false, origin: 'env_var' - }) + }]) }) }) @@ -2648,56 +2883,70 @@ describe('Config', () => { it('defaults', () => { const taggingConfig = getConfig().cloudPayloadTagging - expect(taggingConfig).to.have.property('requestsEnabled', false) - expect(taggingConfig).to.have.property('responsesEnabled', false) - expect(taggingConfig).to.have.property('maxDepth', 10) + assertObjectContains(taggingConfig, { + requestsEnabled: false, + responsesEnabled: false, + maxDepth: 10 + }) }) it('enabling requests with no additional filter', () => { process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging - expect(taggingConfig).to.have.property('requestsEnabled', true) - expect(taggingConfig).to.have.property('responsesEnabled', false) - expect(taggingConfig).to.have.property('maxDepth', 10) + assertObjectContains(taggingConfig, { + requestsEnabled: true, + responsesEnabled: false, + maxDepth: 10 + }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { - expect(service.request).to.deep.equal(staticConfig[serviceName].request) + assert.deepStrictEqual(service.request, staticConfig[serviceName].request) } }) it('enabling requests with an additional filter', () => { process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging - expect(taggingConfig).to.have.property('requestsEnabled', true) - expect(taggingConfig).to.have.property('responsesEnabled', false) - expect(taggingConfig).to.have.property('maxDepth', 10) + assertObjectContains(taggingConfig, { + requestsEnabled: 
true, + responsesEnabled: false, + maxDepth: 10 + }) const awsRules = taggingConfig.rules.aws for (const [, service] of Object.entries(awsRules)) { - expect(service.request).to.include('$.foo.bar') + assertObjectContains(service, { + request: ['$.foo.bar'], + }) } }) it('enabling responses with no additional filter', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging - expect(taggingConfig).to.have.property('requestsEnabled', false) - expect(taggingConfig).to.have.property('responsesEnabled', true) - expect(taggingConfig).to.have.property('maxDepth', 10) + assertObjectContains(taggingConfig, { + requestsEnabled: false, + responsesEnabled: true, + maxDepth: 10 + }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { - expect(service.response).to.deep.equal(staticConfig[serviceName].response) + assert.deepStrictEqual(service.response, staticConfig[serviceName].response) } }) it('enabling responses with an additional filter', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging - expect(taggingConfig).to.have.property('requestsEnabled', false) - expect(taggingConfig).to.have.property('responsesEnabled', true) - expect(taggingConfig).to.have.property('maxDepth', 10) + assertObjectContains(taggingConfig, { + requestsEnabled: false, + responsesEnabled: true, + maxDepth: 10 + }) const awsRules = taggingConfig.rules.aws for (const [, service] of Object.entries(awsRules)) { - expect(service.response).to.include('$.foo.bar') + assertObjectContains(service, { + response: ['$.foo.bar'], + }) } }) @@ -2745,7 +2994,7 @@ describe('Config', () => { process.env.DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED = '1' const config = getConfig() - expect(config).to.have.property('apmTracingEnabled', false) + assert.strictEqual(config.apmTracingEnabled, false) }) it('should win 
DD_APM_TRACING_ENABLED', () => { @@ -2753,14 +3002,14 @@ describe('Config', () => { process.env.DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED = 'true' const config = getConfig() - expect(config).to.have.property('apmTracingEnabled', true) + assert.strictEqual(config.apmTracingEnabled, true) }) it('should disable apm tracing with legacy experimental.appsec.standalone.enabled option', () => { process.env.DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED = '0' const config = getConfig({ experimental: { appsec: { standalone: { enabled: true } } } }) - expect(config).to.have.property('apmTracingEnabled', false) + assert.strictEqual(config.apmTracingEnabled, false) }) it('should win apmTracingEnabled option', () => { @@ -2770,17 +3019,21 @@ describe('Config', () => { apmTracingEnabled: false, experimental: { appsec: { standalone: { enabled: true } } } }) - expect(config).to.have.property('apmTracingEnabled', false) + assert.strictEqual(config.apmTracingEnabled, false) }) it('should not affect stats', () => { process.env.DD_TRACE_STATS_COMPUTATION_ENABLED = 'true' const config = getConfig() - expect(config).to.have.property('apmTracingEnabled', true) - expect(config).to.have.nested.property('stats.enabled', true) + assertObjectContains(config, { + apmTracingEnabled: true, + stats: { + enabled: true + } + }) - expect(updateConfig.getCall(0).args[0]).to.deep.include.members([ + assertObjectContains(updateConfig.getCall(0).args[0], [ { name: 'stats.enabled', value: true, origin: 'calculated' } ]) }) @@ -2790,10 +3043,14 @@ describe('Config', () => { process.env.DD_TRACE_STATS_COMPUTATION_ENABLED = 'true' const config = getConfig() - expect(config).to.have.property('apmTracingEnabled', false) - expect(config).to.have.nested.property('stats.enabled', false) + assertObjectContains(config, { + apmTracingEnabled: false, + stats: { + enabled: false + } + }) - expect(updateConfig.getCall(0).args[0]).to.deep.include.members([ + assertObjectContains(updateConfig.getCall(0).args[0], [ { name: 
'stats.enabled', value: false, origin: 'calculated' } ]) }) @@ -2802,8 +3059,12 @@ describe('Config', () => { const config = getConfig({ apmTracingEnabled: false }) - expect(config).to.have.property('apmTracingEnabled', false) - expect(config).to.have.nested.property('stats.enabled', false) + assertObjectContains(config, { + apmTracingEnabled: false, + stats: { + enabled: false + } + }) }) }) @@ -2834,7 +3095,7 @@ apm_configuration_default: DD_RUNTIME_METRICS_ENABLED: true `) const config = getConfig() - expect(config).to.have.nested.property('runtimeMetrics.enabled', true) + assert.strictEqual(config.runtimeMetrics?.enabled, true) }) it('should apply service specific config', () => { @@ -2851,13 +3112,13 @@ rules: DD_SERVICE: my-service `) const config = getConfig() - expect(config).to.have.property('service', 'my-service') + assert.strictEqual(config.service, 'my-service') }) it('should respect the priority sources', () => { // 1. Default const config1 = getConfig() - expect(config1).to.have.property('service', 'node') + assert.strictEqual(config1?.service, 'node') // 2. Local stable > Default fs.writeFileSync( @@ -2873,20 +3134,12 @@ rules: DD_SERVICE: service_local_stable `) const config2 = getConfig() - expect(config2).to.have.property( - 'service', - 'service_local_stable', - 'default < local stable config' - ) + assert.strictEqual(config2?.service, 'service_local_stable') // 3. Env > Local stable > Default process.env.DD_SERVICE = 'service_env' const config3 = getConfig() - expect(config3).to.have.property( - 'service', - 'service_env', - 'default < local stable config < env var' - ) + assert.strictEqual(config3?.service, 'service_env') // 4. 
Fleet Stable > Env > Local stable > Default fs.writeFileSync( @@ -2902,19 +3155,11 @@ rules: DD_SERVICE: service_fleet_stable `) const config4 = getConfig() - expect(config4).to.have.property( - 'service', - 'service_fleet_stable', - 'default < local stable config < env var < fleet stable config' - ) + assert.strictEqual(config4?.service, 'service_fleet_stable') // 5. Code > Fleet Stable > Env > Local stable > Default const config5 = getConfig({ service: 'service_code' }) - expect(config5).to.have.property( - 'service', - 'service_code', - 'default < local stable config < env var < fleet config < code' - ) + assert.strictEqual(config5?.service, 'service_code') }) it('should ignore unknown keys', () => { @@ -2926,10 +3171,10 @@ apm_configuration_default: DD_FOOBAR_ENABLED: baz `) const stableConfig = new StableConfig() - expect(stableConfig.warnings).to.have.lengthOf(0) + assert.strictEqual(stableConfig.warnings?.length, 0) const config = getConfig() - expect(config).to.have.nested.property('runtimeMetrics.enabled', true) + assert.strictEqual(config.runtimeMetrics?.enabled, true) }) it('should log a warning if the YAML files are malformed', () => { @@ -2940,12 +3185,12 @@ apm_configuration_default: DD_RUNTIME_METRICS_ENABLED true `) const stableConfig = new StableConfig() - expect(stableConfig.warnings).to.have.lengthOf(1) + assert.strictEqual(stableConfig.warnings?.length, 1) }) it('should only load the WASM module if the stable config files exist', () => { const stableConfig1 = new StableConfig() - expect(stableConfig1).to.have.property('wasm_loaded', false) + assert.strictEqual(stableConfig1?.wasm_loaded, false) fs.writeFileSync( process.env.DD_TEST_LOCAL_CONFIG_PATH, @@ -2954,7 +3199,7 @@ apm_configuration_default: DD_RUNTIME_METRICS_ENABLED: true `) const stableConfig2 = new StableConfig() - expect(stableConfig2).to.have.property('wasm_loaded', true) + assert.strictEqual(stableConfig2?.wasm_loaded, true) }) it('should not load the WASM module in a serverless 
environment', () => { @@ -2967,7 +3212,7 @@ apm_configuration_default: process.env.AWS_LAMBDA_FUNCTION_NAME = 'my-great-lambda-function' const stableConfig = getConfig() - expect(stableConfig).to.not.have.property('stableConfig') + assert.ok(!(Object.hasOwn(stableConfig, 'stableConfig'))) }) it('should support all extended configs across product areas', () => { @@ -2997,31 +3242,37 @@ apm_configuration_default: const config = getConfig() // Tracing - expect(config).to.have.nested.property('traceId128BitGenerationEnabled', true) - expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['tracecontext']) - expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['tracecontext']) + assert.strictEqual(config.traceId128BitGenerationEnabled, true) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['tracecontext']) // Appsec - expect(config).to.have.nested.property('appsec.rateLimit', 100) - expect(config).to.have.nested.property('appsec.stackTrace.maxStackTraces', 2) - expect(config).to.have.nested.property('appsec.obfuscatorKeyRegex', 'password|token') - - // IAST - expect(config).to.have.nested.property('iast.requestSampling', 50) - expect(config).to.have.nested.property('iast.maxConcurrentRequests', 10) - - // Telemetry - expect(config).to.have.nested.property('telemetry.heartbeatInterval', 42000) - expect(config).to.have.nested.property('telemetry.metrics', false) - - // LLMObs - expect(config).to.have.nested.property('llmobs.mlApp', 'my-llm-app') - - // Profiling - expect(config).to.have.nested.property('profiling.exporters', 'agent') - - // Dynamic Instrumentation - expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', '/tmp/probes') + assertObjectContains(config, { + appsec: { + rateLimit: 100, + stackTrace: { + maxStackTraces: 2 + }, + obfuscatorKeyRegex: 'password|token' + }, + iast: { + requestSampling: 50, 
+ maxConcurrentRequests: 10 + }, + telemetry: { + heartbeatInterval: 42000, + metrics: false + }, + llmobs: { + mlApp: 'my-llm-app' + }, + profiling: { + exporters: 'agent' + }, + dynamicInstrumentation: { + probeFile: '/tmp/probes' + } + }) }) // Regression test for fields that were previously set directly from environment variables @@ -3041,13 +3292,19 @@ apm_configuration_default: DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH: 5 `) let config = getConfig() - expect(config).to.have.property('apiKey', 'local-api-key') - expect(config).to.have.property('appKey', 'local-app-key') - expect(config).to.have.nested.property('installSignature.id', 'local-install-id') - expect(config).to.have.nested.property('installSignature.time', '1234567890') - expect(config).to.have.nested.property('installSignature.type', 'local_install') - expect(config).to.have.nested.property('cloudPayloadTagging.requestsEnabled', true) - expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 5) + assertObjectContains(config, { + apiKey: 'local-api-key', + appKey: 'local-app-key', + installSignature: { + id: 'local-install-id', + time: '1234567890', + type: 'local_install' + }, + cloudPayloadTagging: { + requestsEnabled: true, + maxDepth: 5 + } + }) // Test 2: Env vars should take precedence over local stable config process.env.DD_API_KEY = 'env-api-key' @@ -3055,10 +3312,16 @@ apm_configuration_default: process.env.DD_INSTRUMENTATION_INSTALL_ID = 'env-install-id' process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = '7' config = getConfig() - expect(config).to.have.property('apiKey', 'env-api-key') - expect(config).to.have.property('appKey', 'env-app-key') - expect(config).to.have.nested.property('installSignature.id', 'env-install-id') - expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 7) + assertObjectContains(config, { + apiKey: 'env-api-key', + appKey: 'env-app-key', + installSignature: { + id: 'env-install-id' + }, + cloudPayloadTagging: { + maxDepth: 7 + } 
+ }) // Test 3: Fleet stable config should take precedence over env vars fs.writeFileSync( @@ -3081,14 +3344,20 @@ rules: DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH: 15 `) config = getConfig() - expect(config).to.have.property('apiKey', 'fleet-api-key') - expect(config).to.have.property('appKey', 'fleet-app-key') - expect(config).to.have.nested.property('installSignature.id', 'fleet-install-id') - expect(config).to.have.nested.property('installSignature.time', '9999999999') - expect(config).to.have.nested.property('installSignature.type', 'fleet_install') - expect(config).to.have.nested.property('cloudPayloadTagging.requestsEnabled', false) - expect(config).to.have.nested.property('cloudPayloadTagging.responsesEnabled', true) - expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 15) + assertObjectContains(config, { + apiKey: 'fleet-api-key', + appKey: 'fleet-app-key', + installSignature: { + id: 'fleet-install-id', + time: '9999999999', + type: 'fleet_install' + }, + cloudPayloadTagging: { + requestsEnabled: false, + responsesEnabled: true, + maxDepth: 15 + } + }) }) }) @@ -3118,60 +3387,60 @@ rules: it('should be false by default', () => { const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) it('should be enabled when DD_TRACE_RESOURCE_RENAMING_ENABLED is true', () => { process.env.DD_TRACE_RESOURCE_RENAMING_ENABLED = 'true' const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', true) + assert.strictEqual(config.resourceRenamingEnabled, true) }) it('should be disabled when DD_TRACE_RESOURCE_RENAMING_ENABLED is false', () => { process.env.DD_TRACE_RESOURCE_RENAMING_ENABLED = 'false' const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) it('should be enabled when appsec is enabled via env var', () => { 
process.env.DD_APPSEC_ENABLED = 'true' const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', true) + assert.strictEqual(config.resourceRenamingEnabled, true) }) it('should be enabled when appsec is enabled via options', () => { const config = getConfig({ appsec: { enabled: true } }) - expect(config).to.have.property('resourceRenamingEnabled', true) + assert.strictEqual(config.resourceRenamingEnabled, true) }) it('should prioritize DD_TRACE_RESOURCE_RENAMING_ENABLED over appsec setting', () => { process.env.DD_APPSEC_ENABLED = 'true' process.env.DD_TRACE_RESOURCE_RENAMING_ENABLED = 'false' const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) it('should prioritize DD_TRACE_RESOURCE_RENAMING_ENABLED over appsec option', () => { process.env.DD_TRACE_RESOURCE_RENAMING_ENABLED = 'false' const config = getConfig({ appsec: { enabled: true } }) - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) it('should enable when appsec is enabled via both env and options', () => { process.env.DD_APPSEC_ENABLED = 'true' const config = getConfig({ appsec: { enabled: true } }) - expect(config).to.have.property('resourceRenamingEnabled', true) + assert.strictEqual(config.resourceRenamingEnabled, true) }) it('should remain false when appsec is disabled', () => { process.env.DD_APPSEC_ENABLED = 'false' const config = getConfig() - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) it('should remain false when appsec is disabled via options', () => { const config = getConfig({ appsec: { enabled: false } }) - expect(config).to.have.property('resourceRenamingEnabled', false) + assert.strictEqual(config.resourceRenamingEnabled, false) }) }) @@ -3189,7 +3458,7 @@ rules: it('should return default value', 
() => { const config = getConfig() - expect(config.getOrigin('appsec.enabled')).to.be.equal('default') + assert.strictEqual(config.getOrigin('appsec.enabled'), 'default') }) it('should return env_var', () => { @@ -3197,7 +3466,7 @@ rules: const config = getConfig() - expect(config.getOrigin('appsec.enabled')).to.be.equal('env_var') + assert.strictEqual(config.getOrigin('appsec.enabled'), 'env_var') }) it('should return code', () => { @@ -3205,7 +3474,7 @@ rules: appsec: true }) - expect(config.getOrigin('appsec.enabled')).to.be.equal('code') + assert.strictEqual(config.getOrigin('appsec.enabled'), 'code') }) }) }) diff --git a/packages/dd-trace/test/datastreams/processor.spec.js b/packages/dd-trace/test/datastreams/processor.spec.js index 1187eec4f1e..93734962706 100644 --- a/packages/dd-trace/test/datastreams/processor.spec.js +++ b/packages/dd-trace/test/datastreams/processor.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const { hostname } = require('node:os') @@ -221,7 +220,7 @@ describe('DataStreamsProcessor', () => { processor = new DataStreamsProcessor(config) clearTimeout(processor.timer) - expect(DataStreamsWriter).to.be.calledWith({ + sinon.assert.calledWith(DataStreamsWriter, { hostname: config.hostname, port: config.port, url: config.url @@ -294,7 +293,7 @@ describe('DataStreamsProcessor', () => { it('should export on interval', () => { processor.recordCheckpoint(mockCheckpoint) processor.onInterval() - expect(writer.flush).to.be.calledWith({ + sinon.assert.calledWith(writer.flush, { Env: 'test', Service: 'service1', Version: 'v1', diff --git a/packages/dd-trace/test/encode/agentless-ci-visibility.spec.js b/packages/dd-trace/test/encode/agentless-ci-visibility.spec.js index 63338456b1a..4b72dde2473 100644 --- a/packages/dd-trace/test/encode/agentless-ci-visibility.spec.js +++ 
b/packages/dd-trace/test/encode/agentless-ci-visibility.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { assertObjectContains } = require('../../../../integration-tests/helpers') const { describe, it, beforeEach, afterEach } = require('tap').mocha @@ -152,7 +151,7 @@ describe('agentless-ci-visibility-encode', () => { const buffer = encoder.makePayload() const decodedTrace = msgpack.decode(buffer, { useBigInt64: true }) - expect(decodedTrace) + assert.ok(decodedTrace) const spanEvent = decodedTrace.events[0] assert.strictEqual(spanEvent.content.type.length, MAX_TYPE_LENGTH) assert.strictEqual(spanEvent.content.name.length, MAX_NAME_LENGTH) @@ -180,7 +179,7 @@ describe('agentless-ci-visibility-encode', () => { const buffer = encoder.makePayload() const decodedTrace = msgpack.decode(buffer, { useBigInt64: true }) - expect(decodedTrace) + assert.ok(decodedTrace) const spanEvent = decodedTrace.events[0] assert.strictEqual(spanEvent.content.service, DEFAULT_SERVICE_NAME) assert.strictEqual(spanEvent.content.name, DEFAULT_SPAN_NAME) diff --git a/packages/dd-trace/test/exporters/agent/writer.spec.js b/packages/dd-trace/test/exporters/agent/writer.spec.js index f3a5e885b1f..e483916d791 100644 --- a/packages/dd-trace/test/exporters/agent/writer.spec.js +++ b/packages/dd-trace/test/exporters/agent/writer.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { assertObjectContains } = require('../../../../../integration-tests/helpers') const { describe, it, beforeEach, context } = require('tap').mocha @@ -165,9 +164,12 @@ function describeWriter (protocolVersion) { writer.flush() setTimeout(() => { - expect(log.errorWithoutTelemetry) - .to.have.been.calledWith('Error sending payload to the agent (status code: %s)', - error.status, error) + sinon.assert.calledWith( + log.errorWithoutTelemetry, + 'Error sending payload to the agent (status code: %s)', + 
error.status, + error + ) done() }) }) diff --git a/packages/dd-trace/test/exporters/common/agent-info-exporter.spec.js b/packages/dd-trace/test/exporters/common/agent-info-exporter.spec.js index bffd57612e8..30b0ddbcf3f 100644 --- a/packages/dd-trace/test/exporters/common/agent-info-exporter.spec.js +++ b/packages/dd-trace/test/exporters/common/agent-info-exporter.spec.js @@ -2,8 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') - const { describe, it } = require('tap').mocha const sinon = require('sinon') const nock = require('nock') @@ -74,7 +72,7 @@ describe('AgentInfoExporter', () => { agentInfoExporter.export(trace) sinon.assert.calledWith(writer.append, trace) sinon.assert.notCalled(writer.flush) - expect(agentInfoExporter.getUncodedTraces()).not.to.include(trace) + assert.ok(!(agentInfoExporter.getUncodedTraces()).includes(trace)) setTimeout(() => { sinon.assert.called(writer.flush) done() diff --git a/packages/dd-trace/test/exporters/log/exporter.spec.js b/packages/dd-trace/test/exporters/log/exporter.spec.js index e79257c690b..dc2fb653599 100644 --- a/packages/dd-trace/test/exporters/log/exporter.spec.js +++ b/packages/dd-trace/test/exporters/log/exporter.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -38,7 +37,7 @@ describe('LogExporter', () => { exporter.export([span, span]) log.restore() const result = `${expectedPrefix}${span.tag}${expectedSuffix}` - expect(log).to.have.calledTwice + sinon.assert.calledTwice(log) sinon.assert.calledWithMatch(log, result) }) diff --git a/packages/dd-trace/test/exporters/span-stats/exporter.spec.js b/packages/dd-trace/test/exporters/span-stats/exporter.spec.js index 85723d7cc34..19bec71261c 100644 --- a/packages/dd-trace/test/exporters/span-stats/exporter.spec.js +++ 
b/packages/dd-trace/test/exporters/span-stats/exporter.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -34,8 +33,8 @@ describe('span-stats exporter', () => { it('should flush immediately on export', () => { exporter = new Exporter({ url }) - expect(writer.append).to.have.not.been.called - expect(writer.flush).to.have.not.been.called + sinon.assert.notCalled(writer.append) + sinon.assert.notCalled(writer.flush) exporter.export('') diff --git a/packages/dd-trace/test/guardrails/telemetry.spec.js b/packages/dd-trace/test/guardrails/telemetry.spec.js index 792fee8788d..fd502327cd5 100644 --- a/packages/dd-trace/test/guardrails/telemetry.spec.js +++ b/packages/dd-trace/test/guardrails/telemetry.spec.js @@ -93,7 +93,7 @@ describe('sendTelemetry', () => { } function assertStdinMetadata (expected) { - assert.ok(capturedStdinData != null) + assert.ok(capturedStdinData) const parsed = JSON.parse(capturedStdinData) assert.strictEqual(parsed.metadata.result, expected.result) assert.strictEqual(parsed.metadata.result_class, expected.result_class) diff --git a/packages/dd-trace/test/llmobs/index.spec.js b/packages/dd-trace/test/llmobs/index.spec.js index 170f100ab7e..e62056b1f71 100644 --- a/packages/dd-trace/test/llmobs/index.spec.js +++ b/packages/dd-trace/test/llmobs/index.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { channel } = require('dc-polyfill') const { after, afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') @@ -138,8 +137,8 @@ describe('module', () => { site: 'datadoghq.com' }) - expect(LLMObsSpanWriterSpy().setAgentless).to.have.been.calledWith(true) - expect(LLMObsEvalMetricsWriterSpy().setAgentless).to.have.been.calledWith(true) + 
sinon.assert.calledWith(LLMObsSpanWriterSpy().setAgentless, true) + sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().setAgentless, true) }) }) }) @@ -152,8 +151,8 @@ describe('module', () => { } }) - expect(LLMObsSpanWriterSpy().setAgentless).to.have.been.calledWith(false) - expect(LLMObsEvalMetricsWriterSpy().setAgentless).to.have.been.calledWith(false) + sinon.assert.calledWith(LLMObsSpanWriterSpy().setAgentless, false) + sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().setAgentless, false) }) }) @@ -190,8 +189,8 @@ describe('module', () => { site: 'datadoghq.com' }) - expect(LLMObsSpanWriterSpy().setAgentless).to.have.been.calledWith(true) - expect(LLMObsEvalMetricsWriterSpy().setAgentless).to.have.been.calledWith(true) + sinon.assert.calledWith(LLMObsSpanWriterSpy().setAgentless, true) + sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().setAgentless, true) }) }) @@ -206,8 +205,8 @@ describe('module', () => { it('configures the agent-proxy writers', () => { llmobsModule.enable({ llmobs: { mlApp: 'test' } }) - expect(LLMObsSpanWriterSpy().setAgentless).to.have.been.calledWith(false) - expect(LLMObsEvalMetricsWriterSpy().setAgentless).to.have.been.calledWith(false) + sinon.assert.calledWith(LLMObsSpanWriterSpy().setAgentless, false) + sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().setAgentless, false) }) }) }) @@ -243,8 +242,8 @@ describe('module', () => { it('configures the agentless writers', () => { llmobsModule.enable({ llmobs: {}, apiKey: 'test', site: 'datadoghq.com' }) - expect(LLMObsSpanWriterSpy().setAgentless).to.have.been.calledWith(true) - expect(LLMObsEvalMetricsWriterSpy().setAgentless).to.have.been.calledWith(true) + sinon.assert.calledWith(LLMObsSpanWriterSpy().setAgentless, true) + sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().setAgentless, true) }) }) }) @@ -257,7 +256,7 @@ describe('module', () => { evalMetricAppendCh.publish(payload) - expect(LLMObsEvalMetricsWriterSpy().append).to.have.been.calledWith(payload) + 
sinon.assert.calledWith(LLMObsEvalMetricsWriterSpy().append, payload) }) it('removes all subscribers when disabling', () => { diff --git a/packages/dd-trace/test/llmobs/sdk/index.spec.js b/packages/dd-trace/test/llmobs/sdk/index.spec.js index af502a19341..6217a3ecac4 100644 --- a/packages/dd-trace/test/llmobs/sdk/index.spec.js +++ b/packages/dd-trace/test/llmobs/sdk/index.spec.js @@ -265,7 +265,7 @@ describe('sdk', () => { it.skip('starts a span with a distinct trace id', () => { llmobs.trace({ kind: 'workflow', name: 'test' }, span => { const traceId = LLMObsTagger.tagMap.get(span)['_ml_obs.trace_id'] - assert.ok(traceId != null) + assert.ok(traceId) assert.notStrictEqual(traceId, span.context().toTraceId(true)) }) }) @@ -578,7 +578,7 @@ describe('sdk', () => { const wrappedMyWorkflow = llmobs.wrap({ kind: 'workflow' }, myWorkflow) wrappedMyWorkflow('input', (err, res) => { - assert.ok(err != null) + assert.ok(err) assert.strictEqual(res, 'output') }) @@ -670,7 +670,7 @@ describe('sdk', () => { const span = llmobs._active() const traceId = span.context()._tags['_ml_obs.trace_id'] - assert.ok(traceId != null) + assert.ok(traceId) assert.notStrictEqual(traceId, span.context().toTraceId(true)) }) diff --git a/packages/dd-trace/test/llmobs/span_processor.spec.js b/packages/dd-trace/test/llmobs/span_processor.spec.js index f406eafe194..48502609477 100644 --- a/packages/dd-trace/test/llmobs/span_processor.spec.js +++ b/packages/dd-trace/test/llmobs/span_processor.spec.js @@ -280,7 +280,7 @@ describe('span processor', () => { assert.strictEqual(payload.meta['error.message'], 'error message') assert.strictEqual(payload.meta['error.type'], 'Error') - assert.ok(payload.meta['error.stack'] != null) + assert.ok(payload.meta['error.stack']) assert.strictEqual(payload.status, 'error') assertObjectContains(payload.tags, ['error_type:Error']) diff --git a/packages/dd-trace/test/llmobs/writers/base.spec.js b/packages/dd-trace/test/llmobs/writers/base.spec.js index 
2ac8950ee63..22f925c323c 100644 --- a/packages/dd-trace/test/llmobs/writers/base.spec.js +++ b/packages/dd-trace/test/llmobs/writers/base.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { afterEach, beforeEach, describe, it } = require('mocha') const proxyquire = require('proxyquire') const sinon = require('sinon') @@ -267,8 +266,7 @@ describe('BaseLLMObsWriter', () => { sinon.assert.calledWith(clearInterval, writer._periodic) sinon.assert.calledWith(process.removeListener, 'beforeExit', writer._beforeExitHandler) sinon.assert.calledOnce(writer.flush) - expect(logger.debug) - .to.have.been.calledWith('Stopping BaseLLMObsWriter') + sinon.assert.calledWith(logger.debug, 'Stopping BaseLLMObsWriter') }) it('does not destroy more than once', () => { diff --git a/packages/dd-trace/test/openfeature/flagging_provider.spec.js b/packages/dd-trace/test/openfeature/flagging_provider.spec.js index ebcf9bca624..03cc5b6410d 100644 --- a/packages/dd-trace/test/openfeature/flagging_provider.spec.js +++ b/packages/dd-trace/test/openfeature/flagging_provider.spec.js @@ -63,14 +63,14 @@ describe('FlaggingProvider', () => { it('should create exposure channel', () => { const provider = new FlaggingProvider(mockTracer, mockConfig) - assert.ok(provider != null) + assert.ok(provider) sinon.assert.calledWith(channelStub, 'ffe:exposure:submit') }) it('should log debug message on creation', () => { const provider = new FlaggingProvider(mockTracer, mockConfig) - assert.ok(provider != null) + assert.ok(provider) sinon.assert.calledWith(log.debug, 'FlaggingProvider created with timeout: 30000ms') }) }) diff --git a/packages/dd-trace/test/openfeature/flagging_provider_timeout.spec.js b/packages/dd-trace/test/openfeature/flagging_provider_timeout.spec.js index adeec79c0ba..da6741fb9eb 100644 --- a/packages/dd-trace/test/openfeature/flagging_provider_timeout.spec.js +++ 
b/packages/dd-trace/test/openfeature/flagging_provider_timeout.spec.js @@ -76,7 +76,7 @@ describe('FlaggingProvider Initialization Timeout', () => { }) // Verify initialization is in progress - assert.ok(provider.initController != null) + assert.ok(provider.initController) assert.strictEqual(provider.initController.isInitializing(), true) // Advance time by 30 seconds (default timeout) and run pending promises diff --git a/packages/dd-trace/test/openfeature/writers/exposures.spec.js b/packages/dd-trace/test/openfeature/writers/exposures.spec.js index 7fea6ce05fb..b81d8ddd439 100644 --- a/packages/dd-trace/test/openfeature/writers/exposures.spec.js +++ b/packages/dd-trace/test/openfeature/writers/exposures.spec.js @@ -1,6 +1,6 @@ 'use strict' -const { expect } = require('chai') +const assert = require('node:assert/strict') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -71,14 +71,14 @@ describe('OpenFeature Exposures Writer', () => { describe('constructor', () => { it('should initialize with correct defaults', () => { - expect(writer._interval).to.equal(1000) - expect(writer._timeout).to.equal(5000) - expect(writer._bufferLimit).to.equal(1000) - expect(writer._buffer).to.be.an('array').that.is.empty + assert.strictEqual(writer._interval, 1000) + assert.strictEqual(writer._timeout, 5000) + assert.strictEqual(writer._bufferLimit, 1000) + assert.deepStrictEqual(writer._buffer, []) }) it('should set up periodic flushing', () => { - expect(writer._periodic).to.exist + assert.ok(writer._periodic) }) }) @@ -90,8 +90,8 @@ describe('OpenFeature Exposures Writer', () => { it('should add exposure event to buffer', () => { writer.append(exposureEvent) - expect(writer._buffer).to.have.length(1) - expect(writer._buffer[0]).to.equal(exposureEvent) + assert.strictEqual(writer._buffer?.length, 1) + assert.strictEqual(writer._buffer[0], exposureEvent) }) it('should track buffer size', () 
=> { @@ -99,7 +99,7 @@ describe('OpenFeature Exposures Writer', () => { writer.append(exposureEvent) - expect(writer._bufferSize).to.be.greaterThan(initialSize) + assert.ok(writer._bufferSize > initialSize) }) it('should drop events when buffer is full', () => { @@ -109,9 +109,9 @@ describe('OpenFeature Exposures Writer', () => { writer.append(exposureEvent) writer.append(exposureEvent) // Should be dropped - expect(writer._buffer).to.have.length(2) - expect(writer._droppedEvents).to.equal(1) - expect(log.warn).to.have.been.calledOnce + assert.strictEqual(writer._buffer?.length, 2) + assert.strictEqual(writer._droppedEvents, 1) + sinon.assert.calledOnce(log.warn) }) it('should drop events exceeding 1MB size limit', () => { @@ -122,11 +122,9 @@ describe('OpenFeature Exposures Writer', () => { writer.append(largeEvent) - expect(writer._buffer).to.have.length(0) - expect(writer._droppedEvents).to.equal(1) - expect(log.warn).to.have.been.calledWith( - sinon.match(/event size[\s\S]*bytes exceeds limit/) - ) + assert.strictEqual(writer._buffer?.length, 0) + assert.strictEqual(writer._droppedEvents, 1) + sinon.assert.calledWith(log.warn, sinon.match(/event size[\s\S]*bytes exceeds limit/)) }) it('should flush when payload would exceed 5MB limit', () => { @@ -137,18 +135,16 @@ describe('OpenFeature Exposures Writer', () => { } writer.append(largeEvent) // First event, buffer = ~2MB - expect(writer._buffer).to.have.length(1) + assert.strictEqual(writer._buffer?.length, 1) writer.append(largeEvent) // Second event, buffer = ~4MB - expect(writer._buffer).to.have.length(2) + assert.strictEqual(writer._buffer?.length, 2) writer.append(largeEvent) // Third event would make buffer ~6MB, should trigger flush - expect(log.debug).to.have.been.calledWith( - sinon.match(/buffer size would exceed .* bytes, flushing first/) - ) + sinon.assert.calledWith(log.debug, sinon.match(/buffer size would exceed .* bytes, flushing first/)) // After flush is triggered, buffer should be cleared and 
new event added - expect(writer._buffer).to.have.length(1) + assert.strictEqual(writer._buffer?.length, 1) }) it('should buffer events when disabled', () => { @@ -156,9 +152,9 @@ describe('OpenFeature Exposures Writer', () => { writer.append(exposureEvent) - expect(writer._buffer).to.have.length(0) // Event should not be in main buffer - expect(writer._pendingEvents).to.have.length(1) // Should be in pending events - expect(writer._pendingEvents[0].event).to.equal(exposureEvent) + assert.strictEqual(writer._buffer?.length, 0) // Event should not be in main buffer + assert.strictEqual(writer._pendingEvents?.length, 1) // Should be in pending events + assert.strictEqual(writer._pendingEvents[0].event, exposureEvent) }) }) @@ -167,17 +163,17 @@ describe('OpenFeature Exposures Writer', () => { const events = [exposureEvent] const payload = writer.makePayload(events) - expect(payload).to.be.an('object') - expect(payload).to.have.property('context') - expect(payload).to.have.property('exposures') - expect(payload.exposures).to.be.an('array').with.length(1) + assert.ok(payload !== null && typeof payload === 'object' && !Array.isArray(payload)) + assert.ok(Object.hasOwn(payload, 'context')) + assert.ok(Object.hasOwn(payload, 'exposures')) + assert.strictEqual(payload.exposures?.length, 1) }) it('should include service metadata in context', () => { const events = [exposureEvent] const payload = writer.makePayload(events) - expect(payload.context).to.deep.equal({ + assert.deepStrictEqual(payload.context, { service: 'test-service', version: '1.0.0', env: 'test' @@ -189,7 +185,7 @@ describe('OpenFeature Exposures Writer', () => { const payload = writer.makePayload(events) const formattedEvent = payload.exposures[0] - expect(formattedEvent).to.deep.equal({ + assert.deepStrictEqual(formattedEvent, { timestamp: 1672531200000, allocation: { key: 'allocation_123' }, flag: { key: 'test_flag' }, @@ -212,11 +208,11 @@ describe('OpenFeature Exposures Writer', () => { const events = 
[exposureEvent] const payload = writerWithoutOptionals.makePayload(events) - expect(payload.context).to.deep.equal({ + assert.deepStrictEqual(payload.context, { service: 'test-service' }) - expect(payload.context).to.not.have.property('version') - expect(payload.context).to.not.have.property('env') + assert.ok(!(Object.hasOwn(payload.context, 'version'))) + assert.ok(!(Object.hasOwn(payload.context, 'env'))) }) it('should handle flat format with dot notation', () => { @@ -231,12 +227,12 @@ describe('OpenFeature Exposures Writer', () => { const payload = writer.makePayload([flatEvent]) const formattedEvent = payload.exposures[0] - expect(formattedEvent.allocation.key).to.equal('allocation_123') - expect(formattedEvent.flag.key).to.equal('test_flag') - expect(formattedEvent.variant.key).to.equal('A') - expect(formattedEvent.subject.id).to.equal('user_123') - expect(formattedEvent.subject.type).to.be.undefined - expect(formattedEvent.subject.attributes).to.be.undefined + assert.strictEqual(formattedEvent.allocation.key, 'allocation_123') + assert.strictEqual(formattedEvent.flag.key, 'test_flag') + assert.strictEqual(formattedEvent.variant.key, 'A') + assert.strictEqual(formattedEvent.subject.id, 'user_123') + assert.strictEqual(formattedEvent.subject.type, undefined) + assert.strictEqual(formattedEvent.subject.attributes, undefined) }) }) @@ -248,7 +244,7 @@ describe('OpenFeature Exposures Writer', () => { it('should skip flushing when buffer is empty', () => { writer.flush() - expect(request).to.not.have.been.called + sinon.assert.notCalled(request) }) it('should skip flushing when writer is disabled', () => { @@ -257,7 +253,7 @@ describe('OpenFeature Exposures Writer', () => { writer.flush() - expect(request).to.not.have.been.called + sinon.assert.notCalled(request) }) it('should flush events to agent via EVP proxy', () => { @@ -265,31 +261,31 @@ describe('OpenFeature Exposures Writer', () => { writer.append(exposureEvent) writer.flush() - expect(request).to.have.been.calledOnce + 
sinon.assert.calledOnce(request) const [payload, options] = request.getCall(0).args - expect(options.method).to.equal('POST') - expect(options.path).to.include('/evp_proxy/v2/') - expect(options.headers['Content-Type']).to.equal('application/json') - expect(options.headers['X-Datadog-EVP-Subdomain']).to.equal('event-platform-intake') + assert.strictEqual(options.method, 'POST') + assert.match(options.path, /\/evp_proxy\/v2\//) + assert.strictEqual(options.headers['Content-Type'], 'application/json') + assert.strictEqual(options.headers['X-Datadog-EVP-Subdomain'], 'event-platform-intake') const parsedPayload = JSON.parse(payload) - expect(parsedPayload).to.be.an('object') - expect(parsedPayload).to.have.property('context') - expect(parsedPayload).to.have.property('exposures') - expect(parsedPayload.exposures).to.be.an('array').with.length(1) - expect(parsedPayload.exposures[0].timestamp).to.exist - expect(parsedPayload.context.service).to.equal('test-service') + assert.ok(parsedPayload !== null && typeof parsedPayload === 'object' && !Array.isArray(parsedPayload)) + assert.ok(Object.hasOwn(parsedPayload, 'context')) + assert.ok(Object.hasOwn(parsedPayload, 'exposures')) + assert.strictEqual(parsedPayload.exposures?.length, 1) + assert.ok(parsedPayload.exposures[0].timestamp) + assert.strictEqual(parsedPayload.context.service, 'test-service') }) it('should empty buffer after flushing', () => { writer.append(exposureEvent) - expect(writer._buffer).to.have.length(1) + assert.strictEqual(writer._buffer?.length, 1) writer.flush() - expect(writer._buffer).to.have.length(0) - expect(writer._bufferSize).to.equal(0) + assert.strictEqual(writer._buffer?.length, 0) + assert.strictEqual(writer._bufferSize, 0) }) it('should log errors on request failure', (done) => { @@ -299,7 +295,7 @@ describe('OpenFeature Exposures Writer', () => { writer.flush() clock.tickAsync(0).then(() => { - expect(log.error).to.have.been.calledOnce + sinon.assert.calledOnce(log.error) done() }) }) @@ 
-309,7 +305,7 @@ describe('OpenFeature Exposures Writer', () => { writer.flush() - expect(log.debug).to.have.been.called + sinon.assert.called(log.debug) }) it('should warn on non-2xx response', (done) => { @@ -319,7 +315,7 @@ describe('OpenFeature Exposures Writer', () => { writer.flush() clock.tickAsync(0).then(() => { - expect(log.warn).to.have.been.calledOnce + sinon.assert.calledOnce(log.warn) done() }) }) @@ -335,13 +331,13 @@ describe('OpenFeature Exposures Writer', () => { clock.tick(1000) // Advance by flush interval - expect(request).to.have.been.calledOnce + sinon.assert.calledOnce(request) }) it('should not flush empty buffer periodically', () => { clock.tick(1000) - expect(request).to.not.have.been.called + sinon.assert.notCalled(request) }) }) @@ -351,7 +347,7 @@ describe('OpenFeature Exposures Writer', () => { writer.destroy() - expect(clearIntervalSpy).to.have.been.calledOnce + sinon.assert.calledOnce(clearIntervalSpy) clearIntervalSpy.restore() }) @@ -361,7 +357,7 @@ describe('OpenFeature Exposures Writer', () => { writer.destroy() - expect(request).to.have.been.calledOnce + sinon.assert.calledOnce(request) }) it('should log dropped events count', () => { @@ -369,16 +365,14 @@ describe('OpenFeature Exposures Writer', () => { writer.destroy() - expect(log.warn).to.have.been.calledWith( - sinon.match(/dropped 5 events/) - ) + sinon.assert.calledWith(log.warn, sinon.match(/dropped 5 events/)) }) it('should prevent multiple destruction', () => { writer.destroy() writer.destroy() // Should not throw or cause issues - expect(writer._destroyed).to.be.true + assert.strictEqual(writer._destroyed, true) }) }) }) diff --git a/packages/dd-trace/test/opentelemetry/logs.spec.js b/packages/dd-trace/test/opentelemetry/logs.spec.js index 37ad96d83d1..5c3b226cdb3 100644 --- a/packages/dd-trace/test/opentelemetry/logs.spec.js +++ b/packages/dd-trace/test/opentelemetry/logs.spec.js @@ -496,7 +496,7 @@ describe('OpenTelemetry Logs', () => { const { loggerProvider } = 
setupTracer() assert.strictEqual(loggerProvider.processor.exporter.transformer.protocol, 'http/protobuf') - assert(logMock.getMessage().includes('OTLP gRPC protocol is not supported')) + assert.match(logMock.getMessage(), /OTLP gRPC protocol is not supported/) logMock.restore() }) diff --git a/packages/dd-trace/test/opentelemetry/span.spec.js b/packages/dd-trace/test/opentelemetry/span.spec.js index 8062148ace3..292e8481b64 100644 --- a/packages/dd-trace/test/opentelemetry/span.spec.js +++ b/packages/dd-trace/test/opentelemetry/span.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it } = require('tap').mocha const sinon = require('sinon') const { performance } = require('perf_hooks') @@ -494,13 +493,13 @@ describe('OTel Span', () => { processor.onEnd = sinon.stub() tracerProvider.addSpanProcessor(processor) - expect(processor.onStart).to.have.not.been.called - expect(processor.onEnd).to.have.not.been.called + sinon.assert.notCalled(processor.onStart) + sinon.assert.notCalled(processor.onEnd) const span = tracer.startSpan('name') sinon.assert.calledWith(processor.onStart, span, span._context) - expect(processor.onEnd).to.have.not.been.called + sinon.assert.notCalled(processor.onEnd) span.end() diff --git a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js index 9c8fab6151e..42ed4e96a51 100644 --- a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js +++ b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { assertObjectContains } = require('../../../../../integration-tests/helpers') const { describe, it, beforeEach } = require('tap').mocha @@ -438,7 +437,7 @@ describe('TextMapPropagator', () => { propagator.inject(undefined, carrier) sinon.assert.calledWith(tracerMetrics.count, 
'context_header_style.injected', ['header_style:baggage']) - expect(tracerMetrics.count().inc).to.have.been.called + sinon.assert.called(tracerMetrics.count().inc) assert.strictEqual(carrier.baggage, 'test-key=test-value') }) @@ -458,7 +457,7 @@ describe('TextMapPropagator', () => { 'context_header.truncated', ['truncation_reason:baggage_item_count_exceeded'] ) - expect(tracerMetrics.count().inc).to.have.been.called + sinon.assert.called(tracerMetrics.count().inc) // Restore original config config.baggageMaxItems = originalMaxItems @@ -478,7 +477,7 @@ describe('TextMapPropagator', () => { 'context_header.truncated', ['truncation_reason:baggage_byte_count_exceeded'] ) - expect(tracerMetrics.count().inc).to.have.been.called + sinon.assert.called(tracerMetrics.count().inc) // Restore original config config.baggageMaxBytes = originalMaxBytes @@ -977,7 +976,7 @@ describe('TextMapPropagator', () => { propagator.extract(carrier) sinon.assert.calledWith(tracerMetrics.count, 'context_header_style.extracted', ['header_style:baggage']) - expect(tracerMetrics.count().inc).to.have.been.called + sinon.assert.called(tracerMetrics.count().inc) assert.strictEqual(getBaggageItem('test-key'), 'test-value') }) @@ -991,7 +990,7 @@ describe('TextMapPropagator', () => { propagator.extract(carrier) sinon.assert.calledWith(tracerMetrics.count, 'context_header_style.malformed', ['header_style:baggage']) - expect(tracerMetrics.count().inc).to.have.been.called + sinon.assert.called(tracerMetrics.count().inc) assert.deepStrictEqual(getAllBaggageItems(), {}) }) }) diff --git a/packages/dd-trace/test/plugins/util/ip_extractor.spec.js b/packages/dd-trace/test/plugins/util/ip_extractor.spec.js index 1ca2b02ff07..9259e166b30 100644 --- a/packages/dd-trace/test/plugins/util/ip_extractor.spec.js +++ b/packages/dd-trace/test/plugins/util/ip_extractor.spec.js @@ -8,6 +8,7 @@ const http = require('node:http') require('../../setup/core') const { extractIp } = 
require('../../../src/plugins/util/ip_extractor') +const { assertObjectContains } = require('../../../../../integration-tests/helpers') describe('ip extractor', () => { let port, appListener, controller @@ -193,7 +194,7 @@ describe('ip extractor', () => { controller = function (req) { const ip = extractIp({}, req) try { - assert.ok(['::1', '127.0.0.1'].includes(ip)) + assertObjectContains(['::1', '127.0.0.1'], [ip]) done() } catch (e) { done(e) @@ -219,7 +220,7 @@ describe('ip extractor', () => { controller = function (req) { const ip = extractIp({}, req) try { - assert.ok(['::1', '127.0.0.1'].includes(ip)) + assertObjectContains(['::1', '127.0.0.1'], [ip]) done() } catch (e) { done(e) diff --git a/packages/dd-trace/test/priority_sampler.spec.js b/packages/dd-trace/test/priority_sampler.spec.js index bb80dce4e6a..977ecc01dba 100644 --- a/packages/dd-trace/test/priority_sampler.spec.js +++ b/packages/dd-trace/test/priority_sampler.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -533,7 +532,7 @@ describe('PrioritySampler', () => { PrioritySampler.keepTrace(span, SAMPLING_MECHANISM_APPSEC) - expect(setPriority).to.be.calledOnceWithExactly(span, USER_KEEP, SAMPLING_MECHANISM_APPSEC) + sinon.assert.calledOnceWithExactly(setPriority, span, USER_KEEP, SAMPLING_MECHANISM_APPSEC) }) }) }) diff --git a/packages/dd-trace/test/process-tags.spec.js b/packages/dd-trace/test/process-tags.spec.js index ac014f24008..754723779f1 100644 --- a/packages/dd-trace/test/process-tags.spec.js +++ b/packages/dd-trace/test/process-tags.spec.js @@ -1,6 +1,7 @@ 'use strict' -const { expect } = require('chai') +const assert = require('node:assert/strict') +const { assertObjectContains } = require('../../../integration-tests/helpers') const { describe, it, beforeEach, afterEach } = require('tap').mocha 
require('./setup/core') @@ -13,59 +14,56 @@ describe('process-tags', () => { it('should return an object with tags and serialized properties', () => { const result = getProcessTags() - expect(result).to.have.property('tags') - expect(result).to.have.property('serialized') - expect(result.tags).to.be.an('array') - expect(result.serialized).to.be.a('string') + assert.ok(Object.hasOwn(result, 'tags')) + assert.ok(Object.hasOwn(result, 'serialized')) + assert.ok(Array.isArray(result.tags)) + assert.strictEqual(typeof result.serialized, 'string') }) it('should include all expected tag names', () => { const result = getProcessTags() - const tagNames = result.tags.map(([name]) => name) + const tagNames = result.tags.map(([name]) => name).sort() - expect(tagNames).to.include('entrypoint.basedir') - expect(tagNames).to.include('entrypoint.name') - expect(tagNames).to.include('entrypoint.type') - expect(tagNames).to.include('entrypoint.workdir') - expect(tagNames).to.include('package.json.name') + assertObjectContains( + tagNames, + [ + 'entrypoint.basedir', + 'entrypoint.name', + 'entrypoint.type', + 'entrypoint.workdir', + 'package.json.name' + ] + ) }) it('should have entrypoint.type set to "script"', () => { const result = getProcessTags() const typeTag = result.tags.find(([name]) => name === 'entrypoint.type') - expect(typeTag).to.exist - expect(typeTag[1]).to.equal('script') + assert.ok(Array.isArray(typeTag)) + assert.strictEqual(typeTag[1], 'script') }) it('should set entrypoint.workdir to the basename of cwd', () => { const result = getProcessTags() const workdirTag = result.tags.find(([name]) => name === 'entrypoint.workdir') - expect(workdirTag).to.exist - expect(workdirTag[1]).to.be.a('string') - expect(workdirTag[1]).to.not.include('/') + assert.ok(Array.isArray(workdirTag)) + assert.strictEqual(typeof workdirTag[1], 'string') + assert.doesNotMatch(workdirTag[1], /\//) }) // note that these tests may fail if the tracer folder structure changes - it('should set 
sensible values based on tracer project structure', () => { + it('should set sensible values based on tracer project structure and be sorted alphabetically', () => { const result = getProcessTags() - expect(result.tags.find(([name]) => name === 'entrypoint.basedir')[1]).to.equal('test') - expect(result.tags.find(([name]) => name === 'entrypoint.name')[1]).to.equal('process-tags.spec') - expect(result.tags.find(([name]) => name === 'entrypoint.type')[1]).to.equal('script') - expect(result.tags.find(([name]) => name === 'entrypoint.workdir')[1]).to.equal('dd-trace-js') - expect(result.tags.find(([name]) => name === 'package.json.name')[1]).to.equal('dd-trace') - }) - - it('should sort tags alphabetically', () => { - const result = getProcessTags() - - expect(result.tags[0][0]).to.equal('entrypoint.basedir') - expect(result.tags[1][0]).to.equal('entrypoint.name') - expect(result.tags[2][0]).to.equal('entrypoint.type') - expect(result.tags[3][0]).to.equal('entrypoint.workdir') - expect(result.tags[4][0]).to.equal('package.json.name') + assert.deepStrictEqual(result.tags, [ + ['entrypoint.basedir', 'test'], + ['entrypoint.name', 'process-tags.spec'], + ['entrypoint.type', 'script'], + ['entrypoint.workdir', 'dd-trace-js'], + ['package.json.name', 'dd-trace'], + ]) }) it('should serialize tags correctly', () => { @@ -74,10 +72,10 @@ describe('process-tags', () => { // serialized should be comma-separated and not include undefined values if (result.serialized) { const parts = result.serialized.split(',') - expect(parts.length).to.be.greaterThan(0) + assert.ok(parts.length > 0) parts.forEach(part => { - expect(part).to.include(':') - expect(part).to.not.include('undefined') + assert.match(part, /:/) + assert.doesNotMatch(part, /undefined/) }) } }) @@ -93,7 +91,7 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('tag1:value1,tag2:value2,tag3:value3') + assert.strictEqual(result, 'tag1:value1,tag2:value2,tag3:value3') }) it('should 
filter out tags with undefined values', () => { @@ -106,8 +104,8 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('tag1:value1,tag3:value3') - expect(result).to.not.include('undefined') + assert.strictEqual(result, 'tag1:value1,tag3:value3') + assert.doesNotMatch(result, /undefined/) }) it('should sanitize tag values', () => { @@ -119,7 +117,7 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('tag1:value_with_spaces,tag2:uppercase,tag3:special_chars_') + assert.strictEqual(result, 'tag1:value_with_spaces,tag2:uppercase,tag3:special_chars_') }) it('should return empty string when all values are undefined', () => { @@ -130,13 +128,13 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('') + assert.strictEqual(result, '') }) it('should handle empty tags array', () => { const result = serialize([]) - expect(result).to.equal('') + assert.strictEqual(result, '') }) it('should handle numeric values', () => { @@ -147,7 +145,7 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('tag1:123,tag2:456') + assert.strictEqual(result, 'tag1:123,tag2:456') }) it('should handle mixed defined and undefined values', () => { @@ -161,107 +159,110 @@ describe('process-tags', () => { const result = serialize(tags) - expect(result).to.equal('tag1:value1,tag3:value3,tag5:value5') + assert.strictEqual(result, 'tag1:value1,tag3:value3,tag5:value5') }) }) describe('sanitize', () => { it('should convert to lowercase', () => { - expect(sanitize('UPPERCASE')).to.equal('uppercase') - expect(sanitize('MixedCase')).to.equal('mixedcase') - expect(sanitize('CamelCase')).to.equal('camelcase') + assert.strictEqual(sanitize('UPPERCASE'), 'uppercase') + assert.strictEqual(sanitize('MixedCase'), 'mixedcase') + assert.strictEqual(sanitize('CamelCase'), 'camelcase') }) it('should replace spaces with underscores', () => { - 
expect(sanitize('hello world')).to.equal('hello_world') - expect(sanitize('multiple spaces')).to.equal('multiple_spaces') + assert.strictEqual(sanitize('hello world'), 'hello_world') + assert.strictEqual(sanitize('multiple spaces'), 'multiple_spaces') }) it('should replace special characters with underscores', () => { - expect(sanitize('hello@world')).to.equal('hello_world') - expect(sanitize('hello!world')).to.equal('hello_world') - expect(sanitize('hello#world')).to.equal('hello_world') - expect(sanitize('hello$world')).to.equal('hello_world') - expect(sanitize('hello%world')).to.equal('hello_world') - expect(sanitize('hello&world')).to.equal('hello_world') - expect(sanitize('hello*world')).to.equal('hello_world') + assert.strictEqual(sanitize('hello@world'), 'hello_world') + assert.strictEqual(sanitize('hello!world'), 'hello_world') + assert.strictEqual(sanitize('hello#world'), 'hello_world') + assert.strictEqual(sanitize('hello$world'), 'hello_world') + assert.strictEqual(sanitize('hello%world'), 'hello_world') + assert.strictEqual(sanitize('hello&world'), 'hello_world') + assert.strictEqual(sanitize('hello*world'), 'hello_world') }) it('should preserve forward slashes', () => { - expect(sanitize('path/to/file')).to.equal('path/to/file') - expect(sanitize('foo/bar/baz')).to.equal('foo/bar/baz') + assert.strictEqual(sanitize('path/to/file'), 'path/to/file') + assert.strictEqual(sanitize('foo/bar/baz'), 'foo/bar/baz') }) it('should preserve underscores', () => { - expect(sanitize('hello_world')).to.equal('hello_world') - expect(sanitize('foo_bar_baz')).to.equal('foo_bar_baz') + assert.strictEqual(sanitize('hello_world'), 'hello_world') + assert.strictEqual(sanitize('foo_bar_baz'), 'foo_bar_baz') }) it('should preserve dots', () => { - expect(sanitize('file.txt')).to.equal('file.txt') - expect(sanitize('my.package.name')).to.equal('my.package.name') + assert.strictEqual(sanitize('file.txt'), 'file.txt') + assert.strictEqual(sanitize('my.package.name'), 
'my.package.name') }) it('should preserve hyphens', () => { - expect(sanitize('my-package')).to.equal('my-package') - expect(sanitize('foo-bar-baz')).to.equal('foo-bar-baz') + assert.strictEqual(sanitize('my-package'), 'my-package') + assert.strictEqual(sanitize('foo-bar-baz'), 'foo-bar-baz') }) it('should preserve alphanumeric characters', () => { - expect(sanitize('abc123')).to.equal('abc123') - expect(sanitize('ABC123')).to.equal('abc123') - expect(sanitize('test123abc')).to.equal('test123abc') + assert.strictEqual(sanitize('abc123'), 'abc123') + assert.strictEqual(sanitize('ABC123'), 'abc123') + assert.strictEqual(sanitize('test123abc'), 'test123abc') }) it('should handle multiple consecutive special characters', () => { - expect(sanitize('hello!!!world')).to.equal('hello_world') - expect(sanitize('foo@@@bar')).to.equal('foo_bar') - expect(sanitize('test spaces')).to.equal('test_spaces') + assert.strictEqual(sanitize('hello!!!world'), 'hello_world') + assert.strictEqual(sanitize('foo@@@bar'), 'foo_bar') + assert.strictEqual(sanitize('test spaces'), 'test_spaces') }) it('should handle complex combinations', () => { - expect(sanitize('My-Package_Name/v1.2.3')).to.equal('my-package_name/v1.2.3') - expect(sanitize('foo@bar#baz.txt')).to.equal('foo_bar_baz.txt') - expect(sanitize('Test File (Copy).js')).to.equal('test_file_copy_.js') + assert.strictEqual(sanitize('My-Package_Name/v1.2.3'), 'my-package_name/v1.2.3') + assert.strictEqual(sanitize('foo@bar#baz.txt'), 'foo_bar_baz.txt') + assert.strictEqual(sanitize('Test File (Copy).js'), 'test_file_copy_.js') }) it('should convert non-string values to strings first', () => { - expect(sanitize(123)).to.equal('123') - expect(sanitize(true)).to.equal('true') - expect(sanitize(false)).to.equal('false') + // @ts-expect-error: intentionally passing invalid types to test robustness + assert.strictEqual(sanitize(123), '123') + // @ts-expect-error: intentionally passing invalid types to test robustness + 
assert.strictEqual(sanitize(true), 'true') + // @ts-expect-error: intentionally passing invalid types to test robustness + assert.strictEqual(sanitize(false), 'false') }) it('should handle empty string', () => { - expect(sanitize('')).to.equal('') + assert.strictEqual(sanitize(''), '') }) it('should handle strings with only special characters', () => { - expect(sanitize('!!!')).to.equal('_') - expect(sanitize('@@@')).to.equal('_') - expect(sanitize(' ')).to.equal('_') + assert.strictEqual(sanitize('!!!'), '_') + assert.strictEqual(sanitize('@@@'), '_') + assert.strictEqual(sanitize(' '), '_') }) it('should handle unicode characters', () => { - expect(sanitize('hello™world')).to.equal('hello_world') - expect(sanitize('café')).to.equal('caf_') - expect(sanitize('日本語')).to.equal('_') + assert.strictEqual(sanitize('hello™world'), 'hello_world') + assert.strictEqual(sanitize('café'), 'caf_') + assert.strictEqual(sanitize('日本語'), '_') }) it('should handle brackets and parentheses', () => { - expect(sanitize('func()')).to.equal('func_') - expect(sanitize('array[0]')).to.equal('array_0_') - expect(sanitize('{object}')).to.equal('_object_') + assert.strictEqual(sanitize('func()'), 'func_') + assert.strictEqual(sanitize('array[0]'), 'array_0_') + assert.strictEqual(sanitize('{object}'), '_object_') }) it('should handle quotes and backticks', () => { - expect(sanitize('"quoted"')).to.equal('_quoted_') - expect(sanitize("'quoted'")).to.equal('_quoted_') - expect(sanitize('`backtick`')).to.equal('_backtick_') + assert.strictEqual(sanitize('"quoted"'), '_quoted_') + assert.strictEqual(sanitize("'quoted'"), '_quoted_') + assert.strictEqual(sanitize('`backtick`'), '_backtick_') }) it('should preserve allowed characters in combination', () => { - expect(sanitize('my_file-v1.0/test.js')).to.equal('my_file-v1.0/test.js') - expect(sanitize('package_name-2.4.6/lib/index.js')).to.equal('package_name-2.4.6/lib/index.js') + assert.strictEqual(sanitize('my_file-v1.0/test.js'), 
'my_file-v1.0/test.js') + assert.strictEqual(sanitize('package_name-2.4.6/lib/index.js'), 'package_name-2.4.6/lib/index.js') }) }) @@ -287,15 +288,14 @@ describe('process-tags', () => { getConfig = require('../src/config') const config = getConfig() - expect(config.propagateProcessTags).to.exist - expect(config.propagateProcessTags.enabled).to.equal(true) + assert.ok(config.propagateProcessTags) + assert.strictEqual(config.propagateProcessTags.enabled, true) SpanProcessor = require('../src/span_processor') const processor = new SpanProcessor(undefined, undefined, config) - expect(processor._processTags).to.be.a('string') - expect(processor._processTags).to.not.be.false - expect(processor._processTags).to.include('entrypoint') + assert.ok(typeof processor._processTags === 'string') + assert.match(processor._processTags, /entrypoint/) }) it('should disable process tags propagation when set to false', () => { @@ -304,13 +304,13 @@ describe('process-tags', () => { getConfig = require('../src/config') const config = getConfig() - expect(config.propagateProcessTags).to.exist - expect(config.propagateProcessTags.enabled).to.equal(false) + assert.ok(config.propagateProcessTags) + assert.strictEqual(config.propagateProcessTags.enabled, false) SpanProcessor = require('../src/span_processor') const processor = new SpanProcessor(undefined, undefined, config) - expect(processor._processTags).to.equal(false) + assert.strictEqual(processor._processTags, false) }) it('should disable process tags propagation when not set', () => { @@ -319,12 +319,12 @@ describe('process-tags', () => { getConfig = require('../src/config') const config = getConfig() - expect(config.propagateProcessTags?.enabled).to.not.equal(true) + assert.notStrictEqual(config.propagateProcessTags?.enabled, true) SpanProcessor = require('../src/span_processor') const processor = new SpanProcessor(undefined, undefined, config) - expect(processor._processTags).to.equal(false) + 
assert.strictEqual(processor._processTags, false) }) }) }) diff --git a/packages/dd-trace/test/profiling/exporters/agent.spec.js b/packages/dd-trace/test/profiling/exporters/agent.spec.js index 25adf1794c5..8d59b71b9af 100644 --- a/packages/dd-trace/test/profiling/exporters/agent.spec.js +++ b/packages/dd-trace/test/profiling/exporters/agent.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -132,8 +131,8 @@ describe('exporters/agent', function () { const wallProfile = Profile.decode(req.files[1].buffer) const spaceProfile = Profile.decode(req.files[2].buffer) - expect(wallProfile).to.be.a.profile - expect(spaceProfile).to.be.a.profile + assertIsProfile(wallProfile) + assertIsProfile(spaceProfile) assert.deepStrictEqual(wallProfile, Profile.decode(profiles.wall)) assert.deepStrictEqual(spaceProfile, Profile.decode(profiles.space)) @@ -490,3 +489,56 @@ describe('exporters/agent', function () { }) }, { skip: os.platform() === 'win32' }) }) + +function assertIsProfile (obj, msg) { + assert.ok(typeof obj === 'object' && obj !== null, msg) + assert.strictEqual(typeof obj.timeNanos, 'bigint', msg) + assert.ok(typeof obj.period === 'number' || typeof obj.period === 'bigint', msg) + + assertIsValueType(obj.periodType, msg) + + assert.ok(Array.isArray(obj.sampleType), msg) + assert.strictEqual(obj.sampleType.length, 2, msg) + assert.ok(Array.isArray(obj.sample), msg) + assert.ok(Array.isArray(obj.location), msg) + assert.ok(Array.isArray(obj.function), msg) + + assert.ok(typeof obj.stringTable === 'object' && obj.stringTable !== null, msg) + assert.ok(Array.isArray(obj.stringTable.strings), msg) + assert.ok(obj.stringTable.strings.length >= 1, msg) + assert.strictEqual(obj.stringTable.strings[0], '', msg) + + for (const sampleType of obj.sampleType) { + 
assertIsValueType(sampleType, msg) + } + + for (const fn of obj.function) { + assert.strictEqual(typeof fn.filename, 'number', msg) + assert.strictEqual(typeof fn.systemName, 'number', msg) + assert.strictEqual(typeof fn.name, 'number', msg) + assert.ok(Number.isSafeInteger(fn.id), msg) + } + + for (const location of obj.location) { + assert.ok(Number.isSafeInteger(location.id), msg) + assert.ok(Array.isArray(location.line), msg) + + for (const line of location.line) { + assert.ok(Number.isSafeInteger(line.functionId), msg) + assert.strictEqual(typeof line.line, 'number', msg) + } + } + + for (const sample of obj.sample) { + assert.ok(Array.isArray(sample.locationId), msg) + assert.ok(sample.locationId.length >= 1, msg) + assert.ok(Array.isArray(sample.value), msg) + assert.strictEqual(sample.value.length, obj.sampleType.length, msg) + } + + function assertIsValueType (valueType, msg) { + assert.ok(typeof valueType === 'object' && valueType !== null, msg) + assert.strictEqual(typeof valueType.type, 'number', msg) + assert.strictEqual(typeof valueType.unit, 'number', msg) + } +} diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js index cdd3d0a069d..683ba2e32d4 100644 --- a/packages/dd-trace/test/profiling/profiler.spec.js +++ b/packages/dd-trace/test/profiling/profiler.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -239,8 +238,8 @@ describe('profiler', function () { assert.ok(Object.hasOwn(profiles, 'space')) assert.ok(profiles.space instanceof Buffer) assert.strictEqual(profiles.space.indexOf(magicBytes), 0) - expect(start).to.be.a('date') - expect(end).to.be.a('date') + assert.ok((start) instanceof Date) + assert.ok((end) instanceof Date) assert.strictEqual(end - start, 65000) 
assert.strictEqual(tags.foo, 'foo') } @@ -311,8 +310,8 @@ describe('profiler', function () { await waitForExport() const { start, end } = exporter.export.args[0][0] - expect(start).to.be.a('date') - expect(end).to.be.a('date') + assert.ok((start) instanceof Date) + assert.ok((end) instanceof Date) assert.strictEqual(end - start, 65000) sinon.assert.calledOnce(exporter.export) @@ -324,8 +323,8 @@ describe('profiler', function () { const { start: start2, end: end2 } = exporter.export.args[0][0] assert.ok(start2 >= end) - expect(start2).to.be.a('date') - expect(end2).to.be.a('date') + assert.ok((start2) instanceof Date) + assert.ok((end2) instanceof Date) assert.strictEqual(end2 - start2, 65000) sinon.assert.calledOnce(exporter.export) diff --git a/packages/dd-trace/test/remote_config/manager.spec.js b/packages/dd-trace/test/remote_config/manager.spec.js index f6a87214c74..35f7ac6eeed 100644 --- a/packages/dd-trace/test/remote_config/manager.spec.js +++ b/packages/dd-trace/test/remote_config/manager.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -87,7 +86,7 @@ describe('RemoteConfigManager', () => { assert.strictEqual(secondArg, 5e3) firstArg(noop) - expect(rc.poll).to.have.calledOnceWithExactly(noop) + sinon.assert.calledOnceWithExactly(rc.poll, noop) assert.strictEqual(rc.scheduler, scheduler) @@ -276,8 +275,7 @@ describe('RemoteConfigManager', () => { rc.poll(() => { sinon.assert.calledOnceWithMatch(request, payload, expectedPayload) sinon.assert.calledOnceWithExactly(rc.parseConfig, { a: 'b' }) - expect(log.error).to.have.been - .calledOnceWithExactly('[RC] Could not parse remote config response', error) + sinon.assert.calledOnceWithExactly(log.error, '[RC] Could not parse remote config response', error) assert.strictEqual(rc.state.client.state.has_error, true) 
assert.strictEqual(rc.state.client.state.error, 'Error: Unable to parse config') diff --git a/packages/dd-trace/test/service-naming/schema.spec.js b/packages/dd-trace/test/service-naming/schema.spec.js index 23f06a7a15a..2845e357148 100644 --- a/packages/dd-trace/test/service-naming/schema.spec.js +++ b/packages/dd-trace/test/service-naming/schema.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') @@ -81,7 +80,7 @@ describe('Service naming', () => { describe('Operation name getter', () => { it('should passthrough operation name arguments', () => { resolver.getOpName('messaging', 'inbound', 'kafka', extra) - expect(dummySchema.messaging.inbound.kafka.opName).to.be.calledWith(extra) + sinon.assert.calledWith(dummySchema.messaging.inbound.kafka.opName, extra) }) }) @@ -89,7 +88,7 @@ describe('Service naming', () => { it('should add service name and passthrough service name arguments', () => { const opts = { tracerService: 'test-service', ...extra } resolver.getServiceName('messaging', 'inbound', 'kafka', opts) - expect(dummySchema.messaging.inbound.kafka.serviceName).to.be.calledWith(opts) + sinon.assert.calledWith(dummySchema.messaging.inbound.kafka.serviceName, opts) }) }) }) diff --git a/packages/dd-trace/test/setup/core.js b/packages/dd-trace/test/setup/core.js index 2d8381bab14..41507f6726b 100644 --- a/packages/dd-trace/test/setup/core.js +++ b/packages/dd-trace/test/setup/core.js @@ -1,11 +1,5 @@ 'use strict' -const chai = require('chai') -const sinonChai = require('sinon-chai') - -chai.use(sinonChai) -chai.use(require('../asserts/profile')) - process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' // If this is a release PR, set the SSI variables. 
diff --git a/packages/dd-trace/test/span_format.spec.js b/packages/dd-trace/test/span_format.spec.js index b2badbfee9a..73a44b5ea8f 100644 --- a/packages/dd-trace/test/span_format.spec.js +++ b/packages/dd-trace/test/span_format.spec.js @@ -2,8 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') - const { assertObjectContains } = require('../../../integration-tests/helpers') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') @@ -311,13 +309,8 @@ describe('spanFormat', () => { } trace = spanFormat(span, false) - expect(trace.meta).to.not.include({ - chunk: 'test' - }) - - expect(trace.metrics).to.not.include({ - count: 1 - }) + assert.ok(!('chunk' in trace.meta)) + assert.ok(!('count' in trace.metrics)) }) it('should extract empty tags', () => { diff --git a/packages/dd-trace/test/telemetry/dependencies.spec.js b/packages/dd-trace/test/telemetry/dependencies.spec.js index d3951d7ba26..ed76b51a5e2 100644 --- a/packages/dd-trace/test/telemetry/dependencies.spec.js +++ b/packages/dd-trace/test/telemetry/dependencies.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -163,8 +162,14 @@ describe('dependencies', () => { { name: request, version: packageVersion } ] } - expect(sendData) - .to.have.been.calledOnceWith(config, application, host, 'app-dependencies-loaded', expectedDependencies) + sinon.assert.calledOnceWithMatch( + sendData, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies + ) }) it('should call sendData with computed request from file path when it does not come in message', () => { @@ -178,8 +183,14 @@ describe('dependencies', () => { { name: request, version: packageVersion } ] } - expect(sendData) - .to.have.been.calledOnceWith(config, application, host, 
'app-dependencies-loaded', expectedDependencies) + sinon.assert.calledOnceWithMatch( + sendData, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies + ) }) it('should call sendData with computed request from filename with scope when it does not come in message', () => { @@ -194,8 +205,14 @@ describe('dependencies', () => { { name: request, version: packageVersion } ] } - expect(sendData) - .to.have.been.calledOnceWith(config, application, host, 'app-dependencies-loaded', expectedDependencies) + sinon.assert.calledOnceWithMatch( + sendData, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies + ) }) it('should only include one copy of each dependency, regardless of how many of its files are loaded', () => { @@ -211,8 +228,14 @@ describe('dependencies', () => { { name: moduleName, version: packageVersion } ] } - expect(sendData) - .to.have.been.calledOnceWith(config, application, host, 'app-dependencies-loaded', expectedDependencies) + sinon.assert.calledOnceWithMatch( + sendData, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies + ) }) it('should include two dependencies when they are in different paths', () => { @@ -246,11 +269,23 @@ describe('dependencies', () => { } sinon.assert.calledTwice(sendData) - expect(sendData.firstCall) - .to.have.been.calledWith(config, application, host, 'app-dependencies-loaded', expectedDependencies1) - - expect(sendData.secondCall) - .to.have.been.calledWith(config, application, host, 'app-dependencies-loaded', expectedDependencies2) + sinon.assert.calledWith( + sendData.firstCall, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies1 + ) + + sinon.assert.calledWith( + sendData.secondCall, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies2 + ) }) it('should include only one dependency when they are in different paths but the version number is the same', () => { @@ 
-276,8 +311,14 @@ describe('dependencies', () => { { name: moduleName, version: packageVersion } ] } - expect(sendData).to.have.been - .calledOnceWith(config, application, host, 'app-dependencies-loaded', expectedDependencies) + sinon.assert.calledOnceWithMatch( + sendData, + config, + application, + host, + 'app-dependencies-loaded', + expectedDependencies + ) }) it('should call sendData only once with duplicated dependency', () => { diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index c495e1aaf31..8f4c903d1f7 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -1051,7 +1051,7 @@ async function testSeq (seqId, reqType, validatePayload) { }, host }) - assert.strictEqual([1, 0, -1].includes(Math.floor(Date.now() / 1000) - req.body.tracer_time), true) + assertObjectContains([1, 0, -1], [Math.floor(Date.now() / 1000) - req.body.tracer_time]) validatePayload(req.body.payload) } diff --git a/packages/dd-trace/test/telemetry/logs/index.spec.js b/packages/dd-trace/test/telemetry/logs/index.spec.js index 4f2f40a7aa5..6c360f86a4f 100644 --- a/packages/dd-trace/test/telemetry/logs/index.spec.js +++ b/packages/dd-trace/test/telemetry/logs/index.spec.js @@ -1,6 +1,5 @@ 'use strict' -const { expect } = require('chai') const { describe, it, beforeEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -69,7 +68,7 @@ describe('telemetry logs', () => { defaultConfig.telemetry.logCollection = false logs.start(defaultConfig) - expect(telemetryLog.subscribe).to.not.been.called + sinon.assert.notCalled(telemetryLog.subscribe) }) }) @@ -114,13 +113,13 @@ describe('telemetry logs', () => { it('should be called with WARN level', () => { telemetryLog.publish({ message: 'message', level: 'WARN' }) - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'WARN' })) + 
sinon.assert.calledOnceWithExactly(logCollectorAdd, match({ message: 'message', level: 'WARN' })) }) it('should be called with ERROR level', () => { telemetryLog.publish({ message: 'message', level: 'ERROR' }) - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR' })) + sinon.assert.calledOnceWithExactly(logCollectorAdd, match({ message: 'message', level: 'ERROR' })) }) it('should be called with ERROR level and stack_trace', () => { @@ -128,7 +127,10 @@ describe('telemetry logs', () => { const stack = error.stack telemetryLog.publish({ message: error.message, stack_trace: stack, level: 'ERROR' }) - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR', stack_trace: stack })) + sinon.assert.calledOnceWithExactly( + logCollectorAdd, + match({ message: 'message', level: 'ERROR', stack_trace: stack }) + ) }) it('should not be called with no defined level', () => { @@ -149,19 +151,18 @@ describe('telemetry logs', () => { const stack = error.stack errorLog.publish({ cause: error, sendViaTelemetry: true }) - expect(logCollectorAdd) - .to.be.calledOnceWith(match({ - message: 'Generic Error', - level: 'ERROR', - errorType: 'Error', - stack_trace: stack - })) + sinon.assert.calledOnceWithExactly(logCollectorAdd, match({ + message: 'Generic Error', + level: 'ERROR', + errorType: 'Error', + stack_trace: stack + })) }) it('should be called when an error string is published to datadog:log:error', () => { errorLog.publish({ message: 'custom error message', sendViaTelemetry: true }) - expect(logCollectorAdd).to.be.calledOnceWith(match({ + sinon.assert.calledOnceWithExactly(logCollectorAdd, match({ message: 'custom error message', level: 'ERROR', stack_trace: undefined @@ -217,7 +218,7 @@ describe('telemetry logs', () => { logs.send(defaultConfig, application, host) - expect(sendData).to.be.calledOnceWithExactly(defaultConfig, application, host, 'logs', { logs: collectedLogs }) + 
sinon.assert.calledOnceWithExactly(sendData, defaultConfig, application, host, 'logs', { logs: collectedLogs }) }) it('should not drain logCollector and call sendData if not enabled', () => { diff --git a/packages/dd-trace/test/telemetry/metrics.spec.js b/packages/dd-trace/test/telemetry/metrics.spec.js index 6bf7e38bc73..80811bdb410 100644 --- a/packages/dd-trace/test/telemetry/metrics.spec.js +++ b/packages/dd-trace/test/telemetry/metrics.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') -const { expect } = require('chai') const { describe, it, beforeEach, afterEach } = require('tap').mocha const sinon = require('sinon') const proxyquire = require('proxyquire') @@ -121,38 +120,36 @@ describe('metrics', () => { manager.send(config, application, host) - expect(sendData).to.have.been - .calledWith(config, application, host, 'generate-metrics', { - namespace: 'test1', - series: [ - { - metric: 'metric1', - points: [[now / 1e3, 1]], - interval: undefined, - type: 'count', - tags: [ - 'bar:baz' - ], - common: true - } - ] - }) - expect(sendData).to.have.been - .calledWith(config, application, host, 'generate-metrics', { - namespace: 'test2', - series: [ - { - metric: 'metric2', - points: [[now / 1e3, 1]], - interval: undefined, - type: 'count', - tags: [ - 'bux:bax' - ], - common: true - } - ] - }) + sinon.assert.calledWith(sendData, config, application, host, 'generate-metrics', { + namespace: 'test1', + series: [ + { + metric: 'metric1', + points: [[now / 1e3, 1]], + interval: undefined, + type: 'count', + tags: [ + 'bar:baz' + ], + common: true + } + ] + }) + sinon.assert.calledWith(sendData, config, application, host, 'generate-metrics', { + namespace: 'test2', + series: [ + { + metric: 'metric2', + points: [[now / 1e3, 1]], + interval: undefined, + type: 'count', + tags: [ + 'bux:bax' + ], + common: true + } + ] + }) }) it('should not send empty metrics', () => { @@ -338,7 +335,7 @@ describe('metrics', () => { metric.dec() - 
expect(metric.track).to.be.calledWith(-1) + sinon.assert.calledWith(metric.track, -1) assert.deepStrictEqual(metric.points, [ [now / 1e3, 1] @@ -355,7 +352,7 @@ describe('metrics', () => { metric.dec(2) - expect(metric.track).to.be.calledWith(-2) + sinon.assert.calledWith(metric.track, -2) assert.deepStrictEqual(metric.points, [ [now / 1e3, 1] From 10d5edcba24b0107cb6c6bc308ac3bd811badcd3 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Thu, 18 Dec 2025 20:25:02 +0100 Subject: [PATCH 08/15] test(debugger): fix flaky time budget integration test (#7103) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds unit tests with mocked time for the debugger snapshot collector deadline mechanism and removes strict timing assertions from the flaky 1ms budget integration test. The integration test was failing in CI due to unpredictable execution times in GitHub Actions environments. The test set a 1ms capture timeout and expected thread pause time ≤16ms, which was too tight for resource-constrained CI environments. 
--- .../debugger/snapshot-time-budget.spec.js | 27 ++- .../snapshot/collector-deadline.spec.js | 205 ++++++++++++++++++ 2 files changed, 225 insertions(+), 7 deletions(-) create mode 100644 packages/dd-trace/test/debugger/devtools_client/snapshot/collector-deadline.spec.js diff --git a/integration-tests/debugger/snapshot-time-budget.spec.js b/integration-tests/debugger/snapshot-time-budget.spec.js index 02140be6afb..f2d5ab0e6d8 100644 --- a/integration-tests/debugger/snapshot-time-budget.spec.js +++ b/integration-tests/debugger/snapshot-time-budget.spec.js @@ -20,8 +20,11 @@ describe('Dynamic Instrumentation', function () { it( 'should include partial snapshot marked with notCapturedReason: timeout', - // A tolerance of 15ms is used to avoid flakiness - test({ t, maxPausedTime: budget + 15, breakpointIndex: 0, maxReferenceDepth: 5 }, (locals) => { + // Timing is tested in unit tests with mocked time (collector-deadline.spec.js). + // This integration test verifies the end-to-end behavior: that timeout markers + // appear in snapshots when the budget is exceeded. We don't assert on exact timing + // to avoid flakiness in CI environments where execution time is unpredictable. 
+ test({ t, breakpointIndex: 0, maxReferenceDepth: 5 }, (locals) => { assert.strictEqual( containsTimeBudget(locals), true, @@ -187,7 +190,15 @@ describe('Dynamic Instrumentation', function () { }) }) -function test ({ t, maxPausedTime = 0, breakpointIndex, maxReferenceDepth }, assertFn) { +/** + * @param {object} config + * @param {object} config.t - Test environment + * @param {number} [config.maxPausedTime] - Optional maximum pause time in ms (skips timing assertion if not provided) + * @param {number} config.breakpointIndex - Index of the breakpoint to test + * @param {number} config.maxReferenceDepth - Maximum reference depth for snapshot + * @param {Function} [assertFn] - Optional assertion function for the snapshot locals + */ +function test ({ t, maxPausedTime, breakpointIndex, maxReferenceDepth }, assertFn) { const breakpoint = t.breakpoints[breakpointIndex] return async function () { @@ -200,10 +211,12 @@ function test ({ t, maxPausedTime = 0, breakpointIndex, maxReferenceDepth }, ass const { data } = await breakpoint.triggerBreakpoint() - assert.ok( - data.paused <= maxPausedTime, - `expected thread to be paused <=${maxPausedTime}ms, but was paused for ~${data.paused}ms` - ) + if (maxPausedTime !== undefined) { + assert.ok( + data.paused <= maxPausedTime, + `expected thread to be paused <=${maxPausedTime}ms, but was paused for ~${data.paused}ms` + ) + } const snapshot = await snapshotPromise assertFn?.(snapshot.captures.lines[breakpoint.line].locals) diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/collector-deadline.spec.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/collector-deadline.spec.js new file mode 100644 index 00000000000..d8104b6be86 --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/collector-deadline.spec.js @@ -0,0 +1,205 @@ +'use strict' + +const assert = require('node:assert/strict') +const { afterEach, beforeEach, describe, it } = require('mocha') +const sinon = 
require('sinon') +require('../../../setup/mocha') + +const session = require('./stub-session') +const proxyquire = require('proxyquire') +const { timeBudgetSym } = require('../../../../src/debugger/devtools_client/snapshot/symbols') + +describe('debugger -> devtools client -> snapshot collector deadline', function () { + let collectObjectProperties + let clock + + beforeEach(async function () { + clock = sinon.useFakeTimers() + + // Stub the collector with the stubbed session + const collectorWithStub = proxyquire('../../../../src/debugger/devtools_client/snapshot/collector', { + '../session': session + }) + collectObjectProperties = collectorWithStub.collectObjectProperties + + await session.post('Debugger.enable') + }) + + afterEach(async function () { + session.removeAllListeners('Debugger.scriptParsed') + session.removeAllListeners('Debugger.paused') + await session.post('Debugger.disable') + clock.restore() + }) + + it('should not mark properties with timeout when deadline is not exceeded', async function () { + const ctx = { + deadlineReached: false, + captureErrors: [] + } + + const opts = { + maxReferenceDepth: 3, + maxCollectionSize: 100, + maxFieldCount: 100, + deadlineNs: 100_000_000n, // 100ms + ctx + } + + const obj = { a: 1, b: 2, c: 3 } + + const objectId = await getObjectIdForObject(obj) + const properties = await collectObjectProperties(objectId, opts, 0) + + // Verify no properties are marked with timeout symbol + for (const prop of properties) { + assert.strictEqual(prop.value?.[timeBudgetSym], undefined) + } + + // Verify deadline was not reached + assert.strictEqual(ctx.deadlineReached, false) + }) + + it('should mark properties with timeout when deadline is exceeded', async function () { + // Override the hrtime stub to advance time on each call + // This simulates time passing during collection + sinon.restore() + clock = sinon.useFakeTimers() + sinon.stub(process.hrtime, 'bigint').callsFake(() => { + const time = BigInt(clock.now) * 
1_000_000n + clock.tick(50) // Advance by 50ms after each call + return time + }) + + const ctx = { + deadlineReached: false, + captureErrors: [] + } + + const opts = { + maxReferenceDepth: 5, // Deep enough to require multiple calls + maxCollectionSize: 100, + maxFieldCount: 100, + deadlineNs: 10_000_000n, // 10ms (very tight deadline) + ctx + } + + // Create a nested object structure that will take time to collect + const nestedObj = { + level1: { + level2: { + level3: { + level4: { + level5: { a: 1, b: 2, c: 3 } + } + } + } + } + } + + const objectId = await getObjectIdForObject(nestedObj) + await collectObjectProperties(objectId, opts, 0) + + // Verify deadline was reached during collection + assert.strictEqual(ctx.deadlineReached, true) + }) + + it('should cache deadline reached state in ctx', async function () { + let hrtimeCallCount = 0 + + // Override the hrtime stub to track calls and advance time + sinon.restore() + clock = sinon.useFakeTimers() + sinon.stub(process.hrtime, 'bigint').callsFake(() => { + const time = BigInt(clock.now) * 1_000_000n + hrtimeCallCount++ + clock.tick(30) // Advance by 30ms after each call + return time + }) + + const ctx = { + deadlineReached: false, + captureErrors: [] + } + + const opts = { + maxReferenceDepth: 5, + maxCollectionSize: 100, + maxFieldCount: 100, + deadlineNs: 50_000_000n, // 50ms (will be exceeded after a few calls) + ctx + } + + // Create an object with multiple nested properties to trigger multiple overBudget checks + const objWithManyProps = { + a: { nested: { deep: 1 } }, + b: { nested: { deep: 2 } }, + c: { nested: { deep: 3 } }, + d: { nested: { deep: 4 } } + } + + const objectId = await getObjectIdForObject(objWithManyProps) + await collectObjectProperties(objectId, opts, 0) + + // Verify deadline was reached + assert.strictEqual(ctx.deadlineReached, true) + + // The hrtime should be called at least a couple times + // but once deadlineReached is cached, it shouldn't be called again + 
assert.ok(hrtimeCallCount >= 2, `Expected at least 2 hrtime calls, got ${hrtimeCallCount}`) + }) + + it('should immediately return true for overBudget when deadline already reached', async function () { + // Advance time past deadline + clock.tick(200) + + const ctx = { + deadlineReached: true, // Already marked as reached + captureErrors: [] + } + + const opts = { + maxReferenceDepth: 5, + maxCollectionSize: 100, + maxFieldCount: 100, + deadlineNs: 100_000_000n, // 100ms + ctx + } + + // Track CDP calls to verify we short-circuit + let cdpCallCount = 0 + const originalPost = session.post.bind(session) + session.post = function (method, params) { + if (method === 'Runtime.getProperties') { + cdpCallCount++ + } + return originalPost(method, params) + } + + // Create a nested object that would normally require many CDP calls + const nestedObj = { + a: { nested: { deep: 1 } }, + b: { nested: { deep: 2 } }, + c: { nested: { deep: 3 } }, + d: { nested: { deep: 4 } } + } + + const objectId = await getObjectIdForObject(nestedObj) + await collectObjectProperties(objectId, opts, 0) + + // Restore original + session.post = originalPost + + // Verify ctx.deadlineReached remains true + assert.strictEqual(ctx.deadlineReached, true) + + // Verify we made very few CDP calls (should be 1 for the root object only) + // If deadline wasn't already reached, this would make many more calls for nested properties + assert.ok(cdpCallCount <= 1, `Expected at most 1 CDP call due to short-circuit, but made ${cdpCallCount}`) + }) +}) + +async function getObjectIdForObject (obj) { + const { result: { objectId } } = await session.post('Runtime.evaluate', { expression: `(${JSON.stringify(obj)})` }) + return objectId +} From be825007388ac890ad01e357d688716b1a3344a1 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Thu, 18 Dec 2025 20:26:55 +0100 Subject: [PATCH 09/15] docs: add commit signing requirement to CONTRIBUTING.md (#7108) Add a new "Sign your commits" section to the contributing 
guidelines that requires all commits in pull requests to be signed. The section includes: - Explanation of the commit signing requirement - Step-by-step instructions for setting up GPG key signing - Instructions for automatic commit signing configuration - Guide for retroactively signing existing commits This ensures the authenticity and integrity of contributions to the project. --- CONTRIBUTING.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0f93768c7c8..ec653e9cdf8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -70,6 +70,24 @@ We follow an all-green policy which means that for any PR to be merged _all_ tes Eventually we plan to look into putting these permission-required tests behind a label which team members can add to their PRs at creation to run the full CI and can add to outside contributor PRs to trigger the CI from their own user credentials. If the label is not present there will be another action which checks the label is present. Rather than showing a bunch of confusing failures to new contributors it would just show a single job failure which indicates an additional label is required, and we can name it in a way that makes it clear that it's not the responsibility of the outside contributor to add it. Something like `approve-full-ci` is one possible choice there. +## Sign your commits + +All commits in a pull request must be signed. We require commit signing to ensure the authenticity and integrity of contributions to the project. + +**Datadog employees:** We recommend using the [sign-pull-request tool](https://datadoghq.atlassian.net/wiki/spaces/SECENG/pages/5371593157/Easily+sign+commits+with+sign-pull-request+tool) for easy signing of commits. 
+ +You can also sign your commits manually using one of the following methods: + +- [Signing commits with GPG](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) +- [Signing commits with SSH](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification#ssh-commit-signature-verification) +- [Signing commits with 1Password](https://developer.1password.com/docs/ssh/git-commit-signing/) + +If you have already created commits without signing them, you can sign them retroactively by using an interactive rebase: + +```sh +$ git rebase --exec 'git commit --amend --no-edit -n -S' -i +``` + ## Development Requirements Since this project supports multiple Node.js versions, using a version manager From ee1bf2dc253f58b848a850c3da26c4215bc591a6 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Fri, 19 Dec 2025 20:01:35 +0100 Subject: [PATCH 10/15] fix: pin import-in-the-middle to v2.0.0 (#7148) Version 2.0.1 was just released which causes issue with some of our CI tests. It's too early to say if this is only a CI issue, but until we can investigate, it's better to pin it. 
--- package.json | 2 +- yarn.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 207923f7f11..2fc9d2280b5 100644 --- a/package.json +++ b/package.json @@ -127,7 +127,7 @@ ], "dependencies": { "dc-polyfill": "^0.1.10", - "import-in-the-middle": "^2.0.0" + "import-in-the-middle": "2.0.0" }, "optionalDependencies": { "@datadog/libdatadog": "0.7.0", diff --git a/yarn.lock b/yarn.lock index 4fc5b01245e..872c826c8ad 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2642,7 +2642,7 @@ import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-in-the-middle@^2.0.0: +import-in-the-middle@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-2.0.0.tgz#295948cee94d0565314824c6bd75379d13e5b1a5" integrity sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A== From 719aa7e2339a03a679999fd726f9a881efc8d29a Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Fri, 19 Dec 2025 11:57:50 -0800 Subject: [PATCH 11/15] feat(ws): add websocket context propagation (#7077) --- packages/datadog-plugin-ws/src/close.js | 57 ++++++- packages/datadog-plugin-ws/src/producer.js | 40 ++++- packages/datadog-plugin-ws/src/receiver.js | 40 ++++- packages/datadog-plugin-ws/src/server.js | 13 ++ packages/datadog-plugin-ws/src/util.js | 107 +++++++++++++ packages/datadog-plugin-ws/test/index.spec.js | 148 ++++++++++++++++++ packages/dd-trace/src/constants.js | 5 + 7 files changed, 407 insertions(+), 3 deletions(-) create mode 100644 packages/datadog-plugin-ws/src/util.js diff --git a/packages/datadog-plugin-ws/src/close.js b/packages/datadog-plugin-ws/src/close.js index c2574de43e4..6c6690ca3fd 100644 --- a/packages/datadog-plugin-ws/src/close.js +++ b/packages/datadog-plugin-ws/src/close.js @@ -1,6 +1,16 @@ 'use strict' const TracingPlugin = require('../../dd-trace/src/plugins/tracing.js') +const { + incrementWebSocketCounter, + 
buildWebSocketSpanPointerHash, + hasDistributedTracingContext +} = require('./util') +const { + WEBSOCKET_PTR_KIND, + SPAN_POINTER_DIRECTION, + SPAN_POINTER_DIRECTION_NAME +} = require('../../dd-trace/src/constants') class WSClosePlugin extends TracingPlugin { static get id () { return 'ws' } @@ -60,7 +70,52 @@ class WSClosePlugin extends TracingPlugin { end (ctx) { if (!Object.hasOwn(ctx, 'result') || !ctx.span) return - if (ctx.socket.spanContext) ctx.span.addLink({ context: ctx.socket.spanContext }) + if (ctx.socket.spanContext) { + const linkAttributes = {} + + // Determine link kind based on whether this is peer close (incoming) or self close (outgoing) + const isIncoming = ctx.isPeerClose + linkAttributes['dd.kind'] = isIncoming ? 'executed_by' : 'resuming' + + // Add span pointer for context propagation + if (this.config.traceWebsocketMessagesEnabled && ctx.socket.handshakeSpan) { + const handshakeSpan = ctx.socket.handshakeSpan + + // Only add span pointers if distributed tracing is enabled and handshake has distributed context + if (hasDistributedTracingContext(handshakeSpan, ctx.socket)) { + const counterType = isIncoming ? 'receiveCounter' : 'sendCounter' + const counter = incrementWebSocketCounter(ctx.socket, counterType) + const handshakeContext = handshakeSpan.context() + + const ptrHash = buildWebSocketSpanPointerHash( + handshakeContext._traceId, + handshakeContext._spanId, + counter, + true, // isServer + isIncoming + ) + + const directionName = isIncoming + ? SPAN_POINTER_DIRECTION_NAME.UPSTREAM + : SPAN_POINTER_DIRECTION_NAME.DOWNSTREAM + const direction = isIncoming + ? 
SPAN_POINTER_DIRECTION.UPSTREAM + : SPAN_POINTER_DIRECTION.DOWNSTREAM + + // Add span pointer attributes to link + linkAttributes['link.name'] = directionName + linkAttributes['dd.kind'] = 'span-pointer' + linkAttributes['ptr.kind'] = WEBSOCKET_PTR_KIND + linkAttributes['ptr.dir'] = direction + linkAttributes['ptr.hash'] = ptrHash + } + } + + ctx.span.addLink({ + context: ctx.socket.spanContext, + attributes: linkAttributes + }) + } ctx.span.finish() } diff --git a/packages/datadog-plugin-ws/src/producer.js b/packages/datadog-plugin-ws/src/producer.js index 8a0ea6a1613..8d7cd11f50f 100644 --- a/packages/datadog-plugin-ws/src/producer.js +++ b/packages/datadog-plugin-ws/src/producer.js @@ -1,6 +1,16 @@ 'use strict' const TracingPlugin = require('../../dd-trace/src/plugins/tracing.js') +const { + incrementWebSocketCounter, + buildWebSocketSpanPointerHash, + hasDistributedTracingContext +} = require('./util') +const { + WEBSOCKET_PTR_KIND, + SPAN_POINTER_DIRECTION, + SPAN_POINTER_DIRECTION_NAME +} = require('../../dd-trace/src/constants') class WSProducerPlugin extends TracingPlugin { static get id () { return 'ws' } @@ -51,9 +61,37 @@ class WSProducerPlugin extends TracingPlugin { if (!Object.hasOwn(ctx, 'result') || !ctx.span) return if (ctx.socket.spanContext) { + const linkAttributes = { 'dd.kind': 'resuming' } + + // Add span pointer for context propagation + if (this.config.traceWebsocketMessagesEnabled && ctx.socket.handshakeSpan) { + const handshakeSpan = ctx.socket.handshakeSpan + + // Only add span pointers if distributed tracing is enabled and handshake has distributed context + if (hasDistributedTracingContext(handshakeSpan, ctx.socket)) { + const counter = incrementWebSocketCounter(ctx.socket, 'sendCounter') + const handshakeContext = handshakeSpan.context() + + const ptrHash = buildWebSocketSpanPointerHash( + handshakeContext._traceId, + handshakeContext._spanId, + counter, + true, // isServer + false // isIncoming (this is outgoing) + ) + + // Add span 
pointer attributes to link + linkAttributes['link.name'] = SPAN_POINTER_DIRECTION_NAME.DOWNSTREAM + linkAttributes['dd.kind'] = 'span-pointer' + linkAttributes['ptr.kind'] = WEBSOCKET_PTR_KIND + linkAttributes['ptr.dir'] = SPAN_POINTER_DIRECTION.DOWNSTREAM + linkAttributes['ptr.hash'] = ptrHash + } + } + ctx.span.addLink({ context: ctx.socket.spanContext, - attributes: { 'dd.kind': 'resuming' }, + attributes: linkAttributes, }) } diff --git a/packages/datadog-plugin-ws/src/receiver.js b/packages/datadog-plugin-ws/src/receiver.js index 35c5a4bfb6f..991d7ff5aff 100644 --- a/packages/datadog-plugin-ws/src/receiver.js +++ b/packages/datadog-plugin-ws/src/receiver.js @@ -1,6 +1,16 @@ 'use strict' const TracingPlugin = require('../../dd-trace/src/plugins/tracing.js') +const { + incrementWebSocketCounter, + buildWebSocketSpanPointerHash, + hasDistributedTracingContext +} = require('./util') +const { + WEBSOCKET_PTR_KIND, + SPAN_POINTER_DIRECTION, + SPAN_POINTER_DIRECTION_NAME +} = require('../../dd-trace/src/constants') class WSReceiverPlugin extends TracingPlugin { static get id () { return 'ws' } @@ -61,9 +71,37 @@ class WSReceiverPlugin extends TracingPlugin { if (!Object.hasOwn(ctx, 'result') || !ctx.span) return if (ctx.socket.spanContext) { + const linkAttributes = { 'dd.kind': 'executed_by' } + + // Add span pointer for context propagation + if (this.config.traceWebsocketMessagesEnabled && ctx.socket.handshakeSpan) { + const handshakeSpan = ctx.socket.handshakeSpan + + // Only add span pointers if distributed tracing is enabled and handshake has distributed context + if (hasDistributedTracingContext(handshakeSpan, ctx.socket)) { + const counter = incrementWebSocketCounter(ctx.socket, 'receiveCounter') + const handshakeContext = handshakeSpan.context() + + const ptrHash = buildWebSocketSpanPointerHash( + handshakeContext._traceId, + handshakeContext._spanId, + counter, + true, // isServer + true // isIncoming + ) + + // Add span pointer attributes to link + 
linkAttributes['link.name'] = SPAN_POINTER_DIRECTION_NAME.UPSTREAM + linkAttributes['dd.kind'] = 'span-pointer' + linkAttributes['ptr.kind'] = WEBSOCKET_PTR_KIND + linkAttributes['ptr.dir'] = SPAN_POINTER_DIRECTION.UPSTREAM + linkAttributes['ptr.hash'] = ptrHash + } + } + ctx.span.addLink({ context: ctx.socket.spanContext, - attributes: { 'dd.kind': 'executed_by' }, + attributes: linkAttributes, }) } diff --git a/packages/datadog-plugin-ws/src/server.js b/packages/datadog-plugin-ws/src/server.js index aea3c6a16d2..73d27fe8599 100644 --- a/packages/datadog-plugin-ws/src/server.js +++ b/packages/datadog-plugin-ws/src/server.js @@ -2,6 +2,8 @@ const TracingPlugin = require('../../dd-trace/src/plugins/tracing.js') const tags = require('../../../ext/tags.js') +const { initWebSocketMessageCounters } = require('./util') +const { FORMAT_HTTP_HEADERS } = require('../../../ext/formats') const HTTP_STATUS_CODE = tags.HTTP_STATUS_CODE @@ -28,9 +30,13 @@ class WSServerPlugin extends TracingPlugin { ctx.args = { options } + // Extract distributed tracing context from request headers + const childOf = this.tracer.extract(FORMAT_HTTP_HEADERS, req.headers) + const service = this.serviceName({ pluginConfig: this.config }) const span = this.startSpan(this.operationName(), { service, + childOf, meta: { 'span.type': 'websocket', 'http.upgraded': 'websocket', @@ -46,6 +52,13 @@ class WSServerPlugin extends TracingPlugin { ctx.socket.spanContext = ctx.span._spanContext ctx.socket.spanContext.spanTags = ctx.span._spanContext._tags + // Store the handshake span for use in message span pointers + ctx.socket.handshakeSpan = ctx.span + // Store the request headers for distributed tracing check + ctx.socket.requestHeaders = req.headers + + // Initialize message counters for span pointers + initWebSocketMessageCounters(ctx.socket) return ctx.currentStore } diff --git a/packages/datadog-plugin-ws/src/util.js b/packages/datadog-plugin-ws/src/util.js new file mode 100644 index 
00000000000..4e5d665e0e6 --- /dev/null +++ b/packages/datadog-plugin-ws/src/util.js @@ -0,0 +1,107 @@ +'use strict' + +// WeakMap to store message counters per socket without mutating the socket object +const socketCounters = new WeakMap() + +/** + * Initializes WebSocket message counters for a socket. + * @param {object} socket - The WebSocket socket object + */ +function initWebSocketMessageCounters (socket) { + if (!socketCounters.has(socket)) { + socketCounters.set(socket, { + receiveCounter: 0, + sendCounter: 0 + }) + } +} + +/** + * Increments and returns the WebSocket message counter. + * @param {object} socket - The WebSocket socket object + * @param {string} counterType - Either 'receiveCounter' or 'sendCounter' + * @returns {number} The incremented counter value + */ +function incrementWebSocketCounter (socket, counterType) { + if (!socketCounters.has(socket)) { + initWebSocketMessageCounters(socket) + } + const counters = socketCounters.get(socket) + counters[counterType]++ + return counters[counterType] +} + +/** + * Builds a WebSocket span pointer hash. 
+ * + * Format: <128 bit hex trace id><64 bit hex span id><32 bit hex counter> + * Prefix: 'S' for server outgoing or client incoming, 'C' for server incoming or client outgoing + * + * @param {bigint} handshakeTraceId - The trace ID from the handshake span (as a BigInt) + * @param {bigint} handshakeSpanId - The span ID from the handshake span (as a BigInt) + * @param {number} counter - The message counter + * @param {boolean} isServer - Whether this is a server (true) or client (false) + * @param {boolean} isIncoming - Whether this is an incoming message (true) or outgoing (false) + * @returns {string} The span pointer hash + */ +function buildWebSocketSpanPointerHash (handshakeTraceId, handshakeSpanId, counter, isServer, isIncoming) { + // Determine prefix based on server/client and incoming/outgoing + // Server outgoing or client incoming: 'S' + // Server incoming or client outgoing: 'C' + const prefix = (isServer && !isIncoming) || (!isServer && isIncoming) ? 'S' : 'C' + + // Pad trace ID to 32 hex chars (128 bits) + const traceIdHex = handshakeTraceId.toString(16).padStart(32, '0') + + // Pad span ID to 16 hex chars (64 bits) + const spanIdHex = handshakeSpanId.toString(16).padStart(16, '0') + + // Pad counter to 8 hex chars (32 bits) + const counterHex = counter.toString(16).padStart(8, '0') + + return `${prefix}${traceIdHex}${spanIdHex}${counterHex}` +} + +/** + * Checks if the handshake span has extracted distributed tracing context. + * A websocket server must not set the span pointer if the handshake has not extracted a context. + * + * A span has distributed tracing context if it has a parent context that was + * extracted from headers (remote parent). 
+ * + * @param {object} span - The handshake span + * @param {object} socket - The WebSocket socket object + * @returns {boolean} True if the span has distributed tracing context + */ +function hasDistributedTracingContext (span, socket) { + if (!span) return false + const context = span.context() + if (!context) return false + + // Check if this span has a parent. If the parent was extracted from remote headers, + // then this span is part of a distributed trace. + // We check if the span has a parent by looking at _parentId. + // In the JavaScript tracer, when a context is extracted from headers and a child span + // is created, the child will have _parentId set to the extracted parent's span ID. + // + // For testing purposes, we also check if Datadog trace headers are present in the socket's + // upgrade request, which indicates distributed tracing context was sent by the client. + if (context._parentId !== null) { + return true + } + + // Fallback check: look for distributed tracing headers in the stored request headers + if (socket && socket.requestHeaders) { + const headers = socket.requestHeaders + return !!(headers['x-datadog-trace-id'] || headers.traceparent) + } + + return false +} + +module.exports = { + initWebSocketMessageCounters, + incrementWebSocketCounter, + buildWebSocketSpanPointerHash, + hasDistributedTracingContext +} diff --git a/packages/datadog-plugin-ws/test/index.spec.js b/packages/datadog-plugin-ws/test/index.spec.js index 4a0d968e80f..cf56081d09c 100644 --- a/packages/datadog-plugin-ws/test/index.spec.js +++ b/packages/datadog-plugin-ws/test/index.spec.js @@ -2,6 +2,7 @@ const assert = require('node:assert') const { once } = require('node:events') +const { expect } = require('chai') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') @@ -390,6 +391,153 @@ describe('Plugin', () => { }) }) }) + + describe('with span pointers', () => { + let tracer + + beforeEach(async () => { + tracer = 
require('../../dd-trace') + await agent.load(['ws'], [{ + service: 'ws-with-pointers', + traceWebsocketMessagesEnabled: true, + }]) + WebSocket = require(`../../../versions/ws@${version}`).get() + + wsServer = new WebSocket.Server({ port: clientPort }) + + // Create a parent span within a trace to properly set up distributed tracing context + tracer.trace('test.parent', parentSpan => { + const headers = {} + tracer.inject(parentSpan, 'http_headers', headers) + + // Inject distributed tracing headers to enable span pointers + client = new WebSocket(`ws://localhost:${clientPort}/${route}?active=true`, { + headers + }) + }) + }) + + afterEach(async () => { + clientPort++ + agent.close({ ritmReset: false, wipe: true }) + }) + + it('should add span pointers to producer spans', async () => { + wsServer.on('connection', (ws) => { + ws.send('test message with pointer') + }) + + client.on('message', (data) => { + assert.strictEqual(data.toString(), 'test message with pointer') + }) + + let didFindPointerLink = false + + await agent.assertSomeTraces(traces => { + const producerSpan = traces[0][0] + assert.strictEqual(producerSpan.name, 'websocket.send') + assert.strictEqual(producerSpan.service, 'ws-with-pointers') + + // Check for span links with span pointer attributes + assert.ok(producerSpan.meta['_dd.span_links'], 'Producer span should have span links') + const spanLinks = JSON.parse(producerSpan.meta['_dd.span_links']) + const pointerLink = spanLinks.find(link => + link.attributes && link.attributes['dd.kind'] === 'span-pointer' + ) + assert.ok(pointerLink, 'Should have a span pointer link') + didFindPointerLink = true + + expect(pointerLink.attributes).to.have.property('ptr.kind', 'websocket') + expect(pointerLink.attributes).to.have.property('ptr.dir', 'd') + expect(pointerLink.attributes).to.have.property('ptr.hash') + expect(pointerLink.attributes).to.have.property('link.name', 'span-pointer-down') + expect(pointerLink.attributes['ptr.hash']).to.be.a('string') + 
expect(pointerLink.attributes['ptr.hash']).to.have.lengthOf(57) + // Hash format: <32 hex trace id><16 hex span id><8 hex counter> + expect(pointerLink.attributes['ptr.hash']).to.match(/^[SC][0-9a-f]{32}[0-9a-f]{16}[0-9a-f]{8}$/) + }) + + expect(didFindPointerLink).to.be.true + }) + + it('should add span pointers to consumer spans', async () => { + wsServer.on('connection', (ws) => { + ws.on('message', (data) => { + assert.strictEqual(data.toString(), 'client message with pointer') + }) + }) + + client.on('open', () => { + client.send('client message with pointer') + }) + + let didFindPointerLink = false + + await agent.assertSomeTraces(traces => { + const consumerSpan = traces.find(t => t[0].name === 'websocket.receive')?.[0] + assert.ok(consumerSpan, 'Should have a consumer span') + assert.strictEqual(consumerSpan.service, 'ws-with-pointers') + + // Check for span links with span pointer attributes + assert.ok(consumerSpan.meta['_dd.span_links'], 'Consumer span should have span links') + const spanLinks = JSON.parse(consumerSpan.meta['_dd.span_links']) + const pointerLink = spanLinks.find(link => + link.attributes && link.attributes['dd.kind'] === 'span-pointer' + ) + assert.ok(pointerLink, 'Should have a span pointer link') + didFindPointerLink = true + + expect(pointerLink.attributes).to.have.property('ptr.kind', 'websocket') + expect(pointerLink.attributes).to.have.property('ptr.dir', 'u') + expect(pointerLink.attributes).to.have.property('ptr.hash') + expect(pointerLink.attributes).to.have.property('link.name', 'span-pointer-up') + expect(pointerLink.attributes['ptr.hash']).to.be.a('string') + expect(pointerLink.attributes['ptr.hash']).to.have.lengthOf(57) + // Hash format: <32 hex trace id><16 hex span id><8 hex counter> + expect(pointerLink.attributes['ptr.hash']).to.match(/^[SC][0-9a-f]{32}[0-9a-f]{16}[0-9a-f]{8}$/) + }) + + expect(didFindPointerLink).to.be.true + }) + + it('should generate unique hashes for each message', () => { + const testMessage = 
'test message' + const hashes = new Set() + + wsServer.on('connection', (ws) => { + ws.send(testMessage) + // Send a second message to test counter increment + setTimeout(() => ws.send(testMessage), 10) + }) + + client.on('message', (data) => { + assert.strictEqual(data.toString(), testMessage) + }) + + return agent.assertSomeTraces(traces => { + // Find all producer spans + const producerTraces = traces.filter(t => t[0].name === 'websocket.send') + + producerTraces.forEach(trace => { + if (trace[0].meta['_dd.span_links']) { + const spanLinks = JSON.parse(trace[0].meta['_dd.span_links']) + const pointerLink = spanLinks.find(link => + link.attributes && link.attributes['dd.kind'] === 'span-pointer' + ) + if (pointerLink) { + const hash = pointerLink.attributes['ptr.hash'] + hashes.add(hash) + } + } + }) + + // Each message should have a unique hash due to counter increment + if (hashes.size > 1) { + assert.ok(hashes.size >= 2, 'Multiple messages should have different hashes') + } + }) + }) + }) }) }) }) diff --git a/packages/dd-trace/src/constants.js b/packages/dd-trace/src/constants.js index c184e595237..8af34cb6e3f 100644 --- a/packages/dd-trace/src/constants.js +++ b/packages/dd-trace/src/constants.js @@ -50,8 +50,13 @@ module.exports = { GRPC_SERVER_ERROR_STATUSES: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], DYNAMODB_PTR_KIND: 'aws.dynamodb.item', S3_PTR_KIND: 'aws.s3.object', + WEBSOCKET_PTR_KIND: 'websocket', SPAN_POINTER_DIRECTION: Object.freeze({ UPSTREAM: 'u', DOWNSTREAM: 'd' + }), + SPAN_POINTER_DIRECTION_NAME: Object.freeze({ + UPSTREAM: 'span-pointer-up', + DOWNSTREAM: 'span-pointer-down' }) } From 0ad1d8f2cac2af978cdb425f8393ccf44b0db9f3 Mon Sep 17 00:00:00 2001 From: robcarlan-datadog Date: Fri, 19 Dec 2025 16:42:26 -0500 Subject: [PATCH 12/15] chore(datastreams): separate DSM specific test behavior and add to CODEOWNERS (#7076) --- CODEOWNERS | 6 + package.json | 2 +- .../datadog-plugin-amqplib/test/dsm.spec.js | 281 +++++++++++++++ 
.../datadog-plugin-amqplib/test/index.spec.js | 230 ------------- .../test/kinesis.dsm.spec.js | 242 +++++++++++++ .../test/kinesis.spec.js | 192 ----------- .../test/sns.dsm.spec.js | 269 +++++++++++++++ .../datadog-plugin-aws-sdk/test/sns.spec.js | 200 +---------- .../test/sqs.dsm.spec.js | 307 +++++++++++++++++ .../datadog-plugin-aws-sdk/test/sqs.spec.js | 269 --------------- .../test/dsm.spec.js | 324 ++++++++++++++++++ .../test/index.spec.js | 252 -------------- .../test/dsm.spec.js | 166 +++++++++ .../test/index.spec.js | 114 ------ .../datadog-plugin-kafkajs/test/dsm.spec.js | 249 ++++++++++++++ .../datadog-plugin-kafkajs/test/index.spec.js | 194 +---------- 16 files changed, 1848 insertions(+), 1449 deletions(-) create mode 100644 packages/datadog-plugin-amqplib/test/dsm.spec.js create mode 100644 packages/datadog-plugin-aws-sdk/test/kinesis.dsm.spec.js create mode 100644 packages/datadog-plugin-aws-sdk/test/sns.dsm.spec.js create mode 100644 packages/datadog-plugin-aws-sdk/test/sqs.dsm.spec.js create mode 100644 packages/datadog-plugin-confluentinc-kafka-javascript/test/dsm.spec.js create mode 100644 packages/datadog-plugin-google-cloud-pubsub/test/dsm.spec.js create mode 100644 packages/datadog-plugin-kafkajs/test/dsm.spec.js diff --git a/CODEOWNERS b/CODEOWNERS index c8129b66780..17cc9e79a61 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -77,6 +77,12 @@ /packages/dd-trace/src/llmobs/ @DataDog/ml-observability /packages/dd-trace/test/llmobs/ @DataDog/ml-observability +# Data Streams Monitoring +/packages/dd-trace/src/datastreams/ @DataDog/data-streams-monitoring +/packages/dd-trace/test/datastreams/ @DataDog/data-streams-monitoring +/packages/**/dsm.spec.js @DataDog/data-streams-monitoring +/packages/**/*.dsm.spec.js @DataDog/data-streams-monitoring + # API SDK /packages/dd-trace/src/telemetry/ @DataDog/apm-sdk-capabilities-js /packages/dd-trace/test/telemetry/ @DataDog/apm-sdk-capabilities-js diff --git a/package.json b/package.json index 
2fc9d2280b5..6e98ac5c443 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ "test:llmobs:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/llmobs/**/*.js\" -- npm run test:llmobs:plugins", "test:openfeature": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/openfeature/*.spec.js\"", "test:openfeature:ci": "nyc --no-clean --include \"packages/dd-trace/src/openfeature/**/*.js\" -- npm run test:openfeature", - "test:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/@($(echo ${SPEC:-'*'})).spec.js\"", + "test:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/@($(echo ${SPEC:-'*'}))*.spec.js\"", "test:plugins:ci": "yarn services && nyc --no-clean --include \"packages/datadog-plugin-@($(echo $PLUGINS))/src/**/*.js\" -- npm run test:plugins", "test:plugins:ci:flaky": "yarn services && nyc --no-clean --include \"packages/datadog-plugin-@($(echo $PLUGINS))/src/**/*.js\" -- npm run test:plugins -- --bail --retries 2", "test:plugins:upstream": "node ./packages/dd-trace/test/plugins/suite.js", diff --git a/packages/datadog-plugin-amqplib/test/dsm.spec.js b/packages/datadog-plugin-amqplib/test/dsm.spec.js new file mode 100644 index 00000000000..6d6a75f02d7 --- /dev/null +++ b/packages/datadog-plugin-amqplib/test/dsm.spec.js @@ -0,0 +1,281 @@ +'use strict' + +const assert = require('node:assert/strict') +const { Buffer } = require('node:buffer') + +const { afterEach, beforeEach, describe, it } = require('mocha') + +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') +const id = require('../../dd-trace/src/id') +const agent = require('../../dd-trace/test/plugins/agent') +const { withVersions } = require('../../dd-trace/test/setup/mocha') +const { assertObjectContains } = 
require('../../../integration-tests/helpers') + +describe('Plugin', () => { + let connection + let channel + let queue + + describe('amqplib', () => { + withVersions('amqplib', 'amqplib', version => { + beforeEach(() => { + process.env.DD_DATA_STREAMS_ENABLED = 'true' + queue = `test-${id()}` + }) + + afterEach(() => { + connection.close() + }) + + describe('data stream monitoring', function () { + this.timeout(10000) + + let expectedProducerHashWithTopic + let expectedProducerHashWithExchange + let expectedConsumerHash + + beforeEach(done => { + agent.load('amqplib').then(() => { + require(`../../../versions/amqplib@${version}`).get('amqplib/callback_api') + .connect((err, conn) => { + connection = conn + + if (err != null) { + return done(err) + } + + conn.createChannel((err, ch) => { + channel = ch + return done(err) + }) + }) + }) + }) + + afterEach(() => { + return agent.close({ ritmReset: false }) + }) + + beforeEach(() => { + const producerHashWithTopic = computePathwayHash('test', 'tester', [ + 'direction:out', + 'has_routing_key:true', + `topic:${queue}`, + 'type:rabbitmq' + ], ENTRY_PARENT_HASH) + + expectedProducerHashWithTopic = producerHashWithTopic.readBigUInt64LE(0).toString() + + expectedProducerHashWithExchange = computePathwayHash('test', 'tester', [ + 'direction:out', + 'exchange:namedExchange', + 'has_routing_key:true', + 'type:rabbitmq' + ], ENTRY_PARENT_HASH).readBigUInt64LE(0).toString() + + expectedConsumerHash = computePathwayHash('test', 'tester', [ + 'direction:in', + `topic:${queue}`, + 'type:rabbitmq' + ], producerHashWithTopic).readBigUInt64LE(0).toString() + }) + + it('Should emit DSM stats to the agent when sending a message on an unnamed exchange', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = [] + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived = 
statsPointsReceived.concat(statsBuckets.Stats) + }) + } + }) + assert.ok(statsPointsReceived.length >= 1) + assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ + 'direction:out', + 'has_routing_key:true', + `topic:${queue}`, + 'type:rabbitmq' + ]) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithTopic), true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) + }) + }) + + it('Should emit DSM stats to the agent when sending a message on an named exchange', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = [] + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) + }) + } + }) + assert.ok(statsPointsReceived.length >= 1) + assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ + 'direction:out', + 'exchange:namedExchange', + 'has_routing_key:true', + 'type:rabbitmq' + ]) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithExchange), true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertExchange('namedExchange', 'direct', {}, (err, ok) => { + if (err) return done(err) + + channel.publish('namedExchange', 'anyOldRoutingKey', Buffer.from('DSM pathway test')) + }) + }) + + it('Should emit DSM stats to the agent when receiving a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = [] + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) + }) + } + }) + assert.strictEqual(statsPointsReceived.length, 2) + 
assert.deepStrictEqual(statsPointsReceived[1].EdgeTags, + ['direction:in', `topic:${queue}`, 'type:rabbitmq']) + assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) + channel.consume(ok.queue, () => {}, {}, (err, ok) => { + if (err) done(err) + }) + }) + }) + + it('Should emit DSM stats to the agent when sending another message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = [] + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) + }) + } + }) + assert.strictEqual(statsPointsReceived.length, 1) + assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ + 'direction:out', + 'has_routing_key:true', + `topic:${queue}`, + 'type:rabbitmq' + ]) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithTopic), true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) + }) + }) + + it('Should emit DSM stats to the agent when receiving a message with get', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = [] + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) + }) + } + }) + assert.strictEqual(statsPointsReceived.length, 2) + assert.deepStrictEqual(statsPointsReceived[1].EdgeTags, + ['direction:in', `topic:${queue}`, 'type:rabbitmq']) + 
assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) + }, { timeoutMs: 10000 }).then(done, done) + + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) + channel.get(ok.queue, {}, (err, ok) => { + if (err) done(err) + }) + }) + }) + + it('regression test: should handle basic.get when queue is empty', done => { + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.get(ok.queue, {}, (err, msg) => { + if (err) return done(err) + assert.strictEqual(msg, false) + done() + }) + }) + }) + + it('Should set pathway hash tag on a span when producing', (done) => { + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('dsm test')) + + let produceSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('basic.publish')) { + produceSpanMeta = span.meta + } + + assertObjectContains(produceSpanMeta, { + 'pathway.hash': expectedProducerHashWithTopic + }) + }, { timeoutMs: 10000 }).then(done, done) + }) + }) + + it('Should set pathway hash tag on a span when consuming', (done) => { + channel.assertQueue(queue, {}, (err, ok) => { + if (err) return done(err) + + channel.sendToQueue(ok.queue, Buffer.from('dsm test')) + channel.consume(ok.queue, () => {}, {}, (err, ok) => { + if (err) return done(err) + + let consumeSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('basic.deliver')) { + consumeSpanMeta = span.meta + } + + assertObjectContains(consumeSpanMeta, { + 'pathway.hash': expectedConsumerHash + }) + }, { timeoutMs: 10000 }).then(done, done) + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-amqplib/test/index.spec.js b/packages/datadog-plugin-amqplib/test/index.spec.js index 741a23b4f9c..ab70b3e9a8b 100644 --- 
a/packages/datadog-plugin-amqplib/test/index.spec.js +++ b/packages/datadog-plugin-amqplib/test/index.spec.js @@ -6,13 +6,10 @@ const { Buffer } = require('node:buffer') const { afterEach, beforeEach, describe, it } = require('mocha') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../dd-trace/src/constants') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') const id = require('../../dd-trace/src/id') const agent = require('../../dd-trace/test/plugins/agent') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') -const { assertObjectContains } = require('../../../integration-tests/helpers') describe('Plugin', () => { let tracer @@ -309,233 +306,6 @@ describe('Plugin', () => { .catch(done) }) }) - - describe('when data streams monitoring is enabled', function () { - this.timeout(10000) - - let expectedProducerHashWithTopic - let expectedProducerHashWithExchange - let expectedConsumerHash - - beforeEach(() => { - const producerHashWithTopic = computePathwayHash('test', 'tester', [ - 'direction:out', - 'has_routing_key:true', - `topic:${queue}`, - 'type:rabbitmq' - ], ENTRY_PARENT_HASH) - - expectedProducerHashWithTopic = producerHashWithTopic.readBigUInt64LE(0).toString() - - expectedProducerHashWithExchange = computePathwayHash('test', 'tester', [ - 'direction:out', - 'exchange:namedExchange', - 'has_routing_key:true', - 'type:rabbitmq' - ], ENTRY_PARENT_HASH).readBigUInt64LE(0).toString() - - expectedConsumerHash = computePathwayHash('test', 'tester', [ - 'direction:in', - `topic:${queue}`, - 'type:rabbitmq' - ], producerHashWithTopic).readBigUInt64LE(0).toString() - }) - - it('Should emit DSM stats to the agent when sending a message on an unnamed exchange', done => { - agent.expectPipelineStats(dsmStats => { - let 
statsPointsReceived = [] - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) - }) - } - }) - assert.ok(statsPointsReceived.length >= 1) - assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ - 'direction:out', - 'has_routing_key:true', - `topic:${queue}`, - 'type:rabbitmq' - ]) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithTopic), true) - }, { timeoutMs: 10000 }).then(done, done) - - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) - }) - }) - - it('Should emit DSM stats to the agent when sending a message on an named exchange', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = [] - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) - }) - } - }) - assert.ok(statsPointsReceived.length >= 1) - assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ - 'direction:out', - 'exchange:namedExchange', - 'has_routing_key:true', - 'type:rabbitmq' - ]) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithExchange), true) - }, { timeoutMs: 10000 }).then(done, done) - - channel.assertExchange('namedExchange', 'direct', {}, (err, ok) => { - if (err) return done(err) - - channel.publish('namedExchange', 'anyOldRoutingKey', Buffer.from('DSM pathway test')) - }) - }) - - it('Should emit DSM stats to the agent when receiving a message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = [] - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if 
(timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) - }) - } - }) - assert.strictEqual(statsPointsReceived.length, 2) - assert.deepStrictEqual(statsPointsReceived[1].EdgeTags, - ['direction:in', `topic:${queue}`, 'type:rabbitmq']) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) - }, { timeoutMs: 10000 }).then(done, done) - - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) - channel.consume(ok.queue, () => {}, {}, (err, ok) => { - if (err) done(err) - }) - }) - }) - - it('Should emit DSM stats to the agent when sending another message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = [] - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived = statsPointsReceived.concat(statsBuckets.Stats) - }) - } - }) - assert.strictEqual(statsPointsReceived.length, 1) - assert.deepStrictEqual(statsPointsReceived[0].EdgeTags, [ - 'direction:out', - 'has_routing_key:true', - `topic:${queue}`, - 'type:rabbitmq' - ]) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHashWithTopic), true) - }, { timeoutMs: 10000 }).then(done, done) - - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) - }) - }) - - it('Should emit DSM stats to the agent when receiving a message with get', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = [] - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived = 
statsPointsReceived.concat(statsBuckets.Stats) - }) - } - }) - assert.strictEqual(statsPointsReceived.length, 2) - assert.deepStrictEqual(statsPointsReceived[1].EdgeTags, - ['direction:in', `topic:${queue}`, 'type:rabbitmq']) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) - }, { timeoutMs: 10000 }).then(done, done) - - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('DSM pathway test')) - channel.get(ok.queue, {}, (err, ok) => { - if (err) done(err) - }) - }) - }) - - it('regression test: should handle basic.get when queue is empty', done => { - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.get(ok.queue, {}, (err, msg) => { - if (err) return done(err) - assert.strictEqual(msg, false) - done() - }) - }) - }) - - it('Should set pathway hash tag on a span when producing', (done) => { - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('dsm test')) - - let produceSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.resource.startsWith('basic.publish')) { - produceSpanMeta = span.meta - } - - assertObjectContains(produceSpanMeta, { - 'pathway.hash': expectedProducerHashWithTopic - }) - }, { timeoutMs: 10000 }).then(done, done) - }) - }) - - it('Should set pathway hash tag on a span when consuming', (done) => { - channel.assertQueue(queue, {}, (err, ok) => { - if (err) return done(err) - - channel.sendToQueue(ok.queue, Buffer.from('dsm test')) - channel.consume(ok.queue, () => {}, {}, (err, ok) => { - if (err) return done(err) - - let consumeSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.resource.startsWith('basic.deliver')) { - consumeSpanMeta = span.meta - } - - assertObjectContains(consumeSpanMeta, { - 'pathway.hash': expectedConsumerHash - }) - }, { timeoutMs: 10000 
}).then(done, done) - }) - }) - }) - }) }) describe('with configuration', () => { diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.dsm.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.dsm.spec.js new file mode 100644 index 00000000000..164473cd484 --- /dev/null +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.dsm.spec.js @@ -0,0 +1,242 @@ +'use strict' + +const assert = require('node:assert/strict') + +const { afterEach, beforeEach, describe, it } = require('mocha') +const sinon = require('sinon') + +const { assertObjectContains } = require('../../../integration-tests/helpers') +const { withVersions } = require('../../dd-trace/test/setup/mocha') +const agent = require('../../dd-trace/test/plugins/agent') +const { setup } = require('./spec_helpers') +const helpers = require('./kinesis_helpers') +const id = require('../../dd-trace/src/id') +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') + +describe('Kinesis', function () { + this.timeout(10000) + setup() + + withVersions('aws-sdk', ['aws-sdk', '@aws-sdk/smithy-client'], (version, moduleName) => { + let AWS + let kinesis + let tracer + + const kinesisClientName = moduleName === '@aws-sdk/smithy-client' ? 
'@aws-sdk/client-kinesis' : 'aws-sdk' + + function createResources (streamName, cb) { + AWS = require(`../../../versions/${kinesisClientName}@${version}`).get() + + const params = { + endpoint: 'http://127.0.0.1:4566', + region: 'us-east-1' + } + + if (moduleName === '@aws-sdk/smithy-client') { + const { NodeHttpHandler } = require(`../../../versions/@aws-sdk/node-http-handler@${version}`).get() + + params.requestHandler = new NodeHttpHandler() + } + + kinesis = new AWS.Kinesis(params) + + kinesis.createStream({ + StreamName: streamName, + ShardCount: 1 + }, (err, res) => { + if (err) return cb(err) + + helpers.waitForActiveStream(kinesis, streamName, cb) + }) + } + + describe('DSM Context Propagation', () => { + let expectedProducerHash + let expectedConsumerHash + let nowStub + let streamNameDSM + + beforeEach(() => { + return agent.load('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) + }) + + beforeEach(done => { + tracer = require('../../dd-trace') + tracer.use('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) + + streamNameDSM = `MyStreamDSM-${id()}` + + const producerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + streamNameDSM, 'type:kinesis'], + ENTRY_PARENT_HASH + ) + + expectedProducerHash = producerHash.readBigUInt64LE(0).toString() + expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'topic:' + streamNameDSM, 'type:kinesis'], + producerHash + ).readBigUInt64LE(0).toString() + + createResources(streamNameDSM, done) + }) + + afterEach(done => { + kinesis.deleteStream({ + StreamName: streamNameDSM + }, (err, res) => { + if (err) return done(err) + + helpers.waitForDeletedStream(kinesis, streamNameDSM, done) + }) + }) + + afterEach(() => { + try { + nowStub.restore() + } catch { + // pass + } + agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) + }) + + it('injects DSM pathway hash during Kinesis getRecord to the span', done 
=> { + let getRecordSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.name === 'aws.response') { + getRecordSpanMeta = span.meta + } + + assertObjectContains(getRecordSpanMeta, { + 'pathway.hash': expectedConsumerHash + }) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { + if (err) return done(err) + + helpers.getTestData(kinesis, streamNameDSM, data, () => {}) + }) + }) + + it('injects DSM pathway hash during Kinesis putRecord to the span', done => { + let putRecordSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('putRecord')) { + putRecordSpanMeta = span.meta + } + + assertObjectContains(putRecordSpanMeta, { + 'pathway.hash': expectedProducerHash + }) + }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, () => {}) + }) + + it('emits DSM stats to the agent during Kinesis putRecord', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have only have 1 stats point since we only had 1 put operation + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 1) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, () => {}) + }) + + it('emits DSM stats to the agent during Kinesis getRecord', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have only have 1 stats point since we only had 1 put operation + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => 
{ + statsPointsReceived += statsBuckets.Stats.length + }) + } + }, { timeoutMs: 10000 }) + assert.ok(statsPointsReceived >= 2) + assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { + if (err) return done(err) + + helpers.getTestData(kinesis, streamNameDSM, data, () => {}) + }) + }) + + // eslint-disable-next-line @stylistic/max-len + it('emits DSM stats to the agent during Kinesis getRecord when the putRecord was done without DSM enabled', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have only have 1 stats point since we only had 1 put operation + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }, { timeoutMs: 10000 }) + assert.strictEqual(statsPointsReceived, 1) + assert.strictEqual(agent.dsmStatsExistWithParentHash(agent, '0'), true) + }, { timeoutMs: 10000 }).then(done, done) + + // TODO: Fix this. The third argument is not used. Check all usages of agent.reload. + agent.reload('aws-sdk', { kinesis: { dsmEnabled: false } }, { dsmEnabled: false }) + helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { + if (err) return done(err) + + // TODO: Fix this. The third argument is not used. Check all usages of agent.reload. 
+ agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) + helpers.getTestData(kinesis, streamNameDSM, data, () => {}) + }) + }) + + it('emits DSM stats to the agent during Kinesis putRecords', done => { + // we need to stub Date.now() to ensure a new stats bucket is created for each call + // otherwise, all stats checkpoints will be combined into a single stats points + let now = Date.now() + nowStub = sinon.stub(Date, 'now') + nowStub.callsFake(() => { + now += 1000000 + return now + }) + + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have only have 3 stats points since we only had 3 records published + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 3) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }, { timeoutMs: 10000 }).then(done, done) + + helpers.putTestRecords(kinesis, streamNameDSM, (err, data) => { + // Swallow the error as it doesn't matter for this test. 
+ }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index 3baf4c8c6f4..af0249b98ca 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -3,7 +3,6 @@ const assert = require('node:assert/strict') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') -const sinon = require('sinon') const { assertObjectContains } = require('../../../integration-tests/helpers') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') @@ -12,8 +11,6 @@ const { setup } = require('./spec_helpers') const helpers = require('./kinesis_helpers') const { rawExpectedSchema } = require('./kinesis-naming') const id = require('../../dd-trace/src/id') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') describe('Kinesis', function () { this.timeout(10000) @@ -22,7 +19,6 @@ describe('Kinesis', function () { withVersions('aws-sdk', ['aws-sdk', '@aws-sdk/smithy-client'], (version, moduleName) => { let AWS let kinesis - let tracer const kinesisClientName = moduleName === '@aws-sdk/smithy-client' ? 
'@aws-sdk/client-kinesis' : 'aws-sdk' @@ -194,193 +190,5 @@ describe('Kinesis', function () { }) }) }) - - describe('DSM Context Propagation', () => { - let expectedProducerHash - let expectedConsumerHash - let nowStub - let streamNameDSM - - beforeEach(() => { - return agent.load('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) - }) - - beforeEach(done => { - tracer = require('../../dd-trace') - tracer.use('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) - - streamNameDSM = `MyStreamDSM-${id()}` - - const producerHash = computePathwayHash( - 'test', - 'tester', - ['direction:out', 'topic:' + streamNameDSM, 'type:kinesis'], - ENTRY_PARENT_HASH - ) - - expectedProducerHash = producerHash.readBigUInt64LE(0).toString() - expectedConsumerHash = computePathwayHash( - 'test', - 'tester', - ['direction:in', 'topic:' + streamNameDSM, 'type:kinesis'], - producerHash - ).readBigUInt64LE(0).toString() - - createResources(streamNameDSM, done) - }) - - afterEach(done => { - kinesis.deleteStream({ - StreamName: streamNameDSM - }, (err, res) => { - if (err) return done(err) - - helpers.waitForDeletedStream(kinesis, streamNameDSM, done) - }) - }) - - afterEach(() => { - try { - nowStub.restore() - } catch { - // pass - } - agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) - }) - - it('injects DSM pathway hash during Kinesis getRecord to the span', done => { - let getRecordSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.name === 'aws.response') { - getRecordSpanMeta = span.meta - } - - assertObjectContains(getRecordSpanMeta, { - 'pathway.hash': expectedConsumerHash - }) - }, { timeoutMs: 10000 }).then(done, done) - - helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { - if (err) return done(err) - - helpers.getTestData(kinesis, streamNameDSM, data, () => {}) - }) - }) - - it('injects DSM pathway hash during Kinesis putRecord to the span', 
done => { - let putRecordSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.resource.startsWith('putRecord')) { - putRecordSpanMeta = span.meta - } - - assertObjectContains(putRecordSpanMeta, { - 'pathway.hash': expectedProducerHash - }) - }).then(done, done) - - helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, () => {}) - }) - - it('emits DSM stats to the agent during Kinesis putRecord', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have only have 1 stats point since we only had 1 put operation - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 1) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }, { timeoutMs: 10000 }).then(done, done) - - helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, () => {}) - }) - - it('emits DSM stats to the agent during Kinesis getRecord', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have only have 1 stats point since we only had 1 put operation - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }, { timeoutMs: 10000 }) - assert.ok(statsPointsReceived >= 2) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) - }, { timeoutMs: 10000 }).then(done, done) - - helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { - if (err) return done(err) - - helpers.getTestData(kinesis, streamNameDSM, data, () => {}) - }) - }) - - // eslint-disable-next-line @stylistic/max-len - it('emits DSM stats to the agent during Kinesis getRecord when the putRecord was 
done without DSM enabled', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have only have 1 stats point since we only had 1 put operation - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }, { timeoutMs: 10000 }) - assert.strictEqual(statsPointsReceived, 1) - assert.strictEqual(agent.dsmStatsExistWithParentHash(agent, '0'), true) - }, { timeoutMs: 10000 }).then(done, done) - - // TODO: Fix this. The third argument is not used. Check all usages of agent.reload. - agent.reload('aws-sdk', { kinesis: { dsmEnabled: false } }, { dsmEnabled: false }) - helpers.putTestRecord(kinesis, streamNameDSM, helpers.dataBuffer, (err, data) => { - if (err) return done(err) - - // TODO: Fix this. The third argument is not used. Check all usages of agent.reload. - agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) - helpers.getTestData(kinesis, streamNameDSM, data, () => {}) - }) - }) - - it('emits DSM stats to the agent during Kinesis putRecords', done => { - // we need to stub Date.now() to ensure a new stats bucket is created for each call - // otherwise, all stats checkpoints will be combined into a single stats points - let now = Date.now() - nowStub = sinon.stub(Date, 'now') - nowStub.callsFake(() => { - now += 1000000 - return now - }) - - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have only have 3 stats points since we only had 3 records published - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 3) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }, { timeoutMs: 10000 }).then(done, 
done) - - helpers.putTestRecords(kinesis, streamNameDSM, (err, data) => { - // Swallow the error as it doesn't matter for this test. - }) - }) - }) }) }) diff --git a/packages/datadog-plugin-aws-sdk/test/sns.dsm.spec.js b/packages/datadog-plugin-aws-sdk/test/sns.dsm.spec.js new file mode 100644 index 00000000000..508f033283b --- /dev/null +++ b/packages/datadog-plugin-aws-sdk/test/sns.dsm.spec.js @@ -0,0 +1,269 @@ +'use strict' + +const assert = require('node:assert/strict') + +const { after, afterEach, before, describe, it } = require('mocha') +const sinon = require('sinon') +const semver = require('semver') + +const { assertObjectContains } = require('../../../integration-tests/helpers') +const { withVersions } = require('../../dd-trace/test/setup/mocha') +const agent = require('../../dd-trace/test/plugins/agent') +const { setup } = require('./spec_helpers') + +describe('Sns', function () { + setup() + this.timeout(20000) + + withVersions('aws-sdk', ['aws-sdk', '@aws-sdk/smithy-client'], (version, moduleName) => { + let sns + let sqs + let subParams + let receiveParams + let TopicArn + let QueueUrl + let tracer + + const snsClientName = moduleName === '@aws-sdk/smithy-client' ? '@aws-sdk/client-sns' : 'aws-sdk' + const sqsClientName = moduleName === '@aws-sdk/smithy-client' ? 
'@aws-sdk/client-sqs' : 'aws-sdk' + + function createResources (queueName, topicName, cb) { + const { SNS } = require(`../../../versions/${snsClientName}@${version}`).get() + const { SQS } = require(`../../../versions/${sqsClientName}@${version}`).get() + + sns = new SNS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + sqs = new SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + + sns.createTopic({ Name: topicName }, (err, data) => { + if (err) return cb(err) + + TopicArn = data.TopicArn + + sqs.createQueue({ QueueName: queueName }, (err, data) => { + if (err) return cb(err) + + QueueUrl = data.QueueUrl + + sqs.getQueueAttributes({ + QueueUrl, + AttributeNames: ['QueueArn'] + }, (err, data) => { + if (err) return cb(err) + + const QueueArn = data.Attributes.QueueArn + + subParams = { + Protocol: 'sqs', + TopicArn, + Endpoint: QueueArn + } + + receiveParams = { + QueueUrl, + MessageAttributeNames: ['.*'], + WaitTimeSeconds: 1 + } + + cb() + }) + }) + }) + } + + describe('Data Streams Monitoring', () => { + const expectedProducerHash = '15386798273908484982' + const expectedConsumerHash = '15162998336469814920' + let nowStub + + before(() => { + return agent.load('aws-sdk', { sns: { dsmEnabled: true }, sqs: { dsmEnabled: true } }, { dsmEnabled: true }) + }) + + before(done => { + process.env.DD_DATA_STREAMS_ENABLED = 'true' + tracer = require('../../dd-trace') + tracer.use('aws-sdk', { sns: { dsmEnabled: true }, sqs: { dsmEnabled: true } }) + + createResources('TestQueueDSM', 'TestTopicDSM', done) + }) + + after(done => { + sns.deleteTopic({ TopicArn }, done) + }) + + after(done => { + sqs.deleteQueue({ QueueUrl }, done) + }) + + after(() => { + return agent.close({ ritmReset: false, wipe: true }) + }) + + afterEach(() => { + try { + nowStub.restore() + } catch { + // pass + } + // TODO: Fix this. The third argument is not used. 
+ agent.reload('aws-sdk', { sns: { dsmEnabled: true, batchPropagationEnabled: true } }, { dsmEnabled: true }) + }) + + it('injects DSM pathway hash to SNS publish span', done => { + sns.subscribe(subParams, (err, data) => { + if (err) return done(err) + + sns.publish( + { TopicArn, Message: 'message DSM' }, + (err) => { + if (err) return done(err) + + let publishSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('publish')) { + publishSpanMeta = span.meta + } + + assertObjectContains(publishSpanMeta, { + 'pathway.hash': expectedProducerHash + }) + }).then(done, done) + }) + }) + }) + + it('injects DSM pathway hash to SQS receive span from SNS topic', done => { + sns.subscribe(subParams, (err, data) => { + if (err) return done(err) + + sns.publish( + { TopicArn, Message: 'message DSM' }, + (err) => { + if (err) return done(err) + }) + + sqs.receiveMessage( + receiveParams, + (err, res) => { + if (err) return done(err) + + let consumeSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.name === 'aws.response') { + consumeSpanMeta = span.meta + } + + assertObjectContains(consumeSpanMeta, { + 'pathway.hash': expectedConsumerHash + }) + }).then(done, done) + }) + }) + }) + + it('outputs DSM stats to the agent when publishing a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 1) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }).then(done, done) + + sns.subscribe(subParams, () => { + sns.publish({ TopicArn, Message: 'message DSM' }, () => {}) + }) + }) + + it('outputs DSM stats to the agent when consuming a message', done 
=> { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 2) + assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) + }).then(done, done) + + sns.subscribe(subParams, () => { + sns.publish({ TopicArn, Message: 'message DSM' }, () => { + sqs.receiveMessage(receiveParams, () => {}) + }) + }) + }) + + it('outputs DSM stats to the agent when publishing batch messages', function (done) { + // publishBatch was released with version 2.1031.0 for the aws-sdk + // publishBatch does not work with smithy-client 3.0.0, unable to find compatible version it + // was released for, but works on 3.374.0 + if ( + (moduleName === '@aws-sdk/smithy-client' && semver.intersects(version, '>=3.374.0')) || + (moduleName === 'aws-sdk' && semver.intersects(version, '>=2.1031.0')) + ) { + // we need to stub Date.now() to ensure a new stats bucket is created for each call + // otherwise, all stats checkpoints will be combined into a single stats points + let now = Date.now() + nowStub = sinon.stub(Date, 'now') + nowStub.callsFake(() => { + now += 1000000 + return now + }) + + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 3 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 3) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }, { timeoutMs: 2000 }).then(done, done) + + sns.subscribe(subParams, () => { + sns.publishBatch( + { + TopicArn, + PublishBatchRequestEntries: [ + { + Id: '1', + Message: 
'message DSM 1' + }, + { + Id: '2', + Message: 'message DSM 2' + }, + { + Id: '3', + Message: 'message DSM 3' + } + ] + }, () => { + nowStub.restore() + }) + }) + } else { + this.skip() + } + }) + }) + }) +}) diff --git a/packages/datadog-plugin-aws-sdk/test/sns.spec.js b/packages/datadog-plugin-aws-sdk/test/sns.spec.js index 544049a974a..8f8e4497c0b 100644 --- a/packages/datadog-plugin-aws-sdk/test/sns.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sns.spec.js @@ -2,8 +2,7 @@ const assert = require('node:assert/strict') -const { after, afterEach, before, describe, it } = require('mocha') -const sinon = require('sinon') +const { after, before, describe, it } = require('mocha') const semver = require('semver') const { assertObjectContains } = require('../../../integration-tests/helpers') @@ -330,7 +329,6 @@ describe('Sns', function () { }) before(done => { - process.env.DD_DATA_STREAMS_ENABLED = 'true' tracer = require('../../dd-trace') tracer.use('aws-sdk', { sns: { dsmEnabled: false, batchPropagationEnabled: true } }) @@ -492,201 +490,5 @@ describe('Sns', function () { sns.publish({ TopicArn, Message: 'message 1' }, e => e && done(e)) }) }) - - describe('Data Streams Monitoring', () => { - const expectedProducerHash = '15386798273908484982' - const expectedConsumerHash = '15162998336469814920' - let nowStub - - before(() => { - return agent.load('aws-sdk', { sns: { dsmEnabled: true }, sqs: { dsmEnabled: true } }, { dsmEnabled: true }) - }) - - before(done => { - process.env.DD_DATA_STREAMS_ENABLED = 'true' - tracer = require('../../dd-trace') - tracer.use('aws-sdk', { sns: { dsmEnabled: true }, sqs: { dsmEnabled: true } }) - - createResources('TestQueueDSM', 'TestTopicDSM', done) - }) - - after(done => { - sns.deleteTopic({ TopicArn }, done) - }) - - after(done => { - sqs.deleteQueue({ QueueUrl }, done) - }) - - after(() => { - return agent.close({ ritmReset: false, wipe: true }) - }) - - afterEach(() => { - try { - nowStub.restore() - } catch { - // pass - } 
- // TODO: Fix this. The third argument is not used. - agent.reload('aws-sdk', { sns: { dsmEnabled: true, batchPropagationEnabled: true } }, { dsmEnabled: true }) - }) - - it('injects DSM pathway hash to SNS publish span', done => { - sns.subscribe(subParams, (err, data) => { - if (err) return done(err) - - sns.publish( - { TopicArn, Message: 'message DSM' }, - (err) => { - if (err) return done(err) - - let publishSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.resource.startsWith('publish')) { - publishSpanMeta = span.meta - } - - assertObjectContains(publishSpanMeta, { - 'pathway.hash': expectedProducerHash - }) - }).then(done, done) - }) - }) - }) - - it('injects DSM pathway hash to SQS receive span from SNS topic', done => { - sns.subscribe(subParams, (err, data) => { - if (err) return done(err) - - sns.publish( - { TopicArn, Message: 'message DSM' }, - (err) => { - if (err) return done(err) - }) - - sqs.receiveMessage( - receiveParams, - (err, res) => { - if (err) return done(err) - - let consumeSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.name === 'aws.response') { - consumeSpanMeta = span.meta - } - - assertObjectContains(consumeSpanMeta, { - 'pathway.hash': expectedConsumerHash - }) - }).then(done, done) - }) - }) - }) - - it('outputs DSM stats to the agent when publishing a message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 1) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }).then(done, done) - - sns.subscribe(subParams, () => { - sns.publish({ TopicArn, Message: 'message DSM' }, () => {}) - }) - }) - - it('outputs 
DSM stats to the agent when consuming a message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 2) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) - }).then(done, done) - - sns.subscribe(subParams, () => { - sns.publish({ TopicArn, Message: 'message DSM' }, () => { - sqs.receiveMessage(receiveParams, () => {}) - }) - }) - }) - - it('outputs DSM stats to the agent when publishing batch messages', function (done) { - // publishBatch was released with version 2.1031.0 for the aws-sdk - // publishBatch does not work with smithy-client 3.0.0, unable to find compatible version it - // was released for, but works on 3.374.0 - if ( - (moduleName === '@aws-sdk/smithy-client' && semver.intersects(version, '>=3.374.0')) || - (moduleName === 'aws-sdk' && semver.intersects(version, '>=2.1031.0')) - ) { - // we need to stub Date.now() to ensure a new stats bucket is created for each call - // otherwise, all stats checkpoints will be combined into a single stats points - let now = Date.now() - nowStub = sinon.stub(Date, 'now') - nowStub.callsFake(() => { - now += 1000000 - return now - }) - - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 3 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 3) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }, { timeoutMs: 2000 }).then(done, done) - - sns.subscribe(subParams, () => { - sns.publishBatch( - { - TopicArn, - 
PublishBatchRequestEntries: [ - { - Id: '1', - Message: 'message DSM 1' - }, - { - Id: '2', - Message: 'message DSM 2' - }, - { - Id: '3', - Message: 'message DSM 3' - } - ] - }, () => { - nowStub.restore() - }) - }) - } else { - this.skip() - } - }) - }) }) }) diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.dsm.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.dsm.spec.js new file mode 100644 index 00000000000..4609fdaf26e --- /dev/null +++ b/packages/datadog-plugin-aws-sdk/test/sqs.dsm.spec.js @@ -0,0 +1,307 @@ +'use strict' + +const assert = require('node:assert/strict') +const { randomUUID } = require('node:crypto') + +const { after, afterEach, before, beforeEach, describe, it } = require('mocha') +const semver = require('semver') +const sinon = require('sinon') + +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') +const agent = require('../../dd-trace/test/plugins/agent') +const { withVersions } = require('../../dd-trace/test/setup/mocha') +const { setup } = require('./spec_helpers') +const { assertObjectContains } = require('../../../integration-tests/helpers') + +const getQueueParams = (queueName) => { + return { + QueueName: queueName, + Attributes: { + MessageRetentionPeriod: '86400' + } + } +} + +describe('Plugin', () => { + describe('aws-sdk (sqs)', function () { + this.timeout(10000) + setup() + + withVersions('aws-sdk', ['aws-sdk', '@aws-sdk/smithy-client'], (version, moduleName) => { + let AWS + let sqs + let queueNameDSM + let queueNameDSMConsumerOnly + let queueOptionsDsm + let queueOptionsDsmConsumerOnly + let QueueUrlDsm + let QueueUrlDsmConsumerOnly + let tracer + + const sqsClientName = moduleName === '@aws-sdk/smithy-client' ? 
'@aws-sdk/client-sqs' : 'aws-sdk' + + beforeEach(() => { + const id = randomUUID() + queueNameDSM = `SQS_QUEUE_NAME_DSM-${id}` + queueNameDSMConsumerOnly = `SQS_QUEUE_NAME_DSM_CONSUMER_ONLY-${id}` + queueOptionsDsm = getQueueParams(queueNameDSM) + queueOptionsDsmConsumerOnly = getQueueParams(queueNameDSMConsumerOnly) + QueueUrlDsm = `http://127.0.0.1:4566/00000000000000000000/${queueNameDSM}` + QueueUrlDsmConsumerOnly = `http://127.0.0.1:4566/00000000000000000000/${queueNameDSMConsumerOnly}` + }) + + describe('data stream monitoring', () => { + let expectedProducerHash + let expectedConsumerHash + let nowStub + + before(() => { + process.env.DD_DATA_STREAMS_ENABLED = 'true' + tracer = require('../../dd-trace') + tracer.use('aws-sdk', { sqs: { dsmEnabled: true } }) + }) + + before(async () => { + return agent.load('aws-sdk', { + sqs: { + dsmEnabled: true + } + }, + { dsmEnabled: true }) + }) + + before(() => { + AWS = require(`../../../versions/${sqsClientName}@${version}`).get() + sqs = new AWS.SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + }) + + beforeEach(() => { + const producerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + queueNameDSM, 'type:sqs'], + ENTRY_PARENT_HASH + ) + + expectedProducerHash = producerHash.readBigUInt64LE(0).toString() + expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'topic:' + queueNameDSM, 'type:sqs'], + producerHash + ).readBigUInt64LE(0).toString() + }) + + beforeEach(done => { + sqs.createQueue(queueOptionsDsm, (err, res) => err ? done(err) : done()) + }) + + beforeEach(done => { + sqs.createQueue(queueOptionsDsmConsumerOnly, (err, res) => err ? 
done(err) : done()) + }) + + afterEach(done => { + sqs.deleteQueue({ QueueUrl: QueueUrlDsm }, done) + }) + + afterEach(done => { + sqs.deleteQueue({ QueueUrl: QueueUrlDsmConsumerOnly }, done) + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + afterEach(() => { + try { + nowStub.restore() + } catch { + // pass + } + agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) + }) + + it('Should set pathway hash tag on a span when producing', (done) => { + sqs.sendMessage({ + MessageBody: 'test DSM', + QueueUrl: QueueUrlDsm + }, (err) => { + if (err) return done(err) + + let produceSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.resource.startsWith('sendMessage')) { + produceSpanMeta = span.meta + } + + assertObjectContains(produceSpanMeta, { + 'pathway.hash': expectedProducerHash + }) + }).then(done, done) + }) + }) + + it('Should set pathway hash tag on a span when consuming', (done) => { + sqs.sendMessage({ + MessageBody: 'test DSM', + QueueUrl: QueueUrlDsm + }, (err) => { + if (err) return done(err) + + sqs.receiveMessage({ + QueueUrl: QueueUrlDsm, + MessageAttributeNames: ['.*'] + }, (err) => { + if (err) return done(err) + + let consumeSpanMeta = {} + agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.name === 'aws.response') { + consumeSpanMeta = span.meta + } + + assertObjectContains(consumeSpanMeta, { + 'pathway.hash': expectedConsumerHash + }) + }).then(done, done) + }) + }) + }) + + if (sqsClientName === 'aws-sdk' && semver.intersects(version, '>=2.3')) { + it('Should set pathway hash tag on a span when consuming and promise() was used over a callback', + async () => { + let consumeSpanMeta = {} + const tracePromise = agent.assertSomeTraces(traces => { + const span = traces[0][0] + + if (span.name === 'aws.request' && span.meta['aws.operation'] === 'receiveMessage') { + consumeSpanMeta = span.meta + } + + assertObjectContains(consumeSpanMeta, { 
+ 'pathway.hash': expectedConsumerHash + }) + }) + + await sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }).promise() + await sqs.receiveMessage({ QueueUrl: QueueUrlDsm }).promise() + + return tracePromise + }) + } + + it('Should emit DSM stats to the agent when sending a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 1) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }).then(done, done) + + sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => {}) + }) + + it('Should emit DSM stats to the agent when receiving a message', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 2) + assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) + }, { timeoutMs: 5000 }).then(done, done) + + sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => { + sqs.receiveMessage({ QueueUrl: QueueUrlDsm, MessageAttributeNames: ['.*'] }, () => {}) + }) + }) + + it('Should emit DSM stats when receiving a message when the producer was not instrumented', done => { + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += 
statsBuckets.Stats.length + }) + } + }) + assert.strictEqual(statsPointsReceived, 1) + assert.strictEqual(agent.dsmStatsExistWithParentHash(agent, '0'), true) + }).then(done, done) + + agent.reload('aws-sdk', { sqs: { dsmEnabled: false } }, { dsmEnabled: false }) + sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsmConsumerOnly }, () => { + agent.reload('aws-sdk', { sqs: { dsmEnabled: true } }, { dsmEnabled: true }) + sqs.receiveMessage({ QueueUrl: QueueUrlDsmConsumerOnly, MessageAttributeNames: ['.*'] }, () => {}) + }) + }) + + it('Should emit DSM stats to the agent when sending batch messages', done => { + // we need to stub Date.now() to ensure a new stats bucket is created for each call + // otherwise, all stats checkpoints will be combined into a single stats points + let now = Date.now() + nowStub = sinon.stub(Date, 'now') + nowStub.callsFake(() => { + now += 1000000 + return now + }) + + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 3 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 3) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) + }).then(done, done) + + sqs.sendMessageBatch( + { + Entries: [ + { + Id: '1', + MessageBody: 'test DSM 1' + }, + { + Id: '2', + MessageBody: 'test DSM 2' + }, + { + Id: '3', + MessageBody: 'test DSM 3' + } + ], + QueueUrl: QueueUrlDsm + }, () => { + nowStub.restore() + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js index 1b8b6950604..9edf1958279 100644 --- a/packages/datadog-plugin-aws-sdk/test/sqs.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sqs.spec.js @@ -3,11 +3,7 @@ const assert = require('node:assert/strict') const { randomUUID } = 
require('node:crypto') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') -const semver = require('semver') -const sinon = require('sinon') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') const agent = require('../../dd-trace/test/plugins/agent') const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const { setup } = require('./spec_helpers') @@ -32,14 +28,8 @@ describe('Plugin', () => { let AWS let sqs let queueName - let queueNameDSM - let queueNameDSMConsumerOnly let queueOptions - let queueOptionsDsm - let queueOptionsDsmConsumerOnly let QueueUrl - let QueueUrlDsm - let QueueUrlDsmConsumerOnly let tracer const sqsClientName = moduleName === '@aws-sdk/smithy-client' ? '@aws-sdk/client-sqs' : 'aws-sdk' @@ -48,21 +38,14 @@ describe('Plugin', () => { const id = randomUUID() queueName = `SQS_QUEUE_NAME-${id}` - queueNameDSM = `SQS_QUEUE_NAME_DSM-${id}` - queueNameDSMConsumerOnly = `SQS_QUEUE_NAME_DSM_CONSUMER_ONLY-${id}` queueOptions = getQueueParams(queueName) - queueOptionsDsm = getQueueParams(queueNameDSM) - queueOptionsDsmConsumerOnly = getQueueParams(queueNameDSMConsumerOnly) QueueUrl = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME-${id}` - QueueUrlDsm = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM-${id}` - QueueUrlDsmConsumerOnly = `http://127.0.0.1:4566/00000000000000000000/SQS_QUEUE_NAME_DSM_CONSUMER_ONLY-${id}` }) describe('without configuration', () => { before(() => { - process.env.DD_DATA_STREAMS_ENABLED = 'true' tracer = require('../../dd-trace') tracer.use('aws-sdk', { sqs: { batchPropagationEnabled: true } }) @@ -414,258 +397,6 @@ describe('Plugin', () => { }, 250) }) }) - - describe('data stream monitoring', () => { - let expectedProducerHash - let expectedConsumerHash - let nowStub - - before(() => { - 
process.env.DD_DATA_STREAMS_ENABLED = 'true' - tracer = require('../../dd-trace') - tracer.use('aws-sdk', { sqs: { dsmEnabled: true } }) - }) - - before(async () => { - return agent.load('aws-sdk', { - sqs: { - dsmEnabled: true - } - }, - { dsmEnabled: true }) - }) - - before(() => { - AWS = require(`../../../versions/${sqsClientName}@${version}`).get() - sqs = new AWS.SQS({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) - }) - - beforeEach(() => { - const producerHash = computePathwayHash( - 'test', - 'tester', - ['direction:out', 'topic:' + queueNameDSM, 'type:sqs'], - ENTRY_PARENT_HASH - ) - - expectedProducerHash = producerHash.readBigUInt64LE(0).toString() - expectedConsumerHash = computePathwayHash( - 'test', - 'tester', - ['direction:in', 'topic:' + queueNameDSM, 'type:sqs'], - producerHash - ).readBigUInt64LE(0).toString() - }) - - beforeEach(done => { - sqs.createQueue(queueOptionsDsm, (err, res) => err ? done(err) : done()) - }) - - beforeEach(done => { - sqs.createQueue(queueOptionsDsmConsumerOnly, (err, res) => err ? 
done(err) : done()) - }) - - afterEach(done => { - sqs.deleteQueue({ QueueUrl: QueueUrlDsm }, done) - }) - - afterEach(done => { - sqs.deleteQueue({ QueueUrl: QueueUrlDsmConsumerOnly }, done) - }) - - after(() => { - return agent.close({ ritmReset: false }) - }) - - afterEach(() => { - try { - nowStub.restore() - } catch { - // pass - } - agent.reload('aws-sdk', { kinesis: { dsmEnabled: true } }, { dsmEnabled: true }) - }) - - it('Should set pathway hash tag on a span when producing', (done) => { - sqs.sendMessage({ - MessageBody: 'test DSM', - QueueUrl: QueueUrlDsm - }, (err) => { - if (err) return done(err) - - let produceSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.resource.startsWith('sendMessage')) { - produceSpanMeta = span.meta - } - - assertObjectContains(produceSpanMeta, { - 'pathway.hash': expectedProducerHash - }) - }).then(done, done) - }) - }) - - it('Should set pathway hash tag on a span when consuming', (done) => { - sqs.sendMessage({ - MessageBody: 'test DSM', - QueueUrl: QueueUrlDsm - }, (err) => { - if (err) return done(err) - - sqs.receiveMessage({ - QueueUrl: QueueUrlDsm, - MessageAttributeNames: ['.*'] - }, (err) => { - if (err) return done(err) - - let consumeSpanMeta = {} - agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.name === 'aws.response') { - consumeSpanMeta = span.meta - } - - assertObjectContains(consumeSpanMeta, { - 'pathway.hash': expectedConsumerHash - }) - }).then(done, done) - }) - }) - }) - - if (sqsClientName === 'aws-sdk' && semver.intersects(version, '>=2.3')) { - it('Should set pathway hash tag on a span when consuming and promise() was used over a callback', - async () => { - let consumeSpanMeta = {} - const tracePromise = agent.assertSomeTraces(traces => { - const span = traces[0][0] - - if (span.name === 'aws.request' && span.meta['aws.operation'] === 'receiveMessage') { - consumeSpanMeta = span.meta - } - - assertObjectContains(consumeSpanMeta, { 
- 'pathway.hash': expectedConsumerHash - }) - }) - - await sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }).promise() - await sqs.receiveMessage({ QueueUrl: QueueUrlDsm }).promise() - - return tracePromise - }) - } - - it('Should emit DSM stats to the agent when sending a message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 1) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }).then(done, done) - - sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => {}) - }) - - it('Should emit DSM stats to the agent when receiving a message', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 2) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash), true) - }, { timeoutMs: 5000 }).then(done, done) - - sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsm }, () => { - sqs.receiveMessage({ QueueUrl: QueueUrlDsm, MessageAttributeNames: ['.*'] }, () => {}) - }) - }) - - it('Should emit DSM stats when receiving a message when the producer was not instrumented', done => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += 
statsBuckets.Stats.length - }) - } - }) - assert.strictEqual(statsPointsReceived, 1) - assert.strictEqual(agent.dsmStatsExistWithParentHash(agent, '0'), true) - }).then(done, done) - - agent.reload('aws-sdk', { sqs: { dsmEnabled: false } }, { dsmEnabled: false }) - sqs.sendMessage({ MessageBody: 'test DSM', QueueUrl: QueueUrlDsmConsumerOnly }, () => { - agent.reload('aws-sdk', { sqs: { dsmEnabled: true } }, { dsmEnabled: true }) - sqs.receiveMessage({ QueueUrl: QueueUrlDsmConsumerOnly, MessageAttributeNames: ['.*'] }, () => {}) - }) - }) - - it('Should emit DSM stats to the agent when sending batch messages', done => { - // we need to stub Date.now() to ensure a new stats bucket is created for each call - // otherwise, all stats checkpoints will be combined into a single stats points - let now = Date.now() - nowStub = sinon.stub(Date, 'now') - nowStub.callsFake(() => { - now += 1000000 - return now - }) - - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 3 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 3) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash), true) - }).then(done, done) - - sqs.sendMessageBatch( - { - Entries: [ - { - Id: '1', - MessageBody: 'test DSM 1' - }, - { - Id: '2', - MessageBody: 'test DSM 2' - }, - { - Id: '3', - MessageBody: 'test DSM 3' - } - ], - QueueUrl: QueueUrlDsm - }, () => { - nowStub.restore() - }) - }) - }) }) }) }) diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/dsm.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/dsm.spec.js new file mode 100644 index 00000000000..c4f8693adad --- /dev/null +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/dsm.spec.js @@ -0,0 +1,324 @@ +'use strict' + +const assert = 
require('node:assert/strict') +const { describe, it, beforeEach, afterEach } = require('mocha') +const sinon = require('sinon') + +const { randomUUID } = require('node:crypto') + +const agent = require('../../dd-trace/test/plugins/agent') +const { withVersions } = require('../../dd-trace/test/setup/mocha') + +const DataStreamsContext = require('../../dd-trace/src/datastreams/context') +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') + +const getDsmPathwayHash = (testTopic, isProducer, parentHash) => { + let edgeTags + if (isProducer) { + edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka'] + } else { + edgeTags = ['direction:in', 'group:test-group-confluent', 'topic:' + testTopic, 'type:kafka'] + } + + edgeTags.sort() + return computePathwayHash('test', 'tester', edgeTags, parentHash) +} + +describe('Plugin', () => { + const module = '@confluentinc/kafka-javascript' + const groupId = 'test-group-confluent' + + describe('confluentinc-kafka-javascript', function () { + this.timeout(30000) + + afterEach(() => { + return agent.close({ ritmReset: false }) + }) + + withVersions('confluentinc-kafka-javascript', module, (version) => { + let kafka + let admin + let tracer + let Kafka + let ConfluentKafka + let messages + let nativeApi + let testTopic + + describe('data stream monitoring', () => { + let consumer + let expectedProducerHash + let expectedConsumerHash + + beforeEach(async () => { + messages = [{ key: 'key1', value: 'test2' }] + + process.env.DD_DATA_STREAMS_ENABLED = 'true' + tracer = require('../../dd-trace') + await agent.load('confluentinc-kafka-javascript') + const lib = require(`../../../versions/${module}@${version}`).get() + + // Store the module for later use + nativeApi = lib + + // Setup for the KafkaJS wrapper tests + ConfluentKafka = lib.KafkaJS + Kafka = ConfluentKafka.Kafka + kafka = new Kafka({ + 
kafkaJS: { + clientId: `kafkajs-test-${version}`, + brokers: ['127.0.0.1:9092'], + logLevel: ConfluentKafka.logLevel.WARN + } + }) + testTopic = `test-topic-${randomUUID()}` + admin = kafka.admin() + await admin.connect() + await admin.createTopics({ + topics: [{ + topic: testTopic, + numPartitions: 1, + replicationFactor: 1 + }] + }) + await admin.disconnect() + + consumer = kafka.consumer({ + kafkaJS: { groupId, fromBeginning: true } + }) + await consumer.connect() + await consumer.subscribe({ topic: testTopic }) + }) + + beforeEach(() => { + expectedProducerHash = getDsmPathwayHash(testTopic, true, ENTRY_PARENT_HASH) + expectedConsumerHash = getDsmPathwayHash(testTopic, false, expectedProducerHash) + }) + + afterEach(async () => { + await consumer.disconnect() + }) + + describe('checkpoints', () => { + let setDataStreamsContextSpy + + beforeEach(() => { + setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') + }) + + afterEach(async () => { + setDataStreamsContextSpy.restore() + }) + + it('Should set a checkpoint on produce', async () => { + const messages = [{ key: 'consumerDSM1', value: 'test2' }] + await sendMessages(kafka, testTopic, messages) + assert.strictEqual(setDataStreamsContextSpy.args[0][0].hash, expectedProducerHash) + }) + + it('Should set a checkpoint on consume (eachMessage)', async () => { + const runArgs = [] + let consumerReceiveMessagePromise + await consumer.run({ + eachMessage: async () => { + runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + consumerReceiveMessagePromise = Promise.resolve() + } + }) + await sendMessages(kafka, testTopic, messages).then( + async () => await consumerReceiveMessagePromise + ) + + for (const runArg of runArgs) { + assert.strictEqual(runArg.hash, expectedConsumerHash) + } + }) + + it('Should set a checkpoint on consume (eachBatch)', async () => { + const runArgs = [] + let consumerReceiveMessagePromise + await consumer.run({ + eachBatch: async () => { + 
runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + consumerReceiveMessagePromise = Promise.resolve() + } + }) + await sendMessages(kafka, testTopic, messages).then( + async () => await consumerReceiveMessagePromise + ) + for (const runArg of runArgs) { + assert.strictEqual(runArg.hash, expectedConsumerHash) + } + }) + + it('Should set a message payload size when producing a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + }) + + it('Should set a message payload size when consuming a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + let consumerReceiveMessagePromise + await consumer.run({ + eachMessage: async () => { + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + consumerReceiveMessagePromise = Promise.resolve() + } + }) + await sendMessages(kafka, testTopic, messages).then( + async () => await consumerReceiveMessagePromise + ) + }) + }) + + describe('backlogs', () => { + let setOffsetSpy + + beforeEach(() => { + setOffsetSpy = sinon.spy(tracer._tracer._dataStreamsProcessor, 'setOffset') + }) + + afterEach(() => { + setOffsetSpy.restore() + }) + + it('Should add backlog on consumer explicit commit', async () => { + // Send a message, consume it, and record the last consumed offset + let commitMeta + + let messageProcessedResolve + const 
messageProcessedPromise = new Promise(resolve => { + messageProcessedResolve = resolve + }) + + const consumerRunPromise = consumer.run({ + eachMessage: async payload => { + const { topic, partition, message } = payload + commitMeta = { + topic, + partition, + offset: Number(message.offset) + } + messageProcessedResolve() + } + }) + + await consumerRunPromise + + // wait for the message to be processed before continuing + await sendMessages(kafka, testTopic, messages) + await messageProcessedPromise + await consumer.disconnect() + + for (const call of setOffsetSpy.getCalls()) { + assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') + } + + const newConsumer = kafka.consumer({ + kafkaJS: { groupId, fromBeginning: true, autoCommit: false } + }) + await newConsumer.connect() + await sendMessages(kafka, testTopic, [{ key: 'key1', value: 'test2' }]) + await newConsumer.run({ + eachMessage: async () => { + await newConsumer.disconnect() + } + }) + setOffsetSpy.resetHistory() + await newConsumer.commitOffsets() + + // Check our work + const runArg = setOffsetSpy.lastCall.args[0] + assert.strictEqual(runArg?.offset, commitMeta.offset) + assert.strictEqual(runArg?.partition, commitMeta.partition) + assert.strictEqual(runArg?.topic, commitMeta.topic) + assert.strictEqual(runArg?.type, 'kafka_commit') + assert.strictEqual(runArg?.consumer_group, groupId) + }) + + it('Should add backlog on producer response', async () => { + await sendMessages(kafka, testTopic, messages) + sinon.assert.calledOnce(setOffsetSpy) + const { topic } = setOffsetSpy.lastCall.args[0] + assert.strictEqual(topic, testTopic) + }) + }) + + describe('when using a kafka broker version that does not support message headers', () => { + class KafkaJSError extends Error { + constructor (message) { + super(message) + this.name = 'KafkaJSError' + this.type = 'ERR_UNKNOWN' + } + } + let error + let producer + let produceStub + + beforeEach(async () => { + // simulate a kafka error for the broker version not 
supporting message headers + error = new KafkaJSError() + error.message = 'Simulated KafkaJSError ERR_UNKNOWN from Producer.produce stub' + producer = kafka.producer() + await producer.connect() + + // Spy on the produce method from the native library before it gets wrapped + produceStub = sinon.stub(nativeApi.Producer.prototype, 'produce') + .callsFake((topic, partition, message, key) => { + throw error + }) + }) + + afterEach(async () => { + produceStub.restore() + await producer.disconnect() + }) + + it('should hit an error for the first send and not inject headers in later sends', async () => { + const testMessages = [{ key: 'key1', value: 'test1' }] + const testMessages2 = [{ key: 'key2', value: 'test2' }] + + try { + await producer.send({ topic: testTopic, messages: testMessages }) + assert.fail('First producer.send() should have thrown an error') + } catch (e) { + assert.strictEqual(e, error) + } + // Verify headers were injected in the first attempt + assert.ok(Object.hasOwn(testMessages[0].headers[0], 'x-datadog-trace-id')) + + // restore the stub to allow the next send to succeed + produceStub.restore() + + const result = await producer.send({ topic: testTopic, messages: testMessages2 }) + assert.strictEqual(testMessages2[0].headers, null) + assert.notStrictEqual(result, undefined) + }) + }) + }) + }) + }) +}) + +async function sendMessages (kafka, topic, messages) { + const producer = kafka.producer() + await producer.connect() + await producer.send({ + topic, + messages + }) + await producer.disconnect() +} diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js index 4ddf4d34638..b7bbaaf23e5 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js @@ -2,7 +2,6 @@ const assert = require('node:assert/strict') const { describe, it, beforeEach, 
afterEach } = require('mocha') -const sinon = require('sinon') const { randomUUID } = require('node:crypto') @@ -11,24 +10,8 @@ const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/he const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') const { expectedSchema } = require('./naming') const { withVersions } = require('../../dd-trace/test/setup/mocha') - -const DataStreamsContext = require('../../dd-trace/src/datastreams/context') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') const { assertObjectContains } = require('../../../integration-tests/helpers') -const getDsmPathwayHash = (testTopic, isProducer, parentHash) => { - let edgeTags - if (isProducer) { - edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka'] - } else { - edgeTags = ['direction:in', 'group:test-group-confluent', 'topic:' + testTopic, 'type:kafka'] - } - - edgeTags.sort() - return computePathwayHash('test', 'tester', edgeTags, parentHash) -} - describe('Plugin', () => { const module = '@confluentinc/kafka-javascript' const groupId = 'test-group-confluent' @@ -504,241 +487,6 @@ describe('Plugin', () => { // }) }) }) - - describe('data stream monitoring', () => { - let consumer - let expectedProducerHash - let expectedConsumerHash - - beforeEach(async () => { - tracer.init() - tracer.use('confluentinc-kafka-javascript', { dsmEnabled: true }) - messages = [{ key: 'key1', value: 'test2' }] - consumer = kafka.consumer({ - kafkaJS: { groupId, fromBeginning: true } - }) - await consumer.connect() - await consumer.subscribe({ topic: testTopic }) - }) - - beforeEach(() => { - expectedProducerHash = getDsmPathwayHash(testTopic, true, ENTRY_PARENT_HASH) - expectedConsumerHash = getDsmPathwayHash(testTopic, false, expectedProducerHash) - }) - - afterEach(async () => { - await consumer.disconnect() - }) 
- - describe('checkpoints', () => { - let setDataStreamsContextSpy - - beforeEach(() => { - setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') - }) - - afterEach(async () => { - setDataStreamsContextSpy.restore() - }) - - it('Should set a checkpoint on produce', async () => { - const messages = [{ key: 'consumerDSM1', value: 'test2' }] - await sendMessages(kafka, testTopic, messages) - assert.strictEqual(setDataStreamsContextSpy.args[0][0].hash, expectedProducerHash) - }) - - it('Should set a checkpoint on consume (eachMessage)', async () => { - const runArgs = [] - let consumerReceiveMessagePromise - await consumer.run({ - eachMessage: async () => { - runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) - consumerReceiveMessagePromise = Promise.resolve() - } - }) - await sendMessages(kafka, testTopic, messages).then( - async () => await consumerReceiveMessagePromise - ) - - for (const runArg of runArgs) { - assert.strictEqual(runArg.hash, expectedConsumerHash) - } - }) - - it('Should set a checkpoint on consume (eachBatch)', async () => { - const runArgs = [] - let consumerReceiveMessagePromise - await consumer.run({ - eachBatch: async () => { - runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) - consumerReceiveMessagePromise = Promise.resolve() - } - }) - await sendMessages(kafka, testTopic, messages).then( - async () => await consumerReceiveMessagePromise - ) - for (const runArg of runArgs) { - assert.strictEqual(runArg.hash, expectedConsumerHash) - } - }) - - it('Should set a message payload size when producing a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - await sendMessages(kafka, testTopic, messages) - 
assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - recordCheckpointSpy.restore() - }) - - it('Should set a message payload size when consuming a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - let consumerReceiveMessagePromise - await consumer.run({ - eachMessage: async () => { - assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - recordCheckpointSpy.restore() - consumerReceiveMessagePromise = Promise.resolve() - } - }) - await sendMessages(kafka, testTopic, messages).then( - async () => await consumerReceiveMessagePromise - ) - }) - }) - - describe('backlogs', () => { - let setOffsetSpy - - beforeEach(() => { - setOffsetSpy = sinon.spy(tracer._tracer._dataStreamsProcessor, 'setOffset') - }) - - afterEach(() => { - setOffsetSpy.restore() - }) - - it('Should add backlog on consumer explicit commit', async () => { - // Send a message, consume it, and record the last consumed offset - let commitMeta - - let messageProcessedResolve - const messageProcessedPromise = new Promise(resolve => { - messageProcessedResolve = resolve - }) - - const consumerRunPromise = consumer.run({ - eachMessage: async payload => { - const { topic, partition, message } = payload - commitMeta = { - topic, - partition, - offset: Number(message.offset) - } - messageProcessedResolve() - } - }) - - await consumerRunPromise - - // wait for the message to be processed before continuing - await sendMessages(kafka, testTopic, messages) - await messageProcessedPromise - await consumer.disconnect() - - for (const call of setOffsetSpy.getCalls()) { - assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') - } - - const newConsumer = kafka.consumer({ - kafkaJS: { groupId, fromBeginning: true, autoCommit: 
false } - }) - await newConsumer.connect() - await sendMessages(kafka, testTopic, [{ key: 'key1', value: 'test2' }]) - await newConsumer.run({ - eachMessage: async () => { - await newConsumer.disconnect() - } - }) - setOffsetSpy.resetHistory() - await newConsumer.commitOffsets() - - // Check our work - const runArg = setOffsetSpy.lastCall.args[0] - assert.strictEqual(runArg?.offset, commitMeta.offset) - assert.strictEqual(runArg?.partition, commitMeta.partition) - assert.strictEqual(runArg?.topic, commitMeta.topic) - assert.strictEqual(runArg?.type, 'kafka_commit') - assert.strictEqual(runArg?.consumer_group, groupId) - }) - - it('Should add backlog on producer response', async () => { - await sendMessages(kafka, testTopic, messages) - sinon.assert.calledOnce(setOffsetSpy) - const { topic } = setOffsetSpy.lastCall.args[0] - assert.strictEqual(topic, testTopic) - }) - }) - - describe('when using a kafka broker version that does not support message headers', () => { - class KafkaJSError extends Error { - constructor (message) { - super(message) - this.name = 'KafkaJSError' - this.type = 'ERR_UNKNOWN' - } - } - let error - let producer - let produceStub - - beforeEach(async () => { - // simulate a kafka error for the broker version not supporting message headers - error = new KafkaJSError() - error.message = 'Simulated KafkaJSError ERR_UNKNOWN from Producer.produce stub' - producer = kafka.producer() - await producer.connect() - - // Spy on the produce method from the native library before it gets wrapped - produceStub = sinon.stub(nativeApi.Producer.prototype, 'produce') - .callsFake((topic, partition, message, key) => { - throw error - }) - }) - - afterEach(async () => { - produceStub.restore() - await producer.disconnect() - }) - - it('should hit an error for the first send and not inject headers in later sends', async () => { - const testMessages = [{ key: 'key1', value: 'test1' }] - const testMessages2 = [{ key: 'key2', value: 'test2' }] - - try { - await 
producer.send({ topic: testTopic, messages: testMessages }) - assert.fail('First producer.send() should have thrown an error') - } catch (e) { - assert.strictEqual(e, error) - } - // Verify headers were injected in the first attempt - assert.ok(Object.hasOwn(testMessages[0].headers[0], 'x-datadog-trace-id')) - - // restore the stub to allow the next send to succeed - produceStub.restore() - - const result = await producer.send({ topic: testTopic, messages: testMessages2 }) - assert.strictEqual(testMessages2[0].headers, null) - assert.notStrictEqual(result, undefined) - }) - }) - }) }) }) }) diff --git a/packages/datadog-plugin-google-cloud-pubsub/test/dsm.spec.js b/packages/datadog-plugin-google-cloud-pubsub/test/dsm.spec.js new file mode 100644 index 00000000000..05f5a3e9f20 --- /dev/null +++ b/packages/datadog-plugin-google-cloud-pubsub/test/dsm.spec.js @@ -0,0 +1,166 @@ +'use strict' + +const assert = require('node:assert/strict') + +const { after, before, beforeEach, describe, it } = require('mocha') +const sinon = require('sinon') + +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') +const id = require('../../dd-trace/src/id') +const agent = require('../../dd-trace/test/plugins/agent') +const { withVersions } = require('../../dd-trace/test/setup/mocha') + +const TIMEOUT = 30000 +const dsmTopicName = 'dsm-topic' + +describe('Plugin', () => { + let tracer + + describe('google-cloud-pubsub', function () { + this.timeout(TIMEOUT) + + before(() => { + process.env.PUBSUB_EMULATOR_HOST = 'localhost:8081' + process.env.DD_DATA_STREAMS_ENABLED = 'true' + }) + + after(() => { + delete process.env.PUBSUB_EMULATOR_HOST + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + withVersions('google-cloud-pubsub', '@google-cloud/pubsub', version => { + let pubsub + let project + let expectedProducerHash + let 
expectedConsumerHash + + describe('data stream monitoring', () => { + let dsmTopic + let sub + let consume + + before(async () => { + tracer = require('../../dd-trace') + await agent.load('google-cloud-pubsub', { + dsmEnabled: true + }) + tracer.use('google-cloud-pubsub', { dsmEnabled: true }) + + const { PubSub } = require(`../../../versions/@google-cloud/pubsub@${version}`).get() + project = getProjectId() + pubsub = new PubSub({ projectId: project }) + + dsmTopic = await pubsub.createTopic(dsmTopicName) + dsmTopic = dsmTopic[0] + sub = await dsmTopic.createSubscription('DSM') + sub = sub[0] + consume = function (cb) { + sub.on('message', cb) + } + + const dsmFullTopic = `projects/${project}/topics/${dsmTopicName}` + + expectedProducerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + dsmFullTopic, 'type:google-pubsub'], + ENTRY_PARENT_HASH + ) + expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'topic:' + dsmFullTopic, 'type:google-pubsub'], + expectedProducerHash + ) + }) + + beforeEach(() => { + return agent.load('google-cloud-pubsub', { + dsmEnabled: true + }) + }) + + describe('should set a DSM checkpoint', () => { + it('on produce', async () => { + await publish(dsmTopic, { data: Buffer.from('DSM produce checkpoint') }) + + agent.expectPipelineStats(dsmStats => { + let statsPointsReceived = 0 + // we should have 1 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 1) + assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash.readBigUInt64BE(0).toString()), true) + }, { timeoutMs: TIMEOUT }) + }) + + it('on consume', async () => { + await publish(dsmTopic, { data: Buffer.from('DSM consume checkpoint') }) + await consume(async () => { + agent.expectPipelineStats(dsmStats => { + let 
statsPointsReceived = 0 + // we should have 2 dsm stats points + dsmStats.forEach((timeStatsBucket) => { + if (timeStatsBucket && timeStatsBucket.Stats) { + timeStatsBucket.Stats.forEach((statsBuckets) => { + statsPointsReceived += statsBuckets.Stats.length + }) + } + }) + assert.ok(statsPointsReceived >= 2) + assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash.readBigUInt64BE(0).toString()), true) + }, { timeoutMs: TIMEOUT }) + }) + }) + }) + + describe('it should set a message payload size', () => { + let recordCheckpointSpy + + beforeEach(() => { + recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + }) + + afterEach(() => { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + }) + + it('when producing a message', async () => { + await publish(dsmTopic, { data: Buffer.from('DSM produce payload size') }) + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + }) + + it('when consuming a message', async () => { + await publish(dsmTopic, { data: Buffer.from('DSM consume payload size') }) + + await consume(async () => { + assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + }) + }) + }) + }) + }) + }) +}) + +function getProjectId () { + return `test-project-dsm-${id()}` +} + +function publish (topic, options) { + if (topic.publishMessage) { + return topic.publishMessage(options) + } else { + return topic.publish(options.data) + } +} diff --git a/packages/datadog-plugin-google-cloud-pubsub/test/index.spec.js b/packages/datadog-plugin-google-cloud-pubsub/test/index.spec.js index 47d064c5b91..f6375385803 100644 --- a/packages/datadog-plugin-google-cloud-pubsub/test/index.spec.js +++ b/packages/datadog-plugin-google-cloud-pubsub/test/index.spec.js @@ -3,18 +3,14 @@ const assert = require('node:assert/strict') const { after, afterEach, before, beforeEach, describe, it } = require('mocha') -const sinon = require('sinon') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = 
require('../../dd-trace/src/constants') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') const id = require('../../dd-trace/src/id') const agent = require('../../dd-trace/test/plugins/agent') const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/helpers') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') const { expectedSchema, rawExpectedSchema } = require('./naming') const TIMEOUT = 30000 -const dsmTopicName = 'dsm-topic' describe('Plugin', () => { let tracer @@ -24,7 +20,6 @@ describe('Plugin', () => { before(() => { process.env.PUBSUB_EMULATOR_HOST = 'localhost:8081' - process.env.DD_DATA_STREAMS_ENABLED = 'true' }) after(() => { @@ -41,8 +36,6 @@ describe('Plugin', () => { let resource let v1 let gax - let expectedProducerHash - let expectedConsumerHash describe('without configuration', () => { beforeEach(() => { @@ -332,113 +325,6 @@ describe('Plugin', () => { }) }) - describe('data stream monitoring', () => { - let dsmTopic - let sub - let consume - - beforeEach(() => { - return agent.load('google-cloud-pubsub', { - dsmEnabled: true - }) - }) - - before(async () => { - const { PubSub } = require(`../../../versions/@google-cloud/pubsub@${version}`).get() - project = getProjectId() - resource = `projects/${project}/topics/${dsmTopicName}` - pubsub = new PubSub({ projectId: project }) - tracer.use('google-cloud-pubsub', { dsmEnabled: true }) - - dsmTopic = await pubsub.createTopic(dsmTopicName) - dsmTopic = dsmTopic[0] - sub = await dsmTopic.createSubscription('DSM') - sub = sub[0] - consume = function (cb) { - sub.on('message', cb) - } - - const dsmFullTopic = `projects/${project}/topics/${dsmTopicName}` - - expectedProducerHash = computePathwayHash( - 'test', - 'tester', - ['direction:out', 'topic:' + dsmFullTopic, 'type:google-pubsub'], - ENTRY_PARENT_HASH - ) - 
expectedConsumerHash = computePathwayHash( - 'test', - 'tester', - ['direction:in', 'topic:' + dsmFullTopic, 'type:google-pubsub'], - expectedProducerHash - ) - }) - - describe('should set a DSM checkpoint', () => { - it('on produce', async () => { - await publish(dsmTopic, { data: Buffer.from('DSM produce checkpoint') }) - - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 1 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 1) - assert.strictEqual(agent.dsmStatsExist(agent, expectedProducerHash.readBigUInt64BE(0).toString()), true) - }, { timeoutMs: TIMEOUT }) - }) - - it('on consume', async () => { - await publish(dsmTopic, { data: Buffer.from('DSM consume checkpoint') }) - await consume(async () => { - agent.expectPipelineStats(dsmStats => { - let statsPointsReceived = 0 - // we should have 2 dsm stats points - dsmStats.forEach((timeStatsBucket) => { - if (timeStatsBucket && timeStatsBucket.Stats) { - timeStatsBucket.Stats.forEach((statsBuckets) => { - statsPointsReceived += statsBuckets.Stats.length - }) - } - }) - assert.ok(statsPointsReceived >= 2) - assert.strictEqual(agent.dsmStatsExist(agent, expectedConsumerHash.readBigUInt64BE(0).toString()), true) - }, { timeoutMs: TIMEOUT }) - }) - }) - }) - - describe('it should set a message payload size', () => { - let recordCheckpointSpy - - beforeEach(() => { - recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - }) - - afterEach(() => { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - }) - - it('when producing a message', async () => { - await publish(dsmTopic, { data: Buffer.from('DSM produce payload size') }) - assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - }) - - it('when consuming a 
message', async () => { - await publish(dsmTopic, { data: Buffer.from('DSM consume payload size') }) - - await consume(async () => { - assert.ok(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - }) - }) - }) - }) - function expectSpanWithDefaults (expected) { const prefixedResource = [expected.meta['pubsub.method'], resource].filter(x => x).join(' ') const service = expected.meta['pubsub.method'] ? 'test-pubsub' : 'test' diff --git a/packages/datadog-plugin-kafkajs/test/dsm.spec.js b/packages/datadog-plugin-kafkajs/test/dsm.spec.js new file mode 100644 index 00000000000..f2bd23f95fb --- /dev/null +++ b/packages/datadog-plugin-kafkajs/test/dsm.spec.js @@ -0,0 +1,249 @@ +'use strict' + +const assert = require('node:assert/strict') +const { randomUUID } = require('crypto') +const { describe, it, beforeEach, afterEach } = require('mocha') +const semver = require('semver') +const sinon = require('sinon') + +const { withVersions } = require('../../dd-trace/test/setup/mocha') +const agent = require('../../dd-trace/test/plugins/agent') + +const DataStreamsContext = require('../../dd-trace/src/datastreams/context') +const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') +const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') +const { assertObjectContains } = require('../../../integration-tests/helpers') + +const testKafkaClusterId = '5L6g3nShT-eMCtK--X86sw' + +const getDsmPathwayHash = (testTopic, clusterIdAvailable, isProducer, parentHash) => { + let edgeTags + if (isProducer) { + edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka'] + } else { + edgeTags = ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'] + } + + if (clusterIdAvailable) { + edgeTags.push(`kafka_cluster_id:${testKafkaClusterId}`) + } + edgeTags.sort() + return computePathwayHash('test', 'tester', edgeTags, parentHash) +} + +describe('Plugin', () => { + describe('kafkajs', function () { 
+ this.timeout(10000) + + afterEach(() => { + return agent.close({ ritmReset: false }) + }) + + withVersions('kafkajs', 'kafkajs', (version) => { + let kafka + let admin + let tracer + let Kafka + let clusterIdAvailable + let expectedProducerHash + let expectedConsumerHash + let testTopic + + describe('data stream monitoring', () => { + const messages = [{ key: 'key1', value: 'test2' }] + + beforeEach(async () => { + process.env.DD_DATA_STREAMS_ENABLED = 'true' + tracer = require('../../dd-trace') + await agent.load('kafkajs') + const lib = require(`../../../versions/kafkajs@${version}`).get() + Kafka = lib.Kafka + kafka = new Kafka({ + clientId: `kafkajs-test-${version}`, + brokers: ['127.0.0.1:9092'], + logLevel: lib.logLevel.WARN + }) + testTopic = `test-topic-${randomUUID()}` + admin = kafka.admin() + await admin.createTopics({ + topics: [{ + topic: testTopic, + numPartitions: 1, + replicationFactor: 1 + }] + }) + clusterIdAvailable = semver.intersects(version, '>=1.13') + expectedProducerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, true, ENTRY_PARENT_HASH) + expectedConsumerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, false, expectedProducerHash) + }) + + describe('checkpoints', () => { + let consumer + let setDataStreamsContextSpy + + beforeEach(async () => { + tracer.init() + tracer.use('kafkajs', { dsmEnabled: true }) + consumer = kafka.consumer({ groupId: 'test-group' }) + await consumer.connect() + await consumer.subscribe({ topic: testTopic }) + setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') + }) + + afterEach(async () => { + setDataStreamsContextSpy.restore() + await consumer.disconnect() + }) + + it('Should set a checkpoint on produce', async () => { + const messages = [{ key: 'consumerDSM1', value: 'test2' }] + await sendMessages(kafka, testTopic, messages) + assert.strictEqual(setDataStreamsContextSpy.args[0][0].hash, expectedProducerHash) + }) + + it('Should set a checkpoint on consume 
(eachMessage)', async () => { + const runArgs = [] + await consumer.run({ + eachMessage: async () => { + runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + } + }) + await sendMessages(kafka, testTopic, messages) + await consumer.disconnect() + for (const runArg of runArgs) { + assert.strictEqual(runArg.hash, expectedConsumerHash) + } + }) + + it('Should set a checkpoint on consume (eachBatch)', async () => { + const runArgs = [] + await consumer.run({ + eachBatch: async () => { + runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + } + }) + await sendMessages(kafka, testTopic, messages) + await consumer.disconnect() + for (const runArg of runArgs) { + assert.strictEqual(runArg.hash, expectedConsumerHash) + } + }) + + it('Should set a message payload size when producing a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + assert.ok(Object.hasOwn(recordCheckpointSpy.args[0][0], 'payloadSize')) + recordCheckpointSpy.restore() + }) + + it('Should set a message payload size when consuming a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + await consumer.run({ + eachMessage: async () => { + assert.ok(Object.hasOwn(recordCheckpointSpy.args[0][0], 'payloadSize')) + recordCheckpointSpy.restore() + } + }) + }) + }) + + describe('backlogs', () => { + let consumer + let setOffsetSpy + + beforeEach(async () => { + tracer.init() + tracer.use('kafkajs', { dsmEnabled: true 
}) + consumer = kafka.consumer({ groupId: 'test-group' }) + await consumer.connect() + await consumer.subscribe({ topic: testTopic }) + setOffsetSpy = sinon.spy(tracer._tracer._dataStreamsProcessor, 'setOffset') + }) + + afterEach(async () => { + setOffsetSpy.restore() + await consumer.disconnect() + }) + + if (semver.intersects(version, '>=1.10')) { + it('Should add backlog on consumer explicit commit', async () => { + // Send a message, consume it, and record the last consumed offset + let commitMeta + const deferred = {} + deferred.promise = new Promise((resolve, reject) => { + deferred.resolve = resolve + deferred.reject = reject + }) + await consumer.run({ + eachMessage: async payload => { + const { topic, partition, message } = payload + commitMeta = { + topic, + partition, + offset: Number(message.offset) + } + deferred.resolve() + }, + autoCommit: false + }) + await sendMessages(kafka, testTopic, messages) + await deferred.promise + await consumer.disconnect() // Flush ongoing `eachMessage` calls + for (const call of setOffsetSpy.getCalls()) { + assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') + } + + /** + * No choice but to reinitialize everything, because the only way to flush eachMessage + * calls is to disconnect. 
+ */ + consumer.connect() + await sendMessages(kafka, testTopic, messages) + await consumer.run({ eachMessage: async () => {}, autoCommit: false }) + setOffsetSpy.resetHistory() + await consumer.commitOffsets([commitMeta]) + await consumer.disconnect() + + // Check our work + const runArg = setOffsetSpy.lastCall.args[0] + sinon.assert.calledOnce(setOffsetSpy) + assert.strictEqual(runArg?.offset, commitMeta.offset) + assert.strictEqual(runArg?.partition, commitMeta.partition) + assert.strictEqual(runArg?.topic, commitMeta.topic) + assertObjectContains(runArg, { + type: 'kafka_commit', + consumer_group: 'test-group' + }) + }) + } + + it('Should add backlog on producer response', async () => { + await sendMessages(kafka, testTopic, messages) + sinon.assert.calledOnce(setOffsetSpy) + const { topic } = setOffsetSpy.lastCall.args[0] + assert.strictEqual(topic, testTopic) + }) + }) + }) + }) + }) +}) + +async function sendMessages (kafka, topic, messages) { + const producer = kafka.producer() + await producer.connect() + await producer.send({ + topic, + messages + }) + await producer.disconnect() +} diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index 9d58bbbd994..160eeadf46b 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -4,7 +4,7 @@ const assert = require('node:assert/strict') const { randomUUID } = require('node:crypto') const dc = require('dc-polyfill') -const { describe, it, beforeEach, afterEach, before } = require('mocha') +const { describe, it, beforeEach, afterEach } = require('mocha') const semver = require('semver') const sinon = require('sinon') @@ -12,30 +12,12 @@ const { withNamingSchema, withPeerService, withVersions } = require('../../dd-tr const agent = require('../../dd-trace/test/plugins/agent') const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/helpers') const { ERROR_MESSAGE, 
ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') +const { assertObjectContains } = require('../../../integration-tests/helpers') const { expectedSchema, rawExpectedSchema } = require('./naming') -const DataStreamsContext = require('../../dd-trace/src/datastreams/context') -const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') -const { assertObjectContains } = require('../../../integration-tests/helpers') const testKafkaClusterId = '5L6g3nShT-eMCtK--X86sw' -const getDsmPathwayHash = (testTopic, clusterIdAvailable, isProducer, parentHash) => { - let edgeTags - if (isProducer) { - edgeTags = ['direction:out', 'topic:' + testTopic, 'type:kafka'] - } else { - edgeTags = ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'] - } - - if (clusterIdAvailable) { - edgeTags.push(`kafka_cluster_id:${testKafkaClusterId}`) - } - edgeTags.sort() - return computePathwayHash('test', 'tester', edgeTags, parentHash) -} - describe('Plugin', () => { describe('kafkajs', function () { // TODO: remove when new internal trace has landed @@ -51,8 +33,6 @@ describe('Plugin', () => { let Kafka let Broker let clusterIdAvailable - let expectedProducerHash - let expectedConsumerHash let testTopic describe('without configuration', () => { @@ -60,7 +40,6 @@ describe('Plugin', () => { const messages2 = [{ key: 'key2', value: 'test3' }] beforeEach(async () => { - process.env.DD_DATA_STREAMS_ENABLED = 'true' tracer = require('../../dd-trace') await agent.load('kafkajs') const lib = require(`../../../versions/kafkajs@${version}`).get() @@ -81,8 +60,6 @@ describe('Plugin', () => { }] }) clusterIdAvailable = semver.intersects(version, '>=1.13') - expectedProducerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, true, ENTRY_PARENT_HASH) - expectedConsumerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, false, 
expectedProducerHash) }) describe('producer', () => { @@ -90,7 +67,6 @@ describe('Plugin', () => { const meta = { 'span.kind': 'producer', component: 'kafkajs', - 'pathway.hash': expectedProducerHash.readBigUInt64LE(0).toString(), 'messaging.destination.name': testTopic, 'messaging.kafka.bootstrap.servers': '127.0.0.1:9092' } @@ -252,7 +228,6 @@ describe('Plugin', () => { meta: { 'span.kind': 'consumer', component: 'kafkajs', - 'pathway.hash': expectedConsumerHash.readBigUInt64LE(0).toString(), 'messaging.destination.name': testTopic }, resource: testTopic, @@ -429,171 +404,6 @@ describe('Plugin', () => { rawExpectedSchema.receive ) }) - - describe('data stream monitoring', () => { - let consumer - - beforeEach(async () => { - tracer.init() - tracer.use('kafkajs', { dsmEnabled: true }) - consumer = kafka.consumer({ groupId: 'test-group' }) - await consumer.connect() - await consumer.subscribe({ topic: testTopic }) - }) - - before(() => { - clusterIdAvailable = semver.intersects(version, '>=1.13') - expectedProducerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, true, ENTRY_PARENT_HASH) - expectedConsumerHash = getDsmPathwayHash(testTopic, clusterIdAvailable, false, expectedProducerHash) - }) - - afterEach(async () => { - await consumer.disconnect() - }) - - describe('checkpoints', () => { - let setDataStreamsContextSpy - - beforeEach(() => { - setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') - }) - - afterEach(() => { - setDataStreamsContextSpy.restore() - }) - - it('Should set a checkpoint on produce', async () => { - const messages = [{ key: 'consumerDSM1', value: 'test2' }] - await sendMessages(kafka, testTopic, messages) - assert.strictEqual(setDataStreamsContextSpy.args[0][0].hash, expectedProducerHash) - }) - - it('Should set a checkpoint on consume (eachMessage)', async () => { - const runArgs = [] - await consumer.run({ - eachMessage: async () => { - runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) - } - }) - 
await sendMessages(kafka, testTopic, messages) - await consumer.disconnect() - for (const runArg of runArgs) { - assert.strictEqual(runArg.hash, expectedConsumerHash) - } - }) - - it('Should set a checkpoint on consume (eachBatch)', async () => { - const runArgs = [] - await consumer.run({ - eachBatch: async () => { - runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) - } - }) - await sendMessages(kafka, testTopic, messages) - await consumer.disconnect() - for (const runArg of runArgs) { - assert.strictEqual(runArg.hash, expectedConsumerHash) - } - }) - - it('Should set a message payload size when producing a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - await sendMessages(kafka, testTopic, messages) - assert.ok(Object.hasOwn(recordCheckpointSpy.args[0][0], 'payloadSize')) - recordCheckpointSpy.restore() - }) - - it('Should set a message payload size when consuming a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - await sendMessages(kafka, testTopic, messages) - await consumer.run({ - eachMessage: async () => { - assert.ok(Object.hasOwn(recordCheckpointSpy.args[0][0], 'payloadSize')) - recordCheckpointSpy.restore() - } - }) - }) - }) - - describe('backlogs', () => { - let setOffsetSpy - - beforeEach(() => { - setOffsetSpy = sinon.spy(tracer._tracer._dataStreamsProcessor, 'setOffset') - }) - - afterEach(() => { - setOffsetSpy.restore() - }) - - if (semver.intersects(version, '>=1.10')) { - it('Should add backlog on consumer explicit commit', async () => { 
- // Send a message, consume it, and record the last consumed offset - let commitMeta - const deferred = {} - deferred.promise = new Promise((resolve, reject) => { - deferred.resolve = resolve - deferred.reject = reject - }) - await consumer.run({ - eachMessage: async payload => { - const { topic, partition, message } = payload - commitMeta = { - topic, - partition, - offset: Number(message.offset) - } - deferred.resolve() - }, - autoCommit: false - }) - await sendMessages(kafka, testTopic, messages) - await deferred.promise - await consumer.disconnect() // Flush ongoing `eachMessage` calls - for (const call of setOffsetSpy.getCalls()) { - assert.notStrictEqual(call.args[0]?.type, 'kafka_commit') - } - - /** - * No choice but to reinitialize everything, because the only way to flush eachMessage - * calls is to disconnect. - */ - consumer.connect() - await sendMessages(kafka, testTopic, messages) - await consumer.run({ eachMessage: async () => {}, autoCommit: false }) - setOffsetSpy.resetHistory() - await consumer.commitOffsets([commitMeta]) - await consumer.disconnect() - - // Check our work - const runArg = setOffsetSpy.lastCall.args[0] - sinon.assert.calledOnce(setOffsetSpy) - assert.strictEqual(runArg?.offset, commitMeta.offset) - assert.strictEqual(runArg?.partition, commitMeta.partition) - assert.strictEqual(runArg?.topic, commitMeta.topic) - assertObjectContains(runArg, { - type: 'kafka_commit', - consumer_group: 'test-group' - }) - }) - } - - it('Should add backlog on producer response', async () => { - await sendMessages(kafka, testTopic, messages) - sinon.assert.calledOnce(setOffsetSpy) - const { topic } = setOffsetSpy.lastCall.args[0] - assert.strictEqual(topic, testTopic) - }) - }) - }) }) }) }) From 4eea2d1d52cfb3ff944cefa70d464c0e85f5d4d8 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Fri, 19 Dec 2025 22:53:45 +0100 Subject: [PATCH 13/15] refactor(config): use getEnv instead of getEnvironmentVariables (#7147) --- packages/dd-trace/src/config.js | 
34 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index f32a8097b77..a39dc187c94 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -276,8 +276,6 @@ class Config { this.stableConfig = new StableConfig() } - const envs = getEnvironmentVariables() - options = { ...options, appsec: options.appsec == null ? options.experimental?.appsec : options.appsec, @@ -321,7 +319,7 @@ class Config { this.#defaults = defaults this.#applyDefaults() this.#applyStableConfig(this.stableConfig?.localEntries ?? {}, this.#localStableConfig) - this.#applyEnvironment(envs) + this.#applyEnvironment() this.#applyStableConfig(this.stableConfig?.fleetEntries ?? {}, this.#fleetStableConfig) this.#applyOptions(options) this.#applyCalculated() @@ -342,7 +340,7 @@ class Config { } if (this.gitMetadataEnabled) { - this.#loadGitMetadata(envs) + this.#loadGitMetadata() } } @@ -1473,26 +1471,24 @@ class Config { } } - #loadGitMetadata (envs) { + #loadGitMetadata () { // try to read Git metadata from the environment variables this.repositoryUrl = removeUserSensitiveInfo( - envs.DD_GIT_REPOSITORY_URL ?? - this.tags[GIT_REPOSITORY_URL] + getEnv('DD_GIT_REPOSITORY_URL') ?? this.tags[GIT_REPOSITORY_URL] ) - this.commitSHA = envs.DD_GIT_COMMIT_SHA ?? - this.tags[GIT_COMMIT_SHA] + this.commitSHA = getEnv('DD_GIT_COMMIT_SHA') ?? this.tags[GIT_COMMIT_SHA] // otherwise, try to read Git metadata from the git.properties file if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_PROPERTIES_FILE = envs.DD_GIT_PROPERTIES_FILE ?? - `${process.cwd()}/git.properties` + const DD_GIT_PROPERTIES_FILE = getEnv('DD_GIT_PROPERTIES_FILE') + const gitPropertiesFile = DD_GIT_PROPERTIES_FILE ?? 
`${process.cwd()}/git.properties` let gitPropertiesString try { - gitPropertiesString = fs.readFileSync(DD_GIT_PROPERTIES_FILE, 'utf8') + gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8') } catch (e) { // Only log error if the user has set a git.properties path - if (envs.DD_GIT_PROPERTIES_FILE) { - log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', DD_GIT_PROPERTIES_FILE, e) + if (DD_GIT_PROPERTIES_FILE) { + log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, e) } } if (gitPropertiesString) { @@ -1503,11 +1499,11 @@ class Config { } // otherwise, try to read Git metadata from the .git/ folder if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_FOLDER_PATH = envs.DD_GIT_FOLDER_PATH ?? - path.join(process.cwd(), '.git') + const DD_GIT_FOLDER_PATH = getEnv('DD_GIT_FOLDER_PATH') + const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git') if (!this.repositoryUrl) { // try to read git config (repository URL) - const gitConfigPath = path.join(DD_GIT_FOLDER_PATH, 'config') + const gitConfigPath = path.join(gitFolderPath, 'config') try { const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8') if (gitConfigContent) { @@ -1515,14 +1511,14 @@ class Config { } } catch (e) { // Only log error if the user has set a .git/ path - if (envs.DD_GIT_FOLDER_PATH) { + if (DD_GIT_FOLDER_PATH) { log.error('Error reading git config: %s', gitConfigPath, e) } } } if (!this.commitSHA) { // try to read git HEAD (commit SHA) - const gitHeadSha = resolveGitHeadSHA(DD_GIT_FOLDER_PATH) + const gitHeadSha = resolveGitHeadSHA(gitFolderPath) if (gitHeadSha) { this.commitSHA = gitHeadSha } From 5210356c847e13fc7023b629a97f3acf9a0a7ed9 Mon Sep 17 00:00:00 2001 From: Thomas Watson Date: Fri, 19 Dec 2025 23:46:01 +0100 Subject: [PATCH 14/15] docs: add AGENTS.md and update CONTRIBUTING.md (#7109) Add comprehensive developer guide (AGENTS.md) and enhance CONTRIBUTING.md with detailed documentation covering: - Project setup, 
structure, and overview - Testing workflows (unit tests, integration tests, plugin tests) - Code style and linting standards (import ordering, ECMAScript/Node.js APIs) - Performance and memory considerations (async/await restrictions, array iteration) - Development workflow best practices (backportability, refactoring principles) - How to add new configuration options and instrumentations - Debugging, logging, and error handling patterns The AGENTS.md file follows the agents.md spec and provides a quick reference for common development tasks and established best practices to help both AI coding assistants and human contributors navigate the codebase effectively. Includes CLAUDE.md symlink for compatibility with Claude-based tools. This reduces onboarding friction, enforces consistency, prevents common mistakes, and improves code quality by making best practices discoverable. --- .vscode/settings.json | 12 ++ AGENTS.md | 290 ++++++++++++++++++++++++++++++++++++++++++ CLAUDE.md | 1 + CONTRIBUTING.md | 274 ++++++++++++++++++++++++++++++++++++--- 4 files changed, 556 insertions(+), 21 deletions(-) create mode 100644 AGENTS.md create mode 120000 CLAUDE.md diff --git a/.vscode/settings.json b/.vscode/settings.json index a8e5bc0bfe7..8e6f1faf746 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -2,4 +2,16 @@ // Use the workspace version of TypeScript instead of VSCode's bundled version "typescript.tsdk": "node_modules/typescript/lib", "typescript.enablePromptUseWorkspaceTsdk": true, + "cSpell.words": [ + "aerospike", + "appsec", + "backportability", + "kafkajs", + "llmobs", + "microbenchmarks", + "oracledb", + "rabbitmq", + "rspack", + "sirun" + ], } diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000000..119acc646cc --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,290 @@ +# AGENTS.md + +## Prerequisites + +- Node.js >= 18 +- yarn 1.x +- Docker + docker-compose (for running service dependencies in tests) + +## Setup + +- Install dependencies: 
`yarn install`
+
+**Note:** This project uses yarn, not npm. Always use `yarn` commands instead of `npm` commands.
+
+## Project Overview
+
+dd-trace is the Datadog client library for Node.js.
+
+**Key Directories:**
+- `packages/dd-trace/` - Main library (APM, profiling, debugger, appsec, llmobs, CI visibility)
+- `packages/datadog-core/` - Async context storage, shared utilities
+- `packages/datadog-instrumentations/` - Instrumentation implementations
+- `packages/datadog-plugin-*/` - 100+ plugins for third-party integrations
+- `integration-tests/` - E2E integration tests
+- `benchmark/` - Performance benchmarks
+
+## Testing Instructions
+
+### Testing Workflow
+
+When developing a feature or fixing a bug:
+
+1. Start with individual test files to verify things work
+2. Run component tests: `yarn test:<component>` (e.g., `yarn test:debugger`, `yarn test:appsec`)
+3. Run integration tests: `yarn test:integration:<component>` (e.g., `yarn test:integration:debugger`)
+
+### Running Individual Tests
+
+**IMPORTANT**: Never run `yarn test` directly. Use `mocha` or `tap` directly on test files.
+
+**Mocha unit tests:**
+```bash
+./node_modules/.bin/mocha -r "packages/dd-trace/test/setup/mocha.js" path/to/test.spec.js
+```
+
+**Tap unit tests:**
+```bash
+./node_modules/.bin/tap path/to/test.spec.js
+```
+
+**Integration tests:**
+```bash
+./node_modules/.bin/mocha --timeout 60000 -r "packages/dd-trace/test/setup/core.js" path/to/test.spec.js
+```
+
+**Target specific tests:**
+- Add `--grep "test name pattern"` flag
+
+**Enable debug logging:**
+- Prefix with `DD_TRACE_DEBUG=true`
+
+**Note**: New tests should be written using mocha, not tap. Existing tap tests use mocha-style `describe` and `it` blocks. 
+ +### Plugin Tests + +**Use `PLUGINS` env var:** +```bash +PLUGINS="amqplib" yarn test:plugins +PLUGINS="amqplib|bluebird" yarn test:plugins # pipe-delimited for multiple +./node_modules/.bin/mocha -r "packages/dd-trace/test/setup/mocha.js" packages/datadog-plugin-amqplib/test/index.spec.js +``` + +**With external services** (check `.github/workflows/apm-integrations.yml` for `SERVICES`): +```bash +export SERVICES="rabbitmq" PLUGINS="amqplib" +docker compose up -d $SERVICES +yarn services && yarn test:plugins +``` + +**ARM64 incompatible:** `aerospike`, `couchbase`, `grpc`, `oracledb` + +### Test Coverage + +```bash +./node_modules/.bin/nyc --include "packages/dd-trace/src/debugger/**/*.js" \ + ./node_modules/.bin/mocha -r "packages/dd-trace/test/setup/mocha.js" \ + "packages/dd-trace/test/debugger/**/*.spec.js" +``` + +**Philosophy:** +- Integration tests (running in sandboxes) don't count towards nyc coverage metrics +- Don't add redundant unit tests solely to improve coverage numbers +- Focus on covering important production code paths with whichever test type makes sense + +### Test Assertions + +Use `node:assert/strict` for standard assertions. For partial deep object checks, use `assertObjectContains` from `integration-tests/helpers/index.js`: + +```js +const assert = require('node:assert/strict') +const { assertObjectContains } = require('../helpers') + +assert.equal(actual, expected) +assertObjectContains(response, { status: 200, body: { user: { name: 'Alice' } } }) +``` + +### Time-Based Testing + +**Never rely on actual time passing in unit tests.** Use sinon's fake timers to mock time and make tests deterministic and fast. + +## Code Style & Linting + +### Linting & Naming +- Lint: `yarn lint` / `yarn lint:fix` +- Files: kebab-case +- JSDoc: TypeScript-compatible syntax (`@param {string}`, `@returns {Promise}`, `@typedef`) + +### Import Ordering + +Separate groups with empty line, sort alphabetically within each: +1. 
Node.js core modules (with `node:` prefix) +2. Third-party modules +3. Internal imports (by path proximity, then alpha) + +Use destructuring for utility modules when appropriate. + +```js +const fs = require('node:fs') +const path = require('node:path') + +const express = require('express') + +const { myConf } = require('./config') +const log = require('../log') +``` + +### ECMAScript and Node.js API Standards + +**Target Node.js 18.0.0 compatibility:** +- Use modern JS features supported by Node.js (e.g., optional chaining `?.`, nullish coalescing `??`) +- Guard newer APIs with version checks using [`version.js`](./version.js): + ```js + const { NODE_MAJOR } = require('./version') + if (NODE_MAJOR >= 20) { /* Use Node.js 20+ API */ } + ``` +- **Prefix Node.js core modules with `node:`** (e.g., `require('node:assert')`) + +### Performance and Memory + +**CRITICAL: Tracer runs in application hot paths - every operation counts.** + +**Async/Await:** +- Do NOT use `async/await` or promises in production code (npm package) +- Allowed ONLY in: test files, worker threads (e.g., `packages/dd-trace/src/debugger/devtools_client/`) +- Use callbacks or synchronous patterns instead + +**Memory:** +- Minimize allocations in frequently-called paths +- Avoid unnecessary objects, closures, arrays +- Reuse objects and buffers +- Minimize GC pressure + +#### Array Iteration + +**Prefer `for-of`, `for`, `while` loops over functional methods (`map()`, `forEach()`, `filter()`):** +- Avoid `items.forEach(item => process(item))` → use `for (const item of items) { process(item) }` +- Avoid chaining `items.filter(...).map(...)` → use single loop with conditional push +- Functional methods create closures and intermediate arrays + +**Functional methods acceptable in:** +- Test files +- Non-hot-path code where readability benefits +- One-time initialization code + +**Loop selection:** +- `for-of` - Simple iteration +- `for` with index - Need index or better performance in hot paths +- 
`while` - Custom iteration logic + +### Debugging and Logging + +Use `log` module (`packages/dd-trace/src/log/index.js`) with printf-style formatting (not template strings): + +```js +const log = require('../log') +log.debug('Value: %s', someValue) // printf-style +log.debug(() => `Expensive: ${expensive()}`) // callback for expensive ops +log.error('Error: %s', msg, err) // error as last arg +``` + +Enable: `DD_TRACE_DEBUG=true DD_TRACE_LOG_LEVEL=info node app.js` +Levels: `trace`, `debug`, `info`, `warn`, `error` + +### Error Handling + +**Never crash user apps:** Catch/log errors (`log.error()`/`log.warn()`), resume or disable plugin/subsystem +Avoid try/catch in hot paths - validate inputs early + +## Development Workflow + +### Core Principles +- **Search first**: Check for existing utilities/patterns before creating new code +- **Small PRs**: Break large efforts into incremental, reviewable changes +- **Descriptive code**: Self-documenting with verbs in function names; comment when needed +- **Readable formatting**: Empty lines for grouping, split complex objects, extract variables +- **Avoid large refactors**: Iterative changes, gradual pattern introduction +- **Test changes**: Test logic (not mocks), failure cases, edge cases - always update tests + +### Always Consider Backportability + +**We always backport `master` to older versions.** +- Keep breaking changes to a minimum +- Don't use language/runtime features that are too new +- **Guard breaking changes with version checks** using [`version.js`](./version.js): + ```js + const { DD_MAJOR } = require('./version') + if (DD_MAJOR >= 6) { + // New behavior for v6+ + } else { + // Old behavior for v5 and earlier + } + ``` + +## Adding New Configuration Options + +1. **Add default value** in `packages/dd-trace/src/config_defaults.js` +2. **Map environment variable** in `packages/dd-trace/src/config.js` (`#applyEnvironment()` method) +3. **Add TypeScript definitions** in `index.d.ts` +4. 
**Add to telemetry name mapping** (if applicable) in `packages/dd-trace/src/telemetry/telemetry.js`
+5. **Update** `packages/dd-trace/src/supported-configurations.json`
+6. **Document** in `docs/API.md` (non-internal/experimental options only)
+7. **Add tests** in `packages/dd-trace/test/config.spec.js`
+
+**Naming Convention:** Size/time-based config options should have unit suffixes (e.g., `timeoutMs`, `maxBytes`, `intervalSeconds`).
+
+## Adding New Instrumentation
+
+**New instrumentations go in `packages/datadog-instrumentations/`.** The instrumentation system uses diagnostic channels for communication.
+
+Many integrations have corresponding plugins in `packages/datadog-plugin-*/` that work with the instrumentation layer.
+
+### What Are Plugins?
+
+Plugins are modular code components in `packages/datadog-plugin-*/` directories that:
+- Subscribe to diagnostic channels to receive instrumentation events
+- Handle APM tracing logic (spans, metadata, error tracking)
+- Manage feature-specific logic (e.g., code origin tracking, LLM observability)
+
+**Plugin Base Classes:**
+- **`Plugin`** - Base class with diagnostic channel subscription, storage binding, enable/disable lifecycle. Use for non-tracing functionality.
+- **`TracingPlugin`** - Extends `Plugin` with APM tracing helpers (`startSpan()`, automatic trace events, `activeSpan` getter). Use for plugins creating trace spans.
+- **`CompositePlugin`** - Extends `Plugin` to compose multiple sub-plugins. Use when one integration needs multiple feature plugins (e.g., `express` combines tracing and code origin plugins).
+
+**Plugin Loading:**
+- Plugins load lazily when application `require()`s the corresponding library
+- Disable with `DD_TRACE_DISABLED_PLUGINS` or `DD_TRACE_<PLUGIN>_ENABLED=false`
+- Test framework plugins only load when Test Optimization mode (`isCiVisibility`) is enabled
+
+**When to Create a New Plugin:**
+1. Adding support for a new third-party library/framework
+2. 
Adding a new product feature that integrates with existing libraries (use `CompositePlugin`)
+
+### Creating a New Plugin
+
+```bash
+mkdir -p packages/datadog-plugin-<name>/{src,test}
+cp packages/datadog-plugin-kafkajs/src/index.js packages/datadog-plugin-<name>/src/
+```
+
+Edit `src/index.js`, create `test/index.spec.js`, then register in:
+`packages/dd-trace/src/plugins/index.js`, `index.d.ts`, `docs/test.ts`, `docs/API.md`, `.github/workflows/apm-integrations.yml`
+
+## Pull Requests and CI
+
+### Commit Messages
+
+Conventional format: `type(scope): description`
+Types: `feat`, `fix`, `docs`, `refactor`, `test`, `chore`, `ci`
+Example: `feat(appsec): add new WAF rule`
+
+### PR Requirements
+
+- Use template from `.github/pull_request_template.md`
+- Label: `semver-patch` (fixes only), `semver-minor` (new features), `semver-major` (breaking)
+- **All tests must pass - all-green policy, no exceptions**
+
+## Vendoring Dependencies
+
+Using rspack: Run `yarn` in `vendor/` to install/bundle dependencies → `packages/node_modules/`
+(Some deps excluded, e.g., `@opentelemetry/api`)
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 120000
index 00000000000..47dc3e3d863
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1 @@
+AGENTS.md
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ec653e9cdf8..76439e5f43d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -28,15 +28,26 @@ Sometimes new patterns or new ideas emerge which would be a substantial improvem
 ## Test everything
 
-It's very difficult to know if a change is valid unless there are tests to prove it. As an extension of that, it's also difficult to know the _use_ of that code is valid if the way it is integrated is not propertly tested. For this reason we generally favour integration tests over unit tests. 
If an API is expected to be used in different places or in different ways then it should generally include unit tests too for each unique scenario, however great care should be taken to ensure unit tests are actually testing the _logic_ and not just testing the _mocks_. It's a very common mistake to write a unit test that abstracts away the actual use of the interface so much that it doesn't actually test how that interface works in real-world scenarios. Remember to test how it handles failures, how it operates under heavy load, and how it impacts usability of what its purpose is. +It's very difficult to know if a change is valid unless there are tests to prove it. As an extension of that, it's also difficult to know the _use_ of that code is valid if the way it is integrated is not properly tested. For this reason we generally favour integration tests over unit tests. If an API is expected to be used in different places or in different ways then it should generally include unit tests too for each unique scenario, however great care should be taken to ensure unit tests are actually testing the _logic_ and not just testing the _mocks_. It's a very common mistake to write a unit test that abstracts away the actual use of the interface so much that it doesn't actually test how that interface works in real-world scenarios. Remember to test how it handles failures, how it operates under heavy load, and how it impacts usability of what its purpose is. ## Don't forget benchmarks -Observability products tend to have quite a bit of their behaviour running in app code hot paths. It's important we extensively benchmark anything we expect to have heavy use to ensure it performs well and we don't cause any significant regressions through future changes. Measuring once at the time of writing is insufficient--a graph with just one data point is not going to tell you much of anything. +Observability products tend to have quite a bit of their behavior running in app code hot paths. 
It's important we extensively benchmark anything we expect to have heavy use to ensure it performs well and we don't cause any significant regressions through future changes. Measuring once at the time of writing is insufficient--a graph with just one data point is not going to tell you much of anything. ## Always consider backportability -To reduce delta between release lines and make it easier for us to support older versions we try as much as possible to backport every change we can. We should be diligent about keeping breaking changes to a minimum and ensuring we don't use language or runtime features which are too new. This way we can generally be confident that a change can be backported. +We always backport changes from `master` to older versions to avoid release lines drifting apart and to prevent merge conflicts. We should be diligent about keeping breaking changes to a minimum and ensuring we don't use language or runtime features which are too new. This way we can generally be confident that a change can be backported. + +**Breaking changes must be guarded by version checks** so they can land in `master` and be safely backported to older versions. Check the major version of the dd-trace package using the `version.js` module in the root of the project: + +```js +const { DD_MAJOR } = require('./version') +if (DD_MAJOR >= 6) { + // New behavior for v6+ +} else { + // Old behavior for v5 and earlier +} +``` To reduce the surface area of a breaking change, the breaking aspects could be placed behind a flag which is disabled by default or isolated to a function. In the next major the change would then be just to change the default of the flag or to start or stop calling the isolated function. By isolating the breaking logic it also becomes easier to delete later when it's no longer relevant on any release line. 
@@ -48,11 +59,11 @@ This library follows the semantic versioning standard, but there are some subtle ### semver-patch -If the change is a bug or security fix, it should be labelled as semver-patch. These changes should generally not alter existing behaviour in any way other than to correct the specific issue. +If the change is a bug or security fix, it should be labelled as semver-patch. These changes should generally not alter existing behavior in any way other than to correct the specific issue. ### semver-minor -Any addition of new functionality should be labelled as semver-minor and should not change any existing behaviour either in how any existing API works or in changing the contents or value of any existing data being reported except in purely additive cases where all existing data retains its prior state. Such changes may include new configuration options which when used will change behaviour, or may include the addition of new data being captured such as a new instrumentation, but should not impact the current operating design of any existing features. +Any addition of new functionality should be labelled as semver-minor and should not change any existing behavior either in how any existing API works or in changing the contents or value of any existing data being reported except in purely additive cases where all existing data retains its prior state. Such changes may include new configuration options which when used will change behavior, or may include the addition of new data being captured such as a new instrumentation, but should not impact the current operating design of any existing features. 
### semver-major @@ -70,6 +81,10 @@ We follow an all-green policy which means that for any PR to be merged _all_ tes Eventually we plan to look into putting these permission-required tests behind a label which team members can add to their PRs at creation to run the full CI and can add to outside contributor PRs to trigger the CI from their own user credentials. If the label is not present there will be another action which checks the label is present. Rather than showing a bunch of confusing failures to new contributors it would just show a single job failure which indicates an additional label is required, and we can name it in a way that makes it clear that it's not the responsibility of the outside contributor to add it. Something like `approve-full-ci` is one possible choice there. +## Search before creating + +Always search the codebase first before creating new code to avoid duplicates. Check for existing utilities, helpers, or patterns that solve similar problems. Reuse existing code when possible rather than reinventing solutions. + ## Sign your commits All commits in a pull request must be signed. We require commit signing to ensure the authenticity and integrity of contributions to the project. @@ -95,7 +110,7 @@ such as [nvm](https://github.com/creationix/nvm) is recommended. If you're unsure which version of Node.js to use, just use the latest version, which should always work. -We use [yarn](https://yarnpkg.com/) 1.x for its workspace functionality, so make sure to install that as well. The easist way to install yarn 1.x with with npm: +We use [yarn](https://yarnpkg.com/) 1.x for its workspace functionality, so make sure to install that as well. 
The easiest way to install yarn 1.x is with npm:

```sh
$ npm install -g yarn
```

@@ -107,28 +122,160 @@ To install dependencies once you have Node and yarn installed, run this in the p
$ yarn
```

+## Coding Standards
+
+### File Naming and Import Conventions
+
+Use **kebab-case** for file names (e.g., `my-module.js`, not `myModule.js`).
+
+Organize imports in the following order (each group separated by an empty line):
+
+1. Node.js core modules first (sorted alphabetically) - always prefix with `node:`
+2. Third-party modules (sorted alphabetically)
+3. Internal imports (sorted by path proximity first - closest first - then alphabetically)
+
+Example:
+
+```js
+const fs = require('node:fs')
+const path = require('node:path')
+
+const express = require('express')
+const lodash = require('lodash')
+
+const { myConf } = require('./config')
+const { foo } = require('./helper')
+const log = require('../log')
+```
+
+### Node.js Version Compatibility
+
+Follow the ECMAScript standard and Node.js APIs supported by **Node.js 18.0.0**. Never use features or APIs only supported in newer versions unless explicitly required. If newer APIs are needed, guard them with version checks using the `version.js` module located in the root of the project:
+
+```js
+const { NODE_MAJOR } = require('./version')
+if (NODE_MAJOR >= 20) {
+  // Use Node.js 20+ API
+}
+```
+
+### Performance Considerations
+
+This tracer runs in application hot paths, so performance is critical:
+
+- **Avoid `async/await` and promises** in production code (they add overhead). Use callbacks or synchronous patterns instead. Async/await is acceptable in test files and worker threads.
+- **Minimize memory allocations** in frequently-called code paths +- **Prefer imperative loops over functional array methods** (`for-of`, `for`, `while` instead of `map()`, `forEach()`, `filter()`) to avoid closure overhead and intermediate arrays +- Avoid creating unnecessary objects, closures, or arrays +- Reuse objects and buffers where possible + +Example of preferred loop style: + +```js +// ❌ Avoid - creates closure overhead +items.forEach(item => { + process(item) +}) + +// ✅ Prefer - no closure overhead +for (const item of items) { + process(item) +} + +// ❌ Avoid - loops over data multiple times, creates intermediate array, adds closure overhead +const result = items + .filter(item => item.active) + .map(item => item.value) + +// ✅ Prefer - single loop, no intermediate array +const result = [] +for (const item of items) { + if (item.active) { + result.push(item.value) + } +} +``` + +### Error Handling + +The tracer should never crash user applications. Catch errors and log them with `log.error()` or `log.warn()` as appropriate. Resume normal operation if possible, or disable the plugin/subsystem if not. + +### Logging and Debugging + +Use the `log` module (located at `packages/dd-trace/src/log/index.js`) for all logging: + +```js +const log = require('../log') + +log.debug('Debug message with value: %s', someValue) +log.info('Info message') +log.warn('Warning with data: %o', objectValue) +log.error('Error reading file %s', filepath, err) +``` + +**Important:** Use printf-style formatting (`%s`, `%d`, `%o`) instead of template strings to avoid unnecessary string concatenation when logging is disabled. 
+ +For expensive computations in the log message itself, use a callback function: + +```js +// Callback is only executed if debug logging is enabled +log.debug(() => `Processed data: ${expensive.computation()}`) +``` + +When logging errors, pass the error object as the last argument after the format string: + +```js +log.error('Error processing request', err) +// or with additional context: +log.error('Error reading file %s', filename, err) +``` + +To enable debug logging when running tests or applications: + +```sh +# Run application with debug logging +DD_TRACE_DEBUG=true node your-app.js + +# Run a specific test suite with debug logging +DD_TRACE_DEBUG=true yarn test:debugger +``` + +### Documentation + +Document all APIs with TypeScript-compatible JSDoc to ensure proper types without using TypeScript. This enables type checking and IDE autocompletion while maintaining the JavaScript codebase. Use TypeScript type syntax in JSDoc annotations (e.g., `@param {string}`, `@returns {Promise}`). + +## Adding New Configuration Options + +To add a new configuration option: + +1. **Add the default value** in `packages/dd-trace/src/config_defaults.js` +2. **Map the environment variable** in `packages/dd-trace/src/config.js` (add to destructuring in `#applyEnvironment()` method) +3. **Add TypeScript definitions** in `index.d.ts` +4. **Add to telemetry name mapping** (if applicable) in `packages/dd-trace/src/telemetry/telemetry.js` +5. **Update supported configurations** in `packages/dd-trace/src/supported-configurations.json` +6. **Document the option** in `docs/API.md` (for non-internal/experimental options) +7. **Add tests** in `packages/dd-trace/test/config.spec.js` + +**Naming Convention:** Size/time-based config options should have unit suffixes (e.g., `timeoutMs`, `maxBytes`, `intervalSeconds`). + ## Adding a Plugin +Plugins are modular code components in `packages/datadog-plugin-*/` directories that integrate with specific third-party libraries and frameworks. 
They subscribe to diagnostic channels to receive instrumentation events and handle APM tracing logic, feature-specific logic, and more.
+
To create a new plugin for a third-party package, follow these steps:
-1. `mkdir -p packages/datadog-plugin-/src`
-2. Copy an `index.js` file from another plugin to use as a starting point: `cp packages/datadog-plugin-kafkajs/src/index.js packages/datadog-plugin-/src`
-3. Edit index.js as appropriate for your new plugin
-4. `mkdir -p packages/datadog-plugin-/test`
-5. Create an packages/datadog-plugin-/test/index.spec.js file and add the necessary tests. See other plugin tests for inspiration to file structure.
+1. `mkdir -p packages/datadog-plugin-/src`
+2. Copy an `index.js` file from another plugin to use as a starting point: `cp packages/datadog-plugin-kafkajs/src/index.js packages/datadog-plugin-/src`
+3. Edit index.js as appropriate for your plugin
+4. `mkdir -p packages/datadog-plugin-/test`
+5. Create a packages/datadog-plugin-/test/index.spec.js file and add the necessary tests. See other plugin tests for inspiration on file structure.
6. Edit index.spec.js as appropriate for your new plugin
7. Add entries to the following files for your new plugin:
   - `packages/dd-trace/src/plugins/index.js`
   - `index.d.ts`
   - `docs/test.ts`
   - `docs/API.md`
-   - `.github/workflows/apm-integrations.yml`
-
-### Adding a Plugin Test to CI
-
-The plugin tests run on pull requests in Github Actions. Each plugin test suite has its own Github job, so adding a new suite to CI
-requires adding a new job to the Github Actions config. The file containing these configs is `.github/workflows/apm-integrations.yml`.
-You can copypaste and modify an existing plugin job configuration in this file to create a new job config.
+ - `.github/workflows/apm-integrations.yml` (see [Adding a Plugin Test to CI](#adding-a-plugin-test-to-ci)) ## Testing @@ -137,6 +284,93 @@ You can copypaste and modify an existing plugin job configuration in this file t The `pg-native` package requires `pg_config` to be in your `$PATH` to be able to install. Please refer to [the "Install" section](https://github.com/brianc/node-postgres/tree/master/packages/pg-native#install) of the `pg-native` documentation for how to ensure your environment is configured correctly. +### Running Individual Tests + +When developing, it's often faster to run individual test files rather than entire test suites. **Never run `yarn test` directly** as it requires too much setup and takes too long. + +To target specific tests, use the `--grep` flag with mocha or tap to match test names: + +```sh +yarn test:debugger --grep "test name pattern" +yarn test:appsec --grep "specific test" +``` + +**Note:** This project uses a mix of tap and mocha for testing. However, new tests should be written using mocha, not tap. + +### Test Assertions + +Use the Node.js core `assert` library for assertions in tests. Import from `node:assert/strict` to ensure all assertions use strict equality without type coercion: + +```js +const assert = require('node:assert/strict') + +assert.equal(actual, expected) +assert.deepEqual(actualObject, expectedObject) +``` + +For asserting that an object contains certain properties (deeply), use `assertObjectContains` from `integration-tests/helpers/index.js`: + +```js +const { assertObjectContains } = require('../helpers') + +// Assert an object contains specific properties (actual object can have more) +assertObjectContains(response, { + status: 200, + body: { user: { name: 'Alice' } } +}) +``` + +This helper performs partial deep equality checking and provides better error messages than individual assertions. 
+ +### Time-Based Testing + +**Never rely on actual time passing in unit tests.** Tests that use `setTimeout()`, `setInterval()`, or `Date.now()` should use [sinon's fake timers](https://sinonjs.org/releases/latest/fake-timers/) to mock time. This makes tests: +- Run instantly instead of waiting for real time +- Deterministic and reliable (no timing-related flakiness) +- Easier to reason about + +Example: + +```js +const sinon = require('sinon') + +describe('my test', () => { + let clock + + beforeEach(() => { + clock = sinon.useFakeTimers() + }) + + afterEach(() => { + clock.restore() + }) + + it('should handle timeout', () => { + let called = false + setTimeout(() => { called = true }, 1000) + + clock.tick(1000) // Instantly advance time by 1 second + assert.equal(called, true) + }) +}) +``` + +Use `clock.tick(ms)` to advance time, `clock.restore()` to restore real timers, and `clock.reset()` to reset fake time to 0. + +### Test Coverage + +Coverage is measured with nyc. To check coverage for your changes, use the `:ci` variant of the test scripts: + +```sh +# Run tests with coverage for specific components +yarn test:debugger:ci +yarn test:appsec:ci +yarn test:llmobs:sdk:ci +yarn test:lambda:ci +``` + +**Coverage Philosophy:** Given the nature of this library (instrumenting third-party code, hooking into runtime internals), unit tests can become overly complex when everything needs to be mocked. Integration tests that run in sandboxes don't count towards nyc's coverage metrics, so coverage numbers may look low even when code is well-tested. **Don't add redundant unit tests solely to improve coverage numbers.** + ### Plugin Tests Before running _plugin_ tests, the supporting docker containers need to be running. You _can_ attempt to start all of them using docker-compose, but that's a drain on your system, and not all the images will even run at all on AMD64 devices. @@ -194,7 +428,7 @@ details. 
When running integration tests, some packages are installed from npm into temporary sandboxes. If running locally without an internet connection, it's possible to use the environment variable `OFFLINE=true` to make `yarn` use the `--prefer-offline` flag, -which will use the local yarn cache instead of fecthing packages from npm. +which will use the local yarn cache instead of fetching packages from npm. ### Adding a Plugin Test to CI @@ -216,7 +450,6 @@ $ yarn lint This also checks that the `LICENSE-3rdparty.csv` file is up-to-date, and checks dependencies for vulnerabilities. - ### Benchmarks Our microbenchmarks live in `benchmark/sirun`. Each directory in there @@ -231,5 +464,4 @@ most efficient algorithm. To run your benchmark, use: $ yarn bench ``` - [1]: https://docs.datadoghq.com/help From 4a3bcf0f08e8e698ae81e2ac295a3dc388d971d8 Mon Sep 17 00:00:00 2001 From: rochdev Date: Sat, 20 Dec 2025 05:06:54 +0000 Subject: [PATCH 15/15] v5.82.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6e98ac5c443..a451a870a33 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.81.0", + "version": "5.82.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts",