diff --git a/lib/internal/inspector/network.js b/lib/internal/inspector/network.js index ddd5b4750bee8d..9c85a86912f71c 100644 --- a/lib/internal/inspector/network.js +++ b/lib/internal/inspector/network.js @@ -10,8 +10,16 @@ const { const dc = require('diagnostics_channel'); const { now } = require('internal/perf/utils'); const { MIMEType } = require('internal/mime'); +const { + createGunzip, + createInflate, + createBrotliDecompress, + createZstdDecompress, +} = require('zlib'); +const { Buffer } = require('buffer'); const kInspectorRequestId = Symbol('kInspectorRequestId'); +const kContentEncoding = Symbol('kContentEncoding'); // https://chromedevtools.github.io/devtools-protocol/1-3/Network/#type-ResourceType const kResourceType = { @@ -70,6 +78,69 @@ function sniffMimeType(contentType) { }; } +/** + * Gets the content encoding from the headers object. + * @param {object} headers - The response headers. + * @returns {string|undefined} - The content encoding (e.g., 'gzip', 'deflate', 'br', 'zstd'). + */ +function getContentEncoding(headers = {}) { + const contentEncoding = headers['content-encoding']; + if (typeof contentEncoding === 'string') { + return StringPrototypeToLowerCase(contentEncoding); + } + return undefined; +} + +/** + * Creates a decompression stream based on the content encoding. + * @param {string} encoding - The content encoding (e.g., 'gzip', 'deflate', 'br', 'zstd'). + * @returns {import('stream').Transform|null} - A decompression stream or null if encoding is not supported. + */ +function createDecompressor(encoding) { + switch (encoding) { + case 'gzip': + case 'x-gzip': + return createGunzip(); + case 'deflate': + return createInflate(); + case 'br': + return createBrotliDecompress(); + case 'zstd': + return createZstdDecompress(); + default: + return null; + } +} + +/** + * Decompresses a chunk of data based on the content encoding. + * @param {Buffer} chunk - The compressed data chunk. 
+ * @param {string} encoding - The content encoding. + * @param {function} callback - Callback with (error, decompressedChunk). + */ +function decompressChunk(chunk, encoding, callback) { + const decompressor = createDecompressor(encoding); + if (!decompressor) { + // No decompression needed, return original chunk + callback(null, chunk); + return; + } + + const chunks = []; + decompressor.on('data', (decompressedChunk) => { + chunks.push(decompressedChunk); + }); + decompressor.on('end', () => { + callback(null, Buffer.concat(chunks)); + }); + decompressor.on('error', (err) => { + // On decompression error, fall back to returning the original chunk + callback(null, chunk); + }); + + decompressor.end(chunk); +} + function registerDiagnosticChannels(listenerPairs) { function enable() { ArrayPrototypeForEach(listenerPairs, ({ 0: channel, 1: listener }) => { @@ -91,9 +162,13 @@ function registerDiagnosticChannels(listenerPairs) { module.exports = { kInspectorRequestId, + kContentEncoding, kResourceType, getMonotonicTime, getNextRequestId, registerDiagnosticChannels, sniffMimeType, + getContentEncoding, + createDecompressor, + decompressChunk, }; diff --git a/lib/internal/inspector/network_http.js b/lib/internal/inspector/network_http.js index 8d324c8c544eea..3fb9146e4b9d92 100644 --- a/lib/internal/inspector/network_http.js +++ b/lib/internal/inspector/network_http.js @@ -10,11 +10,14 @@ const { const { kInspectorRequestId, + kContentEncoding, kResourceType, getMonotonicTime, getNextRequestId, registerDiagnosticChannels, sniffMimeType, + getContentEncoding, + createDecompressor, } = require('internal/inspector/network'); const { Network } = require('inspector'); const EventEmitter = require('events'); @@ -27,6 +30,7 @@ const convertHeaderObject = (headers = {}) => { let host; let charset; let mimeType; + let contentEncoding; const dict = {}; for (const { 0: key, 1: value } of ObjectEntries(headers)) { const lowerCasedKey = key.toLowerCase(); @@ -38,6 +42,9 @@ const 
convertHeaderObject = (headers = {}) => { charset = result.charset; mimeType = result.mimeType; } + if (lowerCasedKey === 'content-encoding') { + contentEncoding = typeof value === 'string' ? value.toLowerCase() : undefined; + } if (typeof value === 'string') { dict[key] = value; } else if (ArrayIsArray(value)) { @@ -50,7 +57,7 @@ const convertHeaderObject = (headers = {}) => { dict[key] = String(value); } } - return [dict, host, charset, mimeType]; + return [dict, host, charset, mimeType, contentEncoding]; }; /** @@ -105,7 +112,10 @@ function onClientResponseFinish({ request, response }) { return; } - const { 0: headers, 2: charset, 3: mimeType } = convertHeaderObject(response.headers); + const { 0: headers, 2: charset, 3: mimeType, 4: contentEncoding } = convertHeaderObject(response.headers); + + // Store content encoding on the request for later use + request[kContentEncoding] = contentEncoding; Network.responseReceived({ requestId: request[kInspectorRequestId], @@ -121,24 +131,64 @@ function onClientResponseFinish({ request, response }) { }, }); - // Unlike response.on('data', ...), this does not put the stream into flowing mode. - EventEmitter.prototype.on.call(response, 'data', (chunk) => { - Network.dataReceived({ - requestId: request[kInspectorRequestId], - timestamp: getMonotonicTime(), - dataLength: chunk.byteLength, - encodedDataLength: chunk.byteLength, - data: chunk, + // Create a decompressor if the response is compressed + const decompressor = createDecompressor(contentEncoding); + + if (decompressor) { + // Pipe decompressed data to DevTools + decompressor.on('data', (decompressedChunk) => { + Network.dataReceived({ + requestId: request[kInspectorRequestId], + timestamp: getMonotonicTime(), + dataLength: decompressedChunk.byteLength, + encodedDataLength: decompressedChunk.byteLength, + data: decompressedChunk, + }); }); - }); - // Wait until the response body is consumed by user code. 
- response.once('end', () => { - Network.loadingFinished({ - requestId: request[kInspectorRequestId], - timestamp: getMonotonicTime(), + // Handle decompression errors gracefully so malformed data cannot crash the process + decompressor.on('error', () => { + // If decompression fails, no further body data is forwarded to the inspector. + // NOTE(review): 'end' will not fire after 'error', so loadingFinished is never emitted for this request — confirm intended. }); - }); + + // Unlike response.on('data', ...), this does not put the stream into flowing mode. + EventEmitter.prototype.on.call(response, 'data', (chunk) => { + // Feed the chunk into the decompressor + decompressor.write(chunk); + }); + + // Wait until the response body is consumed by user code. + response.once('end', () => { + // End the decompressor stream + decompressor.end(); + decompressor.once('end', () => { + Network.loadingFinished({ + requestId: request[kInspectorRequestId], + timestamp: getMonotonicTime(), + }); + }); + }); + } else { + // No decompression needed, send data directly + // Unlike response.on('data', ...), this does not put the stream into flowing mode. + EventEmitter.prototype.on.call(response, 'data', (chunk) => { + Network.dataReceived({ + requestId: request[kInspectorRequestId], + timestamp: getMonotonicTime(), + dataLength: chunk.byteLength, + encodedDataLength: chunk.byteLength, + data: chunk, + }); + }); + + // Wait until the response body is consumed by user code. 
+ response.once('end', () => { + Network.loadingFinished({ + requestId: request[kInspectorRequestId], + timestamp: getMonotonicTime(), + }); + }); + } } module.exports = registerDiagnosticChannels([ diff --git a/lib/internal/inspector/network_http2.js b/lib/internal/inspector/network_http2.js index 0f0751c44dd1f9..3e7352df51233c 100644 --- a/lib/internal/inspector/network_http2.js +++ b/lib/internal/inspector/network_http2.js @@ -10,15 +10,18 @@ const { const { kInspectorRequestId, + kContentEncoding, kResourceType, getMonotonicTime, getNextRequestId, registerDiagnosticChannels, sniffMimeType, + createDecompressor, } = require('internal/inspector/network'); const { Network } = require('inspector'); const { HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_CONTENT_ENCODING, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_COOKIE, HTTP2_HEADER_METHOD, @@ -42,6 +45,7 @@ function convertHeaderObject(headers = {}) { let statusCode; let charset; let mimeType; + let contentEncoding; const dict = {}; for (const { 0: key, 1: value } of ObjectEntries(headers)) { @@ -61,6 +65,8 @@ function convertHeaderObject(headers = {}) { const result = sniffMimeType(value); charset = result.charset; mimeType = result.mimeType; + } else if (lowerCasedKey === HTTP2_HEADER_CONTENT_ENCODING) { + contentEncoding = typeof value === 'string' ? 
value.toLowerCase() : undefined; } if (typeof value === 'string') { @@ -78,7 +84,7 @@ function convertHeaderObject(headers = {}) { const url = `${scheme}://${authority}${path}`; - return [dict, url, method, statusCode, charset, mimeType]; + return [dict, url, method, statusCode, charset, mimeType, contentEncoding]; } /** @@ -194,7 +200,16 @@ function onClientStreamFinish({ stream, headers }) { return; } - const { 0: convertedHeaderObject, 3: statusCode, 4: charset, 5: mimeType } = convertHeaderObject(headers); + const { + 0: convertedHeaderObject, + 3: statusCode, + 4: charset, + 5: mimeType, + 6: contentEncoding, + } = convertHeaderObject(headers); + + // Store content encoding on the stream for later use + stream[kContentEncoding] = contentEncoding; Network.responseReceived({ requestId: stream[kInspectorRequestId], @@ -210,23 +225,56 @@ function onClientStreamFinish({ stream, headers }) { }, }); - // Unlike stream.on('data', ...), this does not put the stream into flowing mode. - EventEmitter.prototype.on.call(stream, 'data', (chunk) => { - /** - * When a chunk of the response body has been received, cache it until `getResponseBody` request - * https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or - * stream it with `streamResourceContent` request. 
- * https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent - */ - - Network.dataReceived({ - requestId: stream[kInspectorRequestId], - timestamp: getMonotonicTime(), - dataLength: chunk.byteLength, - encodedDataLength: chunk.byteLength, - data: chunk, + // Create a decompressor if the response is compressed + const decompressor = createDecompressor(contentEncoding); + + if (decompressor) { + // Pipe decompressed data to DevTools + decompressor.on('data', (decompressedChunk) => { + Network.dataReceived({ + requestId: stream[kInspectorRequestId], + timestamp: getMonotonicTime(), + dataLength: decompressedChunk.byteLength, + encodedDataLength: decompressedChunk.byteLength, + data: decompressedChunk, + }); }); - }); + + // Handle decompression errors gracefully + decompressor.on('error', () => { + // If decompression fails, no further body data is forwarded to the inspector for this stream. + }); + + // Unlike stream.on('data', ...), this does not put the stream into flowing mode. + EventEmitter.prototype.on.call(stream, 'data', (chunk) => { + // Feed the chunk into the decompressor + decompressor.write(chunk); + }); + + // End the decompressor when the stream closes + stream.once('end', () => { + decompressor.end(); + }); + } else { + // No decompression needed, send data directly + // Unlike stream.on('data', ...), this does not put the stream into flowing mode. + EventEmitter.prototype.on.call(stream, 'data', (chunk) => { + /** + * When a chunk of the response body has been received, cache it until `getResponseBody` request + * https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or + * stream it with `streamResourceContent` request. 
+ * https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent + */ + + Network.dataReceived({ + requestId: stream[kInspectorRequestId], + timestamp: getMonotonicTime(), + dataLength: chunk.byteLength, + encodedDataLength: chunk.byteLength, + data: chunk, + }); + }); + } } /** diff --git a/test/parallel/test-inspector-network-http-compressed.js b/test/parallel/test-inspector-network-http-compressed.js new file mode 100644 index 00000000000000..dac2e093920c66 --- /dev/null +++ b/test/parallel/test-inspector-network-http-compressed.js @@ -0,0 +1,248 @@ +// Flags: --inspect=0 --experimental-network-inspection +'use strict'; +const common = require('../common'); + +common.skipIfInspectorDisabled(); + +const assert = require('node:assert'); +const { once } = require('node:events'); +const fixtures = require('../common/fixtures'); +const http = require('node:http'); +const https = require('node:https'); +const zlib = require('node:zlib'); +const inspector = require('node:inspector/promises'); + +const session = new inspector.Session(); +session.connect(); + +const plainTextBody = 'hello world compressed\n'; + +const setResponseHeaders = (res, encoding) => { + res.setHeader('server', 'node'); + res.setHeader('Content-Type', 'text/plain; charset=utf-8'); + if (encoding) { + res.setHeader('Content-Encoding', encoding); + } +}; + +const handleRequest = (req, res) => { + const path = req.url; + switch (path) { + case '/gzip': + setResponseHeaders(res, 'gzip'); + res.writeHead(200); + zlib.gzip(plainTextBody, (err, compressed) => { + if (err) throw err; + res.end(compressed); + }); + break; + case '/deflate': + setResponseHeaders(res, 'deflate'); + res.writeHead(200); + zlib.deflate(plainTextBody, (err, compressed) => { + if (err) throw err; + res.end(compressed); + }); + break; + case '/br': + setResponseHeaders(res, 'br'); + res.writeHead(200); + zlib.brotliCompress(plainTextBody, (err, compressed) => { + if (err) throw err; + 
res.end(compressed); + }); + break; + case '/zstd': + setResponseHeaders(res, 'zstd'); + res.writeHead(200); + zlib.zstdCompress(plainTextBody, (err, compressed) => { + if (err) throw err; + res.end(compressed); + }); + break; + case '/plain': + setResponseHeaders(res); + res.writeHead(200); + res.end(plainTextBody); + break; + default: + assert.fail(`Unexpected path: ${path}`); + } +}; + +const httpServer = http.createServer(handleRequest); + +const httpsServer = https.createServer({ + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem') +}, handleRequest); + +const terminate = () => { + session.disconnect(); + httpServer.close(); + httpsServer.close(); + inspector.close(); +}; + +function verifyResponseReceived({ method, params }, expect) { + assert.strictEqual(method, 'Network.responseReceived'); + assert.ok(params.requestId.startsWith('node-network-event-')); + assert.strictEqual(params.response.status, 200); + assert.strictEqual(params.response.url, expect.url); + assert.strictEqual(params.response.mimeType, 'text/plain'); + assert.strictEqual(params.response.charset, 'utf-8'); + return params; +} + +function verifyLoadingFinished({ method, params }) { + assert.strictEqual(method, 'Network.loadingFinished'); + assert.ok(params.requestId.startsWith('node-network-event-')); + return params; +} + +async function testCompressedResponse(server, encoding, path) { + const port = server.address().port; + const protocol = server === httpsServer ? 'https' : 'http'; + const url = `${protocol}://127.0.0.1:${port}${path}`; + + const responseReceivedFuture = once(session, 'Network.responseReceived') + .then(([event]) => verifyResponseReceived(event, { url })); + + const loadingFinishedFuture = once(session, 'Network.loadingFinished') + .then(([event]) => verifyLoadingFinished(event)); + + const client = protocol === 'https' ? 
https : http; + const chunks = []; + + await new Promise((resolve, reject) => { + const req = client.get({ + host: '127.0.0.1', + port, + path, + rejectUnauthorized: false, + }, (res) => { + // Manually decompress the response to verify it works for user code + let decompressor; + if (encoding === 'gzip') { + decompressor = zlib.createGunzip(); + } else if (encoding === 'deflate') { + decompressor = zlib.createInflate(); + } else if (encoding === 'br') { + decompressor = zlib.createBrotliDecompress(); + } else if (encoding === 'zstd') { + decompressor = zlib.createZstdDecompress(); + } + + if (decompressor) { + res.pipe(decompressor); + decompressor.on('data', (chunk) => chunks.push(chunk)); + decompressor.on('end', resolve); + decompressor.on('error', reject); + } else { + res.on('data', (chunk) => chunks.push(chunk)); + res.on('end', resolve); + } + }); + req.on('error', reject); + }); + + // Verify user code can read the decompressed response + const body = Buffer.concat(chunks).toString(); + assert.strictEqual(body, plainTextBody); + + const responseReceived = await responseReceivedFuture; + await loadingFinishedFuture; + + // Verify the inspector receives the decompressed response body + const responseBody = await session.post('Network.getResponseBody', { + requestId: responseReceived.requestId, + }); + assert.strictEqual(responseBody.base64Encoded, false); + assert.strictEqual(responseBody.body, plainTextBody); +} + +async function testGzipHttp() { + await testCompressedResponse(httpServer, 'gzip', '/gzip'); +} + +async function testGzipHttps() { + await testCompressedResponse(httpsServer, 'gzip', '/gzip'); +} + +async function testDeflateHttp() { + await testCompressedResponse(httpServer, 'deflate', '/deflate'); +} + +async function testDeflateHttps() { + await testCompressedResponse(httpsServer, 'deflate', '/deflate'); +} + +async function testBrotliHttp() { + await testCompressedResponse(httpServer, 'br', '/br'); +} + +async function testBrotliHttps() { + 
await testCompressedResponse(httpsServer, 'br', '/br'); +} + +async function testZstdHttp() { + await testCompressedResponse(httpServer, 'zstd', '/zstd'); +} + +async function testZstdHttps() { + await testCompressedResponse(httpsServer, 'zstd', '/zstd'); +} + +async function testPlainHttp() { + await testCompressedResponse(httpServer, null, '/plain'); +} + +async function testPlainHttps() { + await testCompressedResponse(httpsServer, null, '/plain'); +} + +const testNetworkInspection = async () => { + // Test gzip + await testGzipHttp(); + session.removeAllListeners(); + await testGzipHttps(); + session.removeAllListeners(); + + // Test deflate + await testDeflateHttp(); + session.removeAllListeners(); + await testDeflateHttps(); + session.removeAllListeners(); + + // Test brotli + await testBrotliHttp(); + session.removeAllListeners(); + await testBrotliHttps(); + session.removeAllListeners(); + + // Test zstd + await testZstdHttp(); + session.removeAllListeners(); + await testZstdHttps(); + session.removeAllListeners(); + + // Test plain (no compression) + await testPlainHttp(); + session.removeAllListeners(); + await testPlainHttps(); + session.removeAllListeners(); +}; + +httpServer.listen(0, () => { + httpsServer.listen(0, async () => { + try { + await session.post('Network.enable'); + await testNetworkInspection(); + await session.post('Network.disable'); + } catch (e) { + assert.fail(e); + } finally { + terminate(); + } + }); +}); diff --git a/test/parallel/test-inspector-network-http2-compressed.js b/test/parallel/test-inspector-network-http2-compressed.js new file mode 100644 index 00000000000000..21d7632ab6afeb --- /dev/null +++ b/test/parallel/test-inspector-network-http2-compressed.js @@ -0,0 +1,263 @@ +// Flags: --inspect=0 --experimental-network-inspection +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +common.skipIfInspectorDisabled(); + +const assert = require('node:assert'); +const { 
once } = require('node:events'); +const fixtures = require('../common/fixtures'); +const http2 = require('node:http2'); +const zlib = require('node:zlib'); +const inspector = require('node:inspector/promises'); + +const session = new inspector.Session(); +session.connect(); + +const plainTextBody = 'hello world compressed http2\n'; + +const handleStream = common.mustCallAtLeast((stream, headers) => { + const path = headers[http2.constants.HTTP2_HEADER_PATH]; + + const responseHeaders = { + [http2.constants.HTTP2_HEADER_STATUS]: 200, + [http2.constants.HTTP2_HEADER_CONTENT_TYPE]: 'text/plain; charset=utf-8', + }; + + switch (path) { + case '/gzip': + responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'gzip'; + stream.respond(responseHeaders); + zlib.gzip(plainTextBody, (err, compressed) => { + if (err) throw err; + stream.end(compressed); + }); + break; + case '/deflate': + responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'deflate'; + stream.respond(responseHeaders); + zlib.deflate(plainTextBody, (err, compressed) => { + if (err) throw err; + stream.end(compressed); + }); + break; + case '/br': + responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'br'; + stream.respond(responseHeaders); + zlib.brotliCompress(plainTextBody, (err, compressed) => { + if (err) throw err; + stream.end(compressed); + }); + break; + case '/zstd': + responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'zstd'; + stream.respond(responseHeaders); + zlib.zstdCompress(plainTextBody, (err, compressed) => { + if (err) throw err; + stream.end(compressed); + }); + break; + case '/plain': + stream.respond(responseHeaders); + stream.end(plainTextBody); + break; + default: + assert.fail(`Unexpected path: ${path}`); + } +}); + +const http2Server = http2.createServer(); +const http2SecureServer = http2.createSecureServer({ + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem'), +}); + +http2Server.on('stream', 
handleStream); +http2SecureServer.on('stream', handleStream); + +const terminate = () => { + session.disconnect(); + http2Server.close(); + http2SecureServer.close(); + inspector.close(); +}; + +function verifyResponseReceived({ method, params }, expect) { + assert.strictEqual(method, 'Network.responseReceived'); + assert.ok(params.requestId.startsWith('node-network-event-')); + assert.strictEqual(params.response.status, 200); + assert.strictEqual(params.response.url, expect.url); + assert.strictEqual(params.response.mimeType, 'text/plain'); + assert.strictEqual(params.response.charset, 'utf-8'); + return params; +} + +function verifyLoadingFinished({ method, params }) { + assert.strictEqual(method, 'Network.loadingFinished'); + assert.ok(params.requestId.startsWith('node-network-event-')); + return params; +} + +async function testCompressedResponse(server, encoding, path) { + const port = server.address().port; + const secure = server === http2SecureServer; + const origin = (secure ? 'https' : 'http') + `://localhost:${port}`; + const url = `${origin}${path}`; + + const responseReceivedFuture = once(session, 'Network.responseReceived') + .then(([event]) => verifyResponseReceived(event, { url })); + + const loadingFinishedFuture = once(session, 'Network.loadingFinished') + .then(([event]) => verifyLoadingFinished(event)); + + const chunks = []; + + await new Promise((resolve, reject) => { + const client = http2.connect(origin, { + rejectUnauthorized: false, + }); + + const req = client.request({ + [http2.constants.HTTP2_HEADER_PATH]: path, + [http2.constants.HTTP2_HEADER_METHOD]: 'GET', + }); + + // Manually decompress the response to verify it works for user code + let decompressor; + if (encoding === 'gzip') { + decompressor = zlib.createGunzip(); + } else if (encoding === 'deflate') { + decompressor = zlib.createInflate(); + } else if (encoding === 'br') { + decompressor = zlib.createBrotliDecompress(); + } else if (encoding === 'zstd') { + decompressor = 
zlib.createZstdDecompress(); + } + + if (decompressor) { + req.pipe(decompressor); + decompressor.on('data', (chunk) => chunks.push(chunk)); + decompressor.on('end', () => { + client.close(); + resolve(); + }); + decompressor.on('error', (err) => { + client.close(); + reject(err); + }); + } else { + req.on('data', (chunk) => chunks.push(chunk)); + req.on('end', () => { + client.close(); + resolve(); + }); + } + + req.on('error', (err) => { + client.close(); + reject(err); + }); + req.end(); + }); + + // Verify user code can read the decompressed response + const body = Buffer.concat(chunks).toString(); + assert.strictEqual(body, plainTextBody); + + const responseReceived = await responseReceivedFuture; + await loadingFinishedFuture; + + // Verify the inspector receives the decompressed response body + const responseBody = await session.post('Network.getResponseBody', { + requestId: responseReceived.requestId, + }); + assert.strictEqual(responseBody.base64Encoded, false); + assert.strictEqual(responseBody.body, plainTextBody); +} + +async function testGzipHttp2() { + await testCompressedResponse(http2Server, 'gzip', '/gzip'); +} + +async function testGzipHttp2Secure() { + await testCompressedResponse(http2SecureServer, 'gzip', '/gzip'); +} + +async function testDeflateHttp2() { + await testCompressedResponse(http2Server, 'deflate', '/deflate'); +} + +async function testDeflateHttp2Secure() { + await testCompressedResponse(http2SecureServer, 'deflate', '/deflate'); +} + +async function testBrotliHttp2() { + await testCompressedResponse(http2Server, 'br', '/br'); +} + +async function testBrotliHttp2Secure() { + await testCompressedResponse(http2SecureServer, 'br', '/br'); +} + +async function testZstdHttp2() { + await testCompressedResponse(http2Server, 'zstd', '/zstd'); +} + +async function testZstdHttp2Secure() { + await testCompressedResponse(http2SecureServer, 'zstd', '/zstd'); +} + +async function testPlainHttp2() { + await testCompressedResponse(http2Server, 
null, '/plain'); +} + +async function testPlainHttp2Secure() { + await testCompressedResponse(http2SecureServer, null, '/plain'); +} + +const testNetworkInspection = async () => { + // Test gzip + await testGzipHttp2(); + session.removeAllListeners(); + await testGzipHttp2Secure(); + session.removeAllListeners(); + + // Test deflate + await testDeflateHttp2(); + session.removeAllListeners(); + await testDeflateHttp2Secure(); + session.removeAllListeners(); + + // Test brotli + await testBrotliHttp2(); + session.removeAllListeners(); + await testBrotliHttp2Secure(); + session.removeAllListeners(); + + // Test zstd + await testZstdHttp2(); + session.removeAllListeners(); + await testZstdHttp2Secure(); + session.removeAllListeners(); + + // Test plain (no compression) + await testPlainHttp2(); + session.removeAllListeners(); + await testPlainHttp2Secure(); + session.removeAllListeners(); +}; + +http2Server.listen(0, () => { + http2SecureServer.listen(0, async () => { + try { + await session.post('Network.enable'); + await testNetworkInspection(); + await session.post('Network.disable'); + } catch (e) { + assert.fail(e); + } finally { + terminate(); + } + }); +});