Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 75 additions & 0 deletions lib/internal/inspector/network.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,16 @@
const dc = require('diagnostics_channel');
const { now } = require('internal/perf/utils');
const { MIMEType } = require('internal/mime');
const {
createGunzip,
createInflate,
createBrotliDecompress,
createZstdDecompress,
} = require('zlib');
const { Buffer } = require('buffer');

const kInspectorRequestId = Symbol('kInspectorRequestId');
const kContentEncoding = Symbol('kContentEncoding');

// https://chromedevtools.github.io/devtools-protocol/1-3/Network/#type-ResourceType
const kResourceType = {
Expand Down Expand Up @@ -70,6 +78,69 @@
};
}

/**
* Gets the content encoding from the headers object.
* @param {object} headers - The response headers.
* @returns {string|undefined} - The content encoding (e.g., 'gzip', 'deflate', 'br', 'zstd').
*/
function getContentEncoding(headers = {}) {
  // Header names are already lower-cased in Node's header objects.
  const value = headers['content-encoding'];
  if (typeof value !== 'string') {
    // Absent or non-string (e.g. repeated header array): treat as no encoding.
    return undefined;
  }
  return StringPrototypeToLowerCase(value);
}

/**
* Creates a decompression stream based on the content encoding.
* @param {string} encoding - The content encoding (e.g., 'gzip', 'deflate', 'br', 'zstd').
* @returns {import('stream').Transform|null} - A decompression stream or null if encoding is not supported.
*/
function createDecompressor(encoding) {
  // Map each supported Content-Encoding token to its zlib stream factory.
  // The null prototype prevents inherited keys (e.g. 'toString') from
  // matching an arbitrary encoding string.
  const factories = {
    __proto__: null,
    'gzip': createGunzip,
    'x-gzip': createGunzip,
    'deflate': createInflate,
    'br': createBrotliDecompress,
    'zstd': createZstdDecompress,
  };
  const factory = factories[encoding];
  return factory === undefined ? null : factory();
}

/**
* Decompresses a chunk of data based on the content encoding.
* @param {Buffer} chunk - The compressed data chunk.
* @param {string} encoding - The content encoding.
 * @param {Function} callback - Callback invoked with (error, decompressedChunk).
 */
function decompressChunk(chunk, encoding, callback) {
  const decompressor = createDecompressor(encoding);
  if (!decompressor) {
    // Unsupported or missing encoding: no decompression needed,
    // return the original chunk untouched.
    callback(null, chunk);
    return;
  }

  // Guard so the callback fires exactly once, even if the zlib stream
  // emits 'error' after 'end' (or emits multiple 'error' events).
  let settled = false;
  function settle(err, data) {
    if (settled) {
      return;
    }
    settled = true;
    callback(err, data);
  }

  const chunks = [];
  decompressor.on('data', (decompressedChunk) => {
    chunks.push(decompressedChunk);
  });
  decompressor.on('end', () => {
    settle(null, Buffer.concat(chunks));
  });
  decompressor.on('error', () => {
    // Best-effort: on decompression failure, fall back to handing the
    // inspector the original (still compressed) chunk rather than erroring.
    settle(null, chunk);
  });

  decompressor.end(chunk);
}

function registerDiagnosticChannels(listenerPairs) {
function enable() {
ArrayPrototypeForEach(listenerPairs, ({ 0: channel, 1: listener }) => {
Expand All @@ -91,9 +162,13 @@

// Shared symbols and helpers consumed by the HTTP/1 (network_http.js) and
// HTTP/2 (network_http2.js) inspector network hooks.
module.exports = {
  kInspectorRequestId,
  kContentEncoding,
  kResourceType,
  getMonotonicTime,
  getNextRequestId,
  registerDiagnosticChannels,
  sniffMimeType,
  getContentEncoding,
  createDecompressor,
  decompressChunk,
};
84 changes: 67 additions & 17 deletions lib/internal/inspector/network_http.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,14 @@

const {
kInspectorRequestId,
kContentEncoding,
kResourceType,
getMonotonicTime,
getNextRequestId,
registerDiagnosticChannels,
  sniffMimeType,
  createDecompressor,
} = require('internal/inspector/network');
const { Network } = require('inspector');
const EventEmitter = require('events');
Expand All @@ -27,6 +30,7 @@
let host;
let charset;
let mimeType;
let contentEncoding;
const dict = {};
for (const { 0: key, 1: value } of ObjectEntries(headers)) {
const lowerCasedKey = key.toLowerCase();
Expand All @@ -38,6 +42,9 @@
charset = result.charset;
mimeType = result.mimeType;
}
if (lowerCasedKey === 'content-encoding') {
contentEncoding = typeof value === 'string' ? value.toLowerCase() : undefined;
}
if (typeof value === 'string') {
dict[key] = value;
} else if (ArrayIsArray(value)) {
Expand All @@ -50,7 +57,7 @@
dict[key] = String(value);
}
}
return [dict, host, charset, mimeType];
return [dict, host, charset, mimeType, contentEncoding];
};

/**
Expand Down Expand Up @@ -105,7 +112,10 @@
return;
}

const { 0: headers, 2: charset, 3: mimeType } = convertHeaderObject(response.headers);
const { 0: headers, 2: charset, 3: mimeType, 4: contentEncoding } = convertHeaderObject(response.headers);

// Store content encoding on the request for later use
request[kContentEncoding] = contentEncoding;

Network.responseReceived({
requestId: request[kInspectorRequestId],
Expand All @@ -121,24 +131,64 @@
},
});

// Unlike response.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(response, 'data', (chunk) => {
Network.dataReceived({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: chunk.byteLength,
encodedDataLength: chunk.byteLength,
data: chunk,
// Create a decompressor if the response is compressed
const decompressor = createDecompressor(contentEncoding);

if (decompressor) {
// Pipe decompressed data to DevTools
decompressor.on('data', (decompressedChunk) => {
Network.dataReceived({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: decompressedChunk.byteLength,
encodedDataLength: decompressedChunk.byteLength,
data: decompressedChunk,
});
});
});

// Wait until the response body is consumed by user code.
response.once('end', () => {
Network.loadingFinished({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
// Handle decompression errors gracefully - fall back to raw data
decompressor.on('error', () => {
// If decompression fails, the raw data has already been sent via the fallback
});
});

// Unlike response.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(response, 'data', (chunk) => {
// Feed the chunk into the decompressor
decompressor.write(chunk);
});

// Wait until the response body is consumed by user code.
response.once('end', () => {
// End the decompressor stream
decompressor.end();
decompressor.once('end', () => {
Network.loadingFinished({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
});
});
});
} else {
// No decompression needed, send data directly
// Unlike response.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(response, 'data', (chunk) => {
Network.dataReceived({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: chunk.byteLength,
encodedDataLength: chunk.byteLength,
data: chunk,
});
});

// Wait until the response body is consumed by user code.
response.once('end', () => {
Network.loadingFinished({
requestId: request[kInspectorRequestId],
timestamp: getMonotonicTime(),
});
});
}
}

module.exports = registerDiagnosticChannels([
Expand Down
84 changes: 66 additions & 18 deletions lib/internal/inspector/network_http2.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,18 @@ const {

const {
kInspectorRequestId,
kContentEncoding,
kResourceType,
getMonotonicTime,
getNextRequestId,
registerDiagnosticChannels,
sniffMimeType,
createDecompressor,
} = require('internal/inspector/network');
const { Network } = require('inspector');
const {
HTTP2_HEADER_AUTHORITY,
HTTP2_HEADER_CONTENT_ENCODING,
HTTP2_HEADER_CONTENT_TYPE,
HTTP2_HEADER_COOKIE,
HTTP2_HEADER_METHOD,
Expand All @@ -42,6 +45,7 @@ function convertHeaderObject(headers = {}) {
let statusCode;
let charset;
let mimeType;
let contentEncoding;
const dict = {};

for (const { 0: key, 1: value } of ObjectEntries(headers)) {
Expand All @@ -61,6 +65,8 @@ function convertHeaderObject(headers = {}) {
const result = sniffMimeType(value);
charset = result.charset;
mimeType = result.mimeType;
} else if (lowerCasedKey === HTTP2_HEADER_CONTENT_ENCODING) {
contentEncoding = typeof value === 'string' ? value.toLowerCase() : undefined;
}

if (typeof value === 'string') {
Expand All @@ -78,7 +84,7 @@ function convertHeaderObject(headers = {}) {

const url = `${scheme}://${authority}${path}`;

return [dict, url, method, statusCode, charset, mimeType];
return [dict, url, method, statusCode, charset, mimeType, contentEncoding];
}

/**
Expand Down Expand Up @@ -194,7 +200,16 @@ function onClientStreamFinish({ stream, headers }) {
return;
}

const { 0: convertedHeaderObject, 3: statusCode, 4: charset, 5: mimeType } = convertHeaderObject(headers);
const {
0: convertedHeaderObject,
3: statusCode,
4: charset,
5: mimeType,
6: contentEncoding,
} = convertHeaderObject(headers);

// Store content encoding on the stream for later use
stream[kContentEncoding] = contentEncoding;

Network.responseReceived({
requestId: stream[kInspectorRequestId],
Expand All @@ -210,23 +225,56 @@ function onClientStreamFinish({ stream, headers }) {
},
});

// Unlike stream.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
/**
* When a chunk of the response body has been received, cache it until `getResponseBody` request
* https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or
* stream it with `streamResourceContent` request.
* https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent
*/

Network.dataReceived({
requestId: stream[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: chunk.byteLength,
encodedDataLength: chunk.byteLength,
data: chunk,
// Create a decompressor if the response is compressed
const decompressor = createDecompressor(contentEncoding);

if (decompressor) {
// Pipe decompressed data to DevTools
decompressor.on('data', (decompressedChunk) => {
Network.dataReceived({
requestId: stream[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: decompressedChunk.byteLength,
encodedDataLength: decompressedChunk.byteLength,
data: decompressedChunk,
});
});
});

// Handle decompression errors gracefully
decompressor.on('error', () => {
// If decompression fails, the raw data has already been sent via the fallback
});

// Unlike stream.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
// Feed the chunk into the decompressor
decompressor.write(chunk);
});

// End the decompressor when the stream closes
stream.once('end', () => {
decompressor.end();
});
} else {
// No decompression needed, send data directly
// Unlike stream.on('data', ...), this does not put the stream into flowing mode.
EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
/**
* When a chunk of the response body has been received, cache it until `getResponseBody` request
* https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or
* stream it with `streamResourceContent` request.
* https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent
*/

Network.dataReceived({
requestId: stream[kInspectorRequestId],
timestamp: getMonotonicTime(),
dataLength: chunk.byteLength,
encodedDataLength: chunk.byteLength,
data: chunk,
});
});
}
}

/**
Expand Down
Loading
Loading