From 24daacda981d5580602242199357420badc99183 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Tue, 13 Jan 2026 17:13:46 -0800 Subject: [PATCH 01/18] using bson for buffer calls --- src/bson.ts | 35 +++++++++++ src/cmap/auth/scram.ts | 23 +++++--- src/cmap/commands.ts | 85 +++++++++++++++------------ src/cmap/connection.ts | 10 ++-- src/cmap/wire_protocol/compression.ts | 15 ++--- src/gridfs/upload.ts | 6 +- src/sessions.ts | 4 +- src/utils.ts | 27 +++++---- 8 files changed, 133 insertions(+), 72 deletions(-) diff --git a/src/bson.ts b/src/bson.ts index f9db54dc428..578ab8e665a 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -32,6 +32,13 @@ export { /** @internal */ export type BSONElement = BSON.OnDemand['BSONElement']; +/** @internal */ +export function toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { + return Buffer.isBuffer(buffer) + ? buffer + : Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); +} + export function parseToElementsToArray(bytes: Uint8Array, offset?: number): BSONElement[] { const res = BSON.onDemand.parseToElements(bytes, offset); return Array.isArray(res) ? 
res : [...res]; @@ -41,6 +48,34 @@ export const getInt32LE = BSON.onDemand.NumberUtils.getInt32LE; export const getFloat64LE = BSON.onDemand.NumberUtils.getFloat64LE; export const getBigInt64LE = BSON.onDemand.NumberUtils.getBigInt64LE; export const toUTF8 = BSON.onDemand.ByteUtils.toUTF8; +export const writeInt32LE = BSON.onDemand.NumberUtils.setInt32LE; + +export const fromUTF8 = (text: string) => toLocalBufferType(BSON.onDemand.ByteUtils.fromUTF8(text)); + +export const concatBuffers = (list: Buffer[] | Uint8Array[]) => { + return toLocalBufferType(BSON.onDemand.ByteUtils.concat(list)); +}; +export const allocateBuffer = (size: number) => + toLocalBufferType(BSON.onDemand.ByteUtils.allocate(size)); +export const allocateUnsafeBuffer = (size: number) => + toLocalBufferType(BSON.onDemand.ByteUtils.allocateUnsafe(size)); + +export const utf8ByteLength = BSON.onDemand.ByteUtils.utf8ByteLength; +export const toBase64 = BSON.onDemand.ByteUtils.toBase64; +export const encodeUTF8Into = BSON.onDemand.ByteUtils.encodeUTF8Into; + +const validateBufferInputs = (buffer: Uint8Array, offset: number, length: number) => { + if (offset < 0 || offset + length > buffer.length) { + throw new RangeError( + `Attempt to access memory outside buffer bounds: buffer length: ${buffer.length}, offset: ${offset}, length: ${length}` + ); + } +}; + +export const readInt32LE = (buffer: Uint8Array, offset: number): number => { + validateBufferInputs(buffer, offset, 4); + return getInt32LE(buffer, offset); +}; /** * BSON Serialization options. 
diff --git a/src/cmap/auth/scram.ts b/src/cmap/auth/scram.ts index b10b2007dc8..3e382d73bb2 100644 --- a/src/cmap/auth/scram.ts +++ b/src/cmap/auth/scram.ts @@ -1,7 +1,14 @@ import { saslprep } from '@mongodb-js/saslprep'; import * as crypto from 'crypto'; -import { Binary, type Document } from '../../bson'; +import { + allocateBuffer, + Binary, + concatBuffers, + type Document, + fromUTF8, + toBase64 +} from '../../bson'; import { MongoInvalidArgumentError, MongoMissingCredentialsError, @@ -68,11 +75,11 @@ function cleanUsername(username: string) { function clientFirstMessageBare(username: string, nonce: Buffer) { // NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8. // Since the username is not sasl-prep-d, we need to do this here. - return Buffer.concat([ - Buffer.from('n=', 'utf8'), - Buffer.from(username, 'utf8'), - Buffer.from(',r=', 'utf8'), - Buffer.from(nonce.toString('base64'), 'utf8') + return concatBuffers([ + fromUTF8('n='), + fromUTF8(username), + fromUTF8(',r='), + fromUTF8(toBase64(nonce)) ]); } @@ -91,7 +98,7 @@ function makeFirstMessage( saslStart: 1, mechanism, payload: new Binary( - Buffer.concat([Buffer.from('n,,', 'utf8'), clientFirstMessageBare(username, nonce)]) + concatBuffers([Buffer.from('n,,', 'utf8'), clientFirstMessageBare(username, nonce)]) ), autoAuthorize: 1, options: { skipEmptyExchange: true } @@ -199,7 +206,7 @@ async function continueScramConversation( const retrySaslContinueCmd = { saslContinue: 1, conversationId: r.conversationId, - payload: Buffer.alloc(0) + payload: allocateBuffer(0) }; await connection.command(ns(`${db}.$cmd`), retrySaslContinueCmd, undefined); diff --git a/src/cmap/commands.ts b/src/cmap/commands.ts index 177ca3a7835..5428e5c6a46 100644 --- a/src/cmap/commands.ts +++ b/src/cmap/commands.ts @@ -1,5 +1,15 @@ -import type { BSONSerializeOptions, Document, Long } from '../bson'; -import * as BSON from '../bson'; +import { + allocateBuffer, + allocateUnsafeBuffer, + BSON, + type 
BSONSerializeOptions, + concatBuffers, + type Document, + type Long, + readInt32LE, + utf8ByteLength, + writeInt32LE +} from '../bson'; import { MongoInvalidArgumentError, MongoRuntimeError } from '../error'; import { type ReadPreference } from '../read_preference'; import type { ClientSession } from '../sessions'; @@ -30,7 +40,7 @@ const QUERY_FAILURE = 2; const SHARD_CONFIG_STALE = 4; const AWAIT_CAPABLE = 8; -const encodeUTF8Into = BSON.BSON.onDemand.ByteUtils.encodeUTF8Into; +const encodeUTF8Into = BSON.onDemand.ByteUtils.encodeUTF8Into; /** @internal */ export type WriteProtocolMessageType = OpQueryRequest | OpMsgRequest; @@ -182,10 +192,10 @@ export class OpQueryRequest { if (this.batchSize !== this.numberToReturn) this.numberToReturn = this.batchSize; // Allocate write protocol header buffer - const header = Buffer.alloc( + const header = allocateBuffer( 4 * 4 + // Header 4 + // Flags - Buffer.byteLength(this.ns) + + utf8ByteLength(this.ns) + 1 + // namespace 4 + // numberToSkip 4 // numberToReturn @@ -256,7 +266,7 @@ export class OpQueryRequest { index = index + 4; // Write collection name - index = index + header.write(this.ns, index, 'utf8') + 1; + index = index + encodeUTF8Into(header, this.ns, index) + 1; header[index - 1] = 0; // Write header information flags numberToSkip @@ -364,10 +374,10 @@ export class OpReply { this.index = 20; // Read the message body - this.responseFlags = this.data.readInt32LE(0); - this.cursorId = new BSON.Long(this.data.readInt32LE(4), this.data.readInt32LE(8)); - this.startingFrom = this.data.readInt32LE(12); - this.numberReturned = this.data.readInt32LE(16); + this.responseFlags = readInt32LE(this.data, 0); + this.cursorId = new BSON.Long(readInt32LE(this.data, 4), readInt32LE(this.data, 8)); + this.startingFrom = readInt32LE(this.data, 12); + this.numberReturned = readInt32LE(this.data, 16); if (this.numberReturned < 0 || this.numberReturned > 2 ** 32 - 1) { throw new RangeError( @@ -446,7 +456,7 @@ export class 
DocumentSequence { this.serializedDocumentsLength = 0; // Document sequences starts with type 1 at the first byte. // Field strings must always be UTF-8. - const buffer = Buffer.allocUnsafe(1 + 4 + this.field.length + 1); + const buffer = allocateUnsafeBuffer(1 + 4 + this.field.length + 1); buffer[0] = 1; // Third part is the field name at offset 5 with trailing null byte. encodeUTF8Into(buffer, `${this.field}\0`, 5); @@ -482,7 +492,7 @@ export class DocumentSequence { * @returns The section bytes. */ toBin(): Uint8Array { - return Buffer.concat(this.chunks); + return concatBuffers(this.chunks); } } @@ -547,7 +557,7 @@ export class OpMsgRequest { flags |= OPTS_EXHAUST_ALLOWED; } - const header = Buffer.alloc( + const header = allocateBuffer( 4 * 4 + // Header 4 // Flags ); @@ -558,11 +568,11 @@ export class OpMsgRequest { const command = this.command; totalLength += this.makeSections(buffers, command); - header.writeInt32LE(totalLength, 0); // messageLength - header.writeInt32LE(this.requestId, 4); // requestID - header.writeInt32LE(0, 8); // responseTo - header.writeInt32LE(OP_MSG, 12); // opCode - header.writeUInt32LE(flags, 16); // flags + writeInt32LE(header, 0, totalLength); // messageLength + writeInt32LE(header, 4, this.requestId); // requestID + writeInt32LE(header, 8, 0); // responseTo + writeInt32LE(header, 12, OP_MSG); // opCode + writeInt32LE(header, 16, flags); // flags return buffers; } @@ -571,7 +581,7 @@ export class OpMsgRequest { */ makeSections(buffers: Uint8Array[], document: Document): number { const sequencesBuffer = this.extractDocumentSequences(document); - const payloadTypeBuffer = Buffer.allocUnsafe(1); + const payloadTypeBuffer = allocateUnsafeBuffer(1); payloadTypeBuffer[0] = 0; const documentBuffer = this.serializeBson(document); @@ -606,11 +616,11 @@ export class OpMsgRequest { } } if (chunks.length > 0) { - return Buffer.concat(chunks); + return concatBuffers(chunks); } // If we have no document sequences we return an empty buffer for 
nothing to add // to the payload. - return Buffer.alloc(0); + return allocateBuffer(0); } serializeBson(document: Document): Uint8Array { @@ -676,7 +686,7 @@ export class OpMsgResponse { this.fromCompressed = msgHeader.fromCompressed; // Read response flags - this.responseFlags = msgBody.readInt32LE(0); + this.responseFlags = readInt32LE(msgBody, 0); this.checksumPresent = (this.responseFlags & OPTS_CHECKSUM_PRESENT) !== 0; this.moreToCome = (this.responseFlags & OPTS_MORE_TO_COME) !== 0; this.exhaustAllowed = (this.responseFlags & OPTS_EXHAUST_ALLOWED) !== 0; @@ -700,9 +710,9 @@ export class OpMsgResponse { this.index = 4; while (this.index < this.data.length) { - const payloadType = this.data.readUInt8(this.index++); + const payloadType = this.data[this.index++]; if (payloadType === 0) { - const bsonSize = this.data.readUInt32LE(this.index); + const bsonSize = readInt32LE(this.data, this.index); const bin = this.data.subarray(this.index, this.index + bsonSize); this.sections.push(bin); @@ -759,30 +769,31 @@ export class OpCompressedRequest { } async toBin(): Promise { - const concatenatedOriginalCommandBuffer = Buffer.concat(this.command.toBin()); + const concatenatedOriginalCommandBuffer = concatBuffers(this.command.toBin()); // otherwise, compress the message const messageToBeCompressed = concatenatedOriginalCommandBuffer.slice(MESSAGE_HEADER_SIZE); // Extract information needed for OP_COMPRESSED from the uncompressed message - const originalCommandOpCode = concatenatedOriginalCommandBuffer.readInt32LE(12); + const originalCommandOpCode = readInt32LE(concatenatedOriginalCommandBuffer, 12); // Compress the message body const compressedMessage = await compress(this.options, messageToBeCompressed); // Create the msgHeader of OP_COMPRESSED - const msgHeader = Buffer.alloc(MESSAGE_HEADER_SIZE); - msgHeader.writeInt32LE( - MESSAGE_HEADER_SIZE + COMPRESSION_DETAILS_SIZE + compressedMessage.length, - 0 + const msgHeader = allocateBuffer(MESSAGE_HEADER_SIZE); + 
writeInt32LE( + msgHeader, + 0, + MESSAGE_HEADER_SIZE + COMPRESSION_DETAILS_SIZE + compressedMessage.length ); // messageLength - msgHeader.writeInt32LE(this.command.requestId, 4); // requestID - msgHeader.writeInt32LE(0, 8); // responseTo (zero) - msgHeader.writeInt32LE(OP_COMPRESSED, 12); // opCode + writeInt32LE(msgHeader, 4, this.command.requestId); // requestID + writeInt32LE(msgHeader, 8, 0); // responseTo (zero) + writeInt32LE(msgHeader, 12, OP_COMPRESSED); // opCode // Create the compression details of OP_COMPRESSED - const compressionDetails = Buffer.alloc(COMPRESSION_DETAILS_SIZE); - compressionDetails.writeInt32LE(originalCommandOpCode, 0); // originalOpcode - compressionDetails.writeInt32LE(messageToBeCompressed.length, 4); // Size of the uncompressed compressedMessage, excluding the MsgHeader - compressionDetails.writeUInt8(Compressor[this.options.agreedCompressor], 8); // compressorID + const compressionDetails = allocateBuffer(COMPRESSION_DETAILS_SIZE); + writeInt32LE(compressionDetails, 0, originalCommandOpCode); // originalOpcode + writeInt32LE(compressionDetails, 4, messageToBeCompressed.length); // Size of the uncompressed compressedMessage, excluding the MsgHeader + writeInt32LE(compressionDetails, 8, Compressor[this.options.agreedCompressor]); // compressorID return [msgHeader, compressionDetails, compressedMessage]; } } diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index 9652e3a5e4f..09f8498723f 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -2,7 +2,9 @@ import { type Readable, Transform, type TransformCallback } from 'stream'; import { clearTimeout, setTimeout } from 'timers'; import { + BSON, type BSONSerializeOptions, + concatBuffers, deserialize, type DeserializeOptions, type Document, @@ -174,7 +176,7 @@ function streamIdentifier(stream: Stream, options: ConnectionOptions): string { return HostAddress.fromHostPort(remoteAddress, remotePort).toString(); } - return uuidV4().toString('hex'); + return 
BSON.onDemand.ByteUtils.toHex(uuidV4()); } /** @internal */ @@ -204,7 +206,7 @@ export class Connection extends TypedEventEmitter { private lastUseTime: number; private clusterTime: Document | null = null; private error: Error | null = null; - private dataEvents: AsyncGenerator | null = null; + private dataEvents: AsyncGenerator | null = null; private readonly socketTimeoutMS: number; private readonly monitorCommands: boolean; @@ -696,7 +698,7 @@ export class Connection extends TypedEventEmitter { zlibCompressionLevel: options.zlibCompressionLevel ?? 0 }); - const buffer = Buffer.concat(await finalCommand.toBin()); + const buffer = concatBuffers(await finalCommand.toBin()); if (options.timeoutContext?.csotEnabled()) { if ( @@ -794,7 +796,7 @@ export class SizedMessageTransform extends Transform { this.connection = connection; } - override _transform(chunk: Buffer, encoding: unknown, callback: TransformCallback): void { + override _transform(chunk: Uint8Array, encoding: unknown, callback: TransformCallback): void { if (this.connection.delayedTimeoutId != null) { clearTimeout(this.connection.delayedTimeoutId); this.connection.delayedTimeoutId = null; diff --git a/src/cmap/wire_protocol/compression.ts b/src/cmap/wire_protocol/compression.ts index 4ee941ff3c9..81ecd8c3363 100644 --- a/src/cmap/wire_protocol/compression.ts +++ b/src/cmap/wire_protocol/compression.ts @@ -1,5 +1,6 @@ import * as zlib from 'zlib'; +import { concatBuffers, readInt32LE } from '../../bson'; import { LEGACY_HELLO_COMMAND } from '../../constants'; import { getSnappy, getZstdLibrary, type SnappyLib, type ZStandard } from '../../deps'; import { MongoDecompressionError, MongoInvalidArgumentError } from '../../error'; @@ -168,7 +169,7 @@ export async function compressCommand( zlibCompressionLevel: description.zlibCompressionLevel ?? 
0 }); const data = await finalCommand.toBin(); - return Buffer.concat(data); + return concatBuffers(data); } /** @@ -180,10 +181,10 @@ export async function compressCommand( */ export async function decompressResponse(message: Buffer): Promise { const messageHeader: MessageHeader = { - length: message.readInt32LE(0), - requestId: message.readInt32LE(4), - responseTo: message.readInt32LE(8), - opCode: message.readInt32LE(12) + length: readInt32LE(message, 0), + requestId: readInt32LE(message, 4), + responseTo: readInt32LE(message, 8), + opCode: readInt32LE(message, 12) }; if (messageHeader.opCode !== OP_COMPRESSED) { @@ -195,8 +196,8 @@ export async function decompressResponse(message: Buffer): Promise = (error?: AnyError, result?: T) => void; export type AnyOptions = Document; export const ByteUtils = { - toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { - return Buffer.isBuffer(buffer) - ? buffer - : Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); - }, + toLocalBufferType: toLocalBufferType, equals(this: void, seqA: Uint8Array, seqB: Uint8Array) { return ByteUtils.toLocalBufferType(seqA).equals(seqB); @@ -821,13 +826,13 @@ export class BufferPool { } const firstBuffer = this.buffers.first(); if (firstBuffer != null && firstBuffer.byteLength >= 4) { - return firstBuffer.readInt32LE(0); + return getInt32LE(firstBuffer, 0); } // Unlikely case: an int32 is split across buffers. // Use read and put the returned buffer back on top const top4Bytes = this.read(4); - const value = top4Bytes.readInt32LE(0); + const value = getInt32LE(top4Bytes, 0); // Put it back. 
this.totalByteLength += 4; @@ -844,12 +849,12 @@ export class BufferPool { // oversized request returns empty buffer if (size > this.totalByteLength) { - return Buffer.alloc(0); + return allocateBuffer(0); } // We know we have enough, we just don't know how it is spread across chunks // TODO(NODE-4732): alloc API should change based on raw option - const result = Buffer.allocUnsafe(size); + const result = allocateUnsafeBuffer(size); for (let bytesRead = 0; bytesRead < size; ) { const buffer = this.buffers.shift(); @@ -869,7 +874,7 @@ export class BufferPool { } } - return result; + return ByteUtils.toLocalBufferType(result); } } From 51efd36bb0e0c24be0aa59a56254a3b71fc8a29b Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Tue, 13 Jan 2026 17:30:54 -0800 Subject: [PATCH 02/18] removing unnecessary code --- src/cmap/connection.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index 09f8498723f..51c0725657b 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -206,7 +206,7 @@ export class Connection extends TypedEventEmitter { private lastUseTime: number; private clusterTime: Document | null = null; private error: Error | null = null; - private dataEvents: AsyncGenerator | null = null; + private dataEvents: AsyncGenerator | null = null; private readonly socketTimeoutMS: number; private readonly monitorCommands: boolean; @@ -796,7 +796,7 @@ export class SizedMessageTransform extends Transform { this.connection = connection; } - override _transform(chunk: Uint8Array, encoding: unknown, callback: TransformCallback): void { + override _transform(chunk: Buffer, encoding: unknown, callback: TransformCallback): void { if (this.connection.delayedTimeoutId != null) { clearTimeout(this.connection.delayedTimeoutId); this.connection.delayedTimeoutId = null; From 3d28ae5475be8af1f0fb3355d5b5b12ba8784b5b Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 14 Jan 2026 11:52:05 -0800 
Subject: [PATCH 03/18] lots of changes related to removing Buffer from internal uses: 1. using Uint8Array instead of Buffer in all internal code 2. enabling eslint rule to block Buffer use in src 3. using BSON's ByteUtils in places where we depended on Buffer operations 4. using ByteUtils.isUint8Array instead of Buffer.isBuffer 5. introduced writeInt32LE wrapper that uses the same order of variables, to avoid embarrassing mistakes --- .eslintrc.json | 7 +++ src/bson.ts | 38 ++++++------ src/client-side-encryption/auto_encrypter.ts | 30 ++++++---- .../client_encryption.ts | 11 +++- src/cmap/auth/auth_provider.ts | 2 +- src/cmap/auth/mongodb_aws.ts | 6 +- src/cmap/auth/plain.ts | 4 +- src/cmap/auth/scram.ts | 53 +++++++---------- src/cmap/commands.ts | 59 ++++++++++--------- src/cmap/connection.ts | 4 +- src/cmap/handshake/client_metadata.ts | 6 +- src/cmap/wire_protocol/compression.ts | 17 +++--- src/cmap/wire_protocol/on_data.ts | 10 ++-- src/deps.ts | 8 +-- src/gridfs/download.ts | 4 +- src/gridfs/upload.ts | 23 ++++---- src/sessions.ts | 10 +++- src/utils.ts | 31 ++++++---- test/tools/runner/config.ts | 3 +- test/unit/cmap/commands.test.ts | 13 ++-- 20 files changed, 185 insertions(+), 154 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index d009780f372..ff50d840307 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -285,6 +285,13 @@ } ] } + ], + "no-restricted-globals": [ + "error", + { + "name": "Buffer", + "message": "Use Uint8Array instead" + } ] } }, diff --git a/src/bson.ts b/src/bson.ts index 578ab8e665a..eda6746bf8f 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -1,5 +1,5 @@ /* eslint-disable no-restricted-imports */ -import { BSON, type DeserializeOptions, type SerializeOptions } from 'bson'; +import { BSON, ByteUtils, type DeserializeOptions, type SerializeOptions } from 'bson'; export { Binary, @@ -8,6 +8,7 @@ export { BSONRegExp, BSONSymbol, BSONType, + ByteUtils, calculateObjectSize, Code, DBRef, @@ -32,13 +33,6 @@ export { /** @internal */
export type BSONElement = BSON.OnDemand['BSONElement']; -/** @internal */ -export function toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { - return Buffer.isBuffer(buffer) - ? buffer - : Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); -} - export function parseToElementsToArray(bytes: Uint8Array, offset?: number): BSONElement[] { const res = BSON.onDemand.parseToElements(bytes, offset); return Array.isArray(res) ? res : [...res]; @@ -48,22 +42,30 @@ export const getInt32LE = BSON.onDemand.NumberUtils.getInt32LE; export const getFloat64LE = BSON.onDemand.NumberUtils.getFloat64LE; export const getBigInt64LE = BSON.onDemand.NumberUtils.getBigInt64LE; export const toUTF8 = BSON.onDemand.ByteUtils.toUTF8; -export const writeInt32LE = BSON.onDemand.NumberUtils.setInt32LE; -export const fromUTF8 = (text: string) => toLocalBufferType(BSON.onDemand.ByteUtils.fromUTF8(text)); +// BSON wrappers -export const concatBuffers = (list: Buffer[] | Uint8Array[]) => { - return toLocalBufferType(BSON.onDemand.ByteUtils.concat(list)); +// writeInt32LE, same order of arguments as Buffer.writeInt32LE +export const writeInt32LE = (destination: Uint8Array, value: number, offset: number) => + BSON.onDemand.NumberUtils.setInt32LE(destination, offset, value); + +// various wrappers that consume and return local buffer types + +export const fromUTF8 = (text: string) => + ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromUTF8(text)); +export const fromBase64 = (b64: string) => + ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromBase64(b64)); +export const fromNumberArray = (array: number[]) => + ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromNumberArray(array)); +export const concatBuffers = (list: Uint8Array[]) => { + return ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.concat(list)); }; export const allocateBuffer = (size: number) => - toLocalBufferType(BSON.onDemand.ByteUtils.allocate(size)); + 
ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.allocate(size)); export const allocateUnsafeBuffer = (size: number) => - toLocalBufferType(BSON.onDemand.ByteUtils.allocateUnsafe(size)); - -export const utf8ByteLength = BSON.onDemand.ByteUtils.utf8ByteLength; -export const toBase64 = BSON.onDemand.ByteUtils.toBase64; -export const encodeUTF8Into = BSON.onDemand.ByteUtils.encodeUTF8Into; + ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.allocateUnsafe(size)); +// validates buffer inputs, used for read operations const validateBufferInputs = (buffer: Uint8Array, offset: number, length: number) => { if (offset < 0 || offset + length > buffer.length) { throw new RangeError( diff --git a/src/client-side-encryption/auto_encrypter.ts b/src/client-side-encryption/auto_encrypter.ts index 0f9c79752a3..806c5f758a1 100644 --- a/src/client-side-encryption/auto_encrypter.ts +++ b/src/client-side-encryption/auto_encrypter.ts @@ -1,7 +1,7 @@ import { type MongoCrypt, type MongoCryptOptions } from 'mongodb-client-encryption'; import * as net from 'net'; -import { deserialize, type Document, serialize } from '../bson'; +import { ByteUtils, deserialize, type Document, serialize } from '../bson'; import { type CommandOptions, type ProxyOptions } from '../cmap/connection'; import { kDecorateResult } from '../constants'; import { getMongoDBClientEncryption } from '../deps'; @@ -256,20 +256,26 @@ export class AutoEncrypter { errorWrapper: defaultErrorWrapper }; if (options.schemaMap) { - mongoCryptOptions.schemaMap = Buffer.isBuffer(options.schemaMap) - ? options.schemaMap - : (serialize(options.schemaMap) as Buffer); + if (ByteUtils.isUint8Array(options.schemaMap)) { + mongoCryptOptions.schemaMap = options.schemaMap as Uint8Array; + } else { + mongoCryptOptions.schemaMap = serialize(options.schemaMap); + } } if (options.encryptedFieldsMap) { - mongoCryptOptions.encryptedFieldsMap = Buffer.isBuffer(options.encryptedFieldsMap) - ? 
options.encryptedFieldsMap - : (serialize(options.encryptedFieldsMap) as Buffer); + if (ByteUtils.isUint8Array(options.encryptedFieldsMap)) { + mongoCryptOptions.encryptedFieldsMap = options.encryptedFieldsMap as Uint8Array; + } else { + mongoCryptOptions.encryptedFieldsMap = serialize(options.encryptedFieldsMap); + } } - mongoCryptOptions.kmsProviders = !Buffer.isBuffer(this._kmsProviders) - ? (serialize(this._kmsProviders) as Buffer) - : this._kmsProviders; + if (ByteUtils.isUint8Array(this._kmsProviders)) { + mongoCryptOptions.kmsProviders = this._kmsProviders as any as Uint8Array; + } else { + mongoCryptOptions.kmsProviders = serialize(this._kmsProviders); + } if (options.options?.logger) { mongoCryptOptions.logger = options.options.logger; @@ -396,7 +402,9 @@ export class AutoEncrypter { return cmd; } - const commandBuffer = Buffer.isBuffer(cmd) ? cmd : serialize(cmd, options); + const commandBuffer: Uint8Array = ByteUtils.isUint8Array(cmd) + ? (cmd as Uint8Array) + : serialize(cmd, options); const context = this._mongocrypt.makeEncryptionContext( MongoDBCollectionNamespace.fromString(ns).db, commandBuffer diff --git a/src/client-side-encryption/client_encryption.ts b/src/client-side-encryption/client_encryption.ts index 6303eba4bac..851c4200b22 100644 --- a/src/client-side-encryption/client_encryption.ts +++ b/src/client-side-encryption/client_encryption.ts @@ -6,6 +6,7 @@ import type { import { type Binary, + ByteUtils, deserialize, type Document, type Int32, @@ -141,11 +142,15 @@ export class ClientEncryption { throw new MongoCryptInvalidArgumentError('Missing required option `keyVaultNamespace`'); } + let kmsProviders; + if (!ByteUtils.isUint8Array(this._kmsProviders)) { + kmsProviders = serialize(this._kmsProviders); + } else { + kmsProviders = this._kmsProviders as any as Uint8Array; + } const mongoCryptOptions: MongoCryptOptions = { ...options, - kmsProviders: !Buffer.isBuffer(this._kmsProviders) - ? 
(serialize(this._kmsProviders) as Buffer) - : this._kmsProviders, + kmsProviders, errorWrapper: defaultErrorWrapper }; diff --git a/src/cmap/auth/auth_provider.ts b/src/cmap/auth/auth_provider.ts index e40c791ea5d..d39de6500c8 100644 --- a/src/cmap/auth/auth_provider.ts +++ b/src/cmap/auth/auth_provider.ts @@ -21,7 +21,7 @@ export class AuthContext { /** A response from an initial auth attempt, only some mechanisms use this (e.g, SCRAM) */ response?: Document; /** A random nonce generated for use in an authentication conversation */ - nonce?: Buffer; + nonce?: Uint8Array; constructor( connection: Connection, diff --git a/src/cmap/auth/mongodb_aws.ts b/src/cmap/auth/mongodb_aws.ts index b9a2cdef0a7..868fdc30b96 100644 --- a/src/cmap/auth/mongodb_aws.ts +++ b/src/cmap/auth/mongodb_aws.ts @@ -5,7 +5,7 @@ import { MongoMissingCredentialsError, MongoRuntimeError } from '../../error'; -import { ByteUtils, maxWireVersion, ns, randomBytes } from '../../utils'; +import { maxWireVersion, ns, randomBytes } from '../../utils'; import { type AuthContext, AuthProvider } from './auth_provider'; import { type AWSCredentialProvider, @@ -92,7 +92,7 @@ export class MongoDBAWS extends AuthProvider { throw new MongoRuntimeError(`Invalid server nonce length ${serverNonce.length}, expected 64`); } - if (!ByteUtils.equals(serverNonce.subarray(0, nonce.byteLength), nonce)) { + if (!BSON.ByteUtils.equals(serverNonce.subarray(0, nonce.byteLength), nonce)) { // throw because the serverNonce's leading 32 bytes must equal the client nonce's 32 bytes // https://github.com/mongodb/specifications/blob/master/source/auth/auth.md#conversation-5 @@ -115,7 +115,7 @@ export class MongoDBAWS extends AuthProvider { headers: { 'Content-Type': 'application/x-www-form-urlencoded', 'Content-Length': body.length, - 'X-MongoDB-Server-Nonce': ByteUtils.toBase64(serverNonce), + 'X-MongoDB-Server-Nonce': BSON.ByteUtils.toBase64(serverNonce), 'X-MongoDB-GS2-CB-Flag': 'n' }, path: '/', diff --git 
a/src/cmap/auth/plain.ts b/src/cmap/auth/plain.ts index f5a43863113..fbebd3f82d8 100644 --- a/src/cmap/auth/plain.ts +++ b/src/cmap/auth/plain.ts @@ -1,4 +1,4 @@ -import { Binary } from '../../bson'; +import { Binary, fromUTF8 } from '../../bson'; import { MongoMissingCredentialsError } from '../../error'; import { ns } from '../../utils'; import { type AuthContext, AuthProvider } from './auth_provider'; @@ -12,7 +12,7 @@ export class Plain extends AuthProvider { const { username, password } = credentials; - const payload = new Binary(Buffer.from(`\x00${username}\x00${password}`)); + const payload = new Binary(fromUTF8(`\x00${username}\x00${password}`)); const command = { saslStart: 1, mechanism: 'PLAIN', diff --git a/src/cmap/auth/scram.ts b/src/cmap/auth/scram.ts index 3e382d73bb2..254e780a268 100644 --- a/src/cmap/auth/scram.ts +++ b/src/cmap/auth/scram.ts @@ -4,10 +4,12 @@ import * as crypto from 'crypto'; import { allocateBuffer, Binary, + ByteUtils, concatBuffers, type Document, - fromUTF8, - toBase64 + fromBase64, + fromNumberArray, + fromUTF8 } from '../../bson'; import { MongoInvalidArgumentError, @@ -72,21 +74,21 @@ function cleanUsername(username: string) { return username.replace('=', '=3D').replace(',', '=2C'); } -function clientFirstMessageBare(username: string, nonce: Buffer) { +function clientFirstMessageBare(username: string, nonce: Uint8Array) { // NOTE: This is done b/c Javascript uses UTF-16, but the server is hashing in UTF-8. // Since the username is not sasl-prep-d, we need to do this here. 
return concatBuffers([ fromUTF8('n='), fromUTF8(username), fromUTF8(',r='), - fromUTF8(toBase64(nonce)) + fromUTF8(ByteUtils.toBase64(nonce)) ]); } function makeFirstMessage( cryptoMethod: CryptoMethod, credentials: MongoCredentials, - nonce: Buffer + nonce: Uint8Array ) { const username = cleanUsername(credentials.username); const mechanism = @@ -97,9 +99,7 @@ function makeFirstMessage( return { saslStart: 1, mechanism, - payload: new Binary( - concatBuffers([Buffer.from('n,,', 'utf8'), clientFirstMessageBare(username, nonce)]) - ), + payload: new Binary(concatBuffers([fromUTF8('n,,'), clientFirstMessageBare(username, nonce)])), autoAuthorize: 1, options: { skipEmptyExchange: true } }; @@ -143,7 +143,7 @@ async function continueScramConversation( const processedPassword = cryptoMethod === 'sha256' ? saslprep(password) : passwordDigest(username, password); - const payload: Binary = Buffer.isBuffer(response.payload) + const payload: Binary = ByteUtils.isUint8Array(response.payload) ? new Binary(response.payload) : response.payload; @@ -164,12 +164,7 @@ async function continueScramConversation( // Set up start of proof const withoutProof = `c=biws,r=${rnonce}`; - const saltedPassword = HI( - processedPassword, - Buffer.from(salt, 'base64'), - iterations, - cryptoMethod - ); + const saltedPassword = HI(processedPassword, fromBase64(salt), iterations, cryptoMethod); const clientKey = HMAC(cryptoMethod, saltedPassword, 'Client Key'); const serverKey = HMAC(cryptoMethod, saltedPassword, 'Server Key'); @@ -188,13 +183,13 @@ async function continueScramConversation( const saslContinueCmd = { saslContinue: 1, conversationId: response.conversationId, - payload: new Binary(Buffer.from(clientFinal)) + payload: new Binary(fromUTF8(clientFinal)) }; const r = await connection.command(ns(`${db}.$cmd`), saslContinueCmd, undefined); const parsedResponse = parsePayload(r.payload); - if (!compareDigest(Buffer.from(parsedResponse.v, 'base64'), serverSignature)) { + if 
(!compareDigest(fromBase64(parsedResponse.v), serverSignature)) { throw new MongoRuntimeError('Server returned an invalid signature'); } @@ -252,15 +247,7 @@ function passwordDigest(username: string, password: string) { } // XOR two buffers -function xor(a: Buffer, b: Buffer) { - if (!Buffer.isBuffer(a)) { - a = Buffer.from(a); - } - - if (!Buffer.isBuffer(b)) { - b = Buffer.from(b); - } - +function xor(a: Uint8Array, b: Uint8Array) { const length = Math.max(a.length, b.length); const res = []; @@ -268,19 +255,19 @@ function xor(a: Buffer, b: Buffer) { res.push(a[i] ^ b[i]); } - return Buffer.from(res).toString('base64'); + return ByteUtils.toBase64(fromNumberArray(res)); } -function H(method: CryptoMethod, text: Buffer) { +function H(method: CryptoMethod, text: Uint8Array): Uint8Array { return crypto.createHash(method).update(text).digest(); } -function HMAC(method: CryptoMethod, key: Buffer, text: Buffer | string) { +function HMAC(method: CryptoMethod, key: Uint8Array, text: Uint8Array | string): Uint8Array { return crypto.createHmac(method, key).update(text).digest(); } interface HICache { - [key: string]: Buffer; + [key: string]: Uint8Array; } let _hiCache: HICache = {}; @@ -295,9 +282,9 @@ const hiLengthMap = { sha1: 20 }; -function HI(data: string, salt: Buffer, iterations: number, cryptoMethod: CryptoMethod) { +function HI(data: string, salt: Uint8Array, iterations: number, cryptoMethod: CryptoMethod) { // omit the work if already generated - const key = [data, salt.toString('base64'), iterations].join('_'); + const key = [data, ByteUtils.toBase64(salt), iterations].join('_'); if (_hiCache[key] != null) { return _hiCache[key]; } @@ -321,7 +308,7 @@ function HI(data: string, salt: Buffer, iterations: number, cryptoMethod: Crypto return saltedData; } -function compareDigest(lhs: Buffer, rhs: Uint8Array) { +function compareDigest(lhs: Uint8Array, rhs: Uint8Array) { if (lhs.length !== rhs.length) { return false; } diff --git a/src/cmap/commands.ts 
b/src/cmap/commands.ts index 5428e5c6a46..5dda9c872d2 100644 --- a/src/cmap/commands.ts +++ b/src/cmap/commands.ts @@ -3,11 +3,11 @@ import { allocateUnsafeBuffer, BSON, type BSONSerializeOptions, + ByteUtils, concatBuffers, type Document, type Long, readInt32LE, - utf8ByteLength, writeInt32LE } from '../bson'; import { MongoInvalidArgumentError, MongoRuntimeError } from '../error'; @@ -195,7 +195,7 @@ export class OpQueryRequest { const header = allocateBuffer( 4 * 4 + // Header 4 + // Flags - utf8ByteLength(this.ns) + + ByteUtils.utf8ByteLength(this.ns) + 1 + // namespace 4 + // numberToSkip 4 // numberToReturn @@ -300,8 +300,8 @@ export interface MessageHeader { /** @internal */ export class OpReply { parsed: boolean; - raw: Buffer; - data: Buffer; + raw: Uint8Array; + data: Uint8Array; opts: BSONSerializeOptions; length: number; requestId: number; @@ -328,9 +328,9 @@ export class OpReply { moreToCome = false; constructor( - message: Buffer, + message: Uint8Array, msgHeader: MessageHeader, - msgBody: Buffer, + msgBody: Uint8Array, opts?: BSONSerializeOptions ) { this.parsed = false; @@ -443,7 +443,7 @@ export class DocumentSequence { documents: Document[]; serializedDocumentsLength: number; private chunks: Uint8Array[]; - private header: Buffer; + private header: Uint8Array; /** * Create a new document sequence for the provided field. @@ -483,7 +483,9 @@ export class DocumentSequence { // Push the document raw bson. this.chunks.push(buffer); // Write the new length. - this.header?.writeInt32LE(4 + this.field.length + 1 + this.serializedDocumentsLength, 1); + if (this.header) { + writeInt32LE(this.header, 4 + this.field.length + 1 + this.serializedDocumentsLength, 1); + } return this.serializedDocumentsLength + this.header.length; } @@ -541,8 +543,8 @@ export class OpMsgRequest { typeof options.exhaustAllowed === 'boolean' ? 
options.exhaustAllowed : false; } - toBin(): Buffer[] { - const buffers: Buffer[] = []; + toBin(): Uint8Array[] { + const buffers: Uint8Array[] = []; let flags = 0; if (this.checksumPresent) { @@ -568,11 +570,11 @@ export class OpMsgRequest { const command = this.command; totalLength += this.makeSections(buffers, command); - writeInt32LE(header, 0, totalLength); // messageLength - writeInt32LE(header, 4, this.requestId); // requestID - writeInt32LE(header, 8, 0); // responseTo - writeInt32LE(header, 12, OP_MSG); // opCode - writeInt32LE(header, 16, flags); // flags + writeInt32LE(header, totalLength, 0); // messageLength + writeInt32LE(header, this.requestId, 4); // requestID + writeInt32LE(header, 0, 8); // responseTo + writeInt32LE(header, OP_MSG, 12); // opCode + writeInt32LE(header, flags, 16); // flags return buffers; } @@ -640,8 +642,8 @@ export class OpMsgRequest { /** @internal */ export class OpMsgResponse { parsed: boolean; - raw: Buffer; - data: Buffer; + raw: Uint8Array; + data: Uint8Array; opts: BSONSerializeOptions; length: number; requestId: number; @@ -662,9 +664,9 @@ export class OpMsgResponse { sections: Uint8Array[] = []; constructor( - message: Buffer, + message: Uint8Array, msgHeader: MessageHeader, - msgBody: Buffer, + msgBody: Uint8Array, opts?: BSONSerializeOptions ) { this.parsed = false; @@ -768,7 +770,7 @@ export class OpCompressedRequest { return !uncompressibleCommands.has(commandName); } - async toBin(): Promise { + async toBin(): Promise { const concatenatedOriginalCommandBuffer = concatBuffers(this.command.toBin()); // otherwise, compress the message const messageToBeCompressed = concatenatedOriginalCommandBuffer.slice(MESSAGE_HEADER_SIZE); @@ -782,18 +784,17 @@ export class OpCompressedRequest { const msgHeader = allocateBuffer(MESSAGE_HEADER_SIZE); writeInt32LE( msgHeader, - 0, - MESSAGE_HEADER_SIZE + COMPRESSION_DETAILS_SIZE + compressedMessage.length + MESSAGE_HEADER_SIZE + COMPRESSION_DETAILS_SIZE + compressedMessage.length, + 0 
); // messageLength - writeInt32LE(msgHeader, 4, this.command.requestId); // requestID - writeInt32LE(msgHeader, 8, 0); // responseTo (zero) - writeInt32LE(msgHeader, 12, OP_COMPRESSED); // opCode - + writeInt32LE(msgHeader, this.command.requestId, 4); // requestID + writeInt32LE(msgHeader, 0, 8); // responseTo (zero) + writeInt32LE(msgHeader, OP_COMPRESSED, 12); // opCode // Create the compression details of OP_COMPRESSED const compressionDetails = allocateBuffer(COMPRESSION_DETAILS_SIZE); - writeInt32LE(compressionDetails, 0, originalCommandOpCode); // originalOpcode - writeInt32LE(compressionDetails, 4, messageToBeCompressed.length); // Size of the uncompressed compressedMessage, excluding the MsgHeader - writeInt32LE(compressionDetails, 8, Compressor[this.options.agreedCompressor]); // compressorID + writeInt32LE(compressionDetails, originalCommandOpCode, 0); // originalOpcode + writeInt32LE(compressionDetails, messageToBeCompressed.length, 4); // Size of the uncompressed compressedMessage, excluding the MsgHeader + writeInt32LE(compressionDetails, Compressor[this.options.agreedCompressor], 8); // compressorID return [msgHeader, compressionDetails, compressedMessage]; } } diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index 51c0725657b..09f8498723f 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -206,7 +206,7 @@ export class Connection extends TypedEventEmitter { private lastUseTime: number; private clusterTime: Document | null = null; private error: Error | null = null; - private dataEvents: AsyncGenerator | null = null; + private dataEvents: AsyncGenerator | null = null; private readonly socketTimeoutMS: number; private readonly monitorCommands: boolean; @@ -796,7 +796,7 @@ export class SizedMessageTransform extends Transform { this.connection = connection; } - override _transform(chunk: Buffer, encoding: unknown, callback: TransformCallback): void { + override _transform(chunk: Uint8Array, encoding: unknown, callback: 
TransformCallback): void { if (this.connection.delayedTimeoutId != null) { clearTimeout(this.connection.delayedTimeoutId); this.connection.delayedTimeoutId = null; diff --git a/src/cmap/handshake/client_metadata.ts b/src/cmap/handshake/client_metadata.ts index 0640930db20..8bc6cf4ed82 100644 --- a/src/cmap/handshake/client_metadata.ts +++ b/src/cmap/handshake/client_metadata.ts @@ -1,7 +1,7 @@ import * as os from 'os'; import * as process from 'process'; -import { BSON, type Document, Int32 } from '../../bson'; +import { BSON, type Document, fromUTF8, Int32, toUTF8 } from '../../bson'; import { MongoInvalidArgumentError } from '../../error'; import type { DriverInfo, MongoOptions } from '../../mongo_client'; import { fileIsAccessible } from '../../utils'; @@ -114,9 +114,9 @@ export async function makeClientMetadata( // Add app name first, it must be sent if (appName.length > 0) { const name = - Buffer.byteLength(appName, 'utf8') <= 128 + BSON.onDemand.ByteUtils.utf8ByteLength(appName) <= 128 ? 
appName - : Buffer.from(appName, 'utf8').subarray(0, 128).toString('utf8'); + : toUTF8(fromUTF8(appName), 0, 128, false); metadataDocument.ifItFitsItSits('application', { name }); } diff --git a/src/cmap/wire_protocol/compression.ts b/src/cmap/wire_protocol/compression.ts index 81ecd8c3363..3ed45579380 100644 --- a/src/cmap/wire_protocol/compression.ts +++ b/src/cmap/wire_protocol/compression.ts @@ -44,7 +44,7 @@ export const uncompressibleCommands = new Set([ const ZSTD_COMPRESSION_LEVEL = 3; const zlibInflate = (buf: zlib.InputType) => { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { zlib.inflate(buf, (error, result) => { if (error) return reject(error); resolve(result); @@ -53,7 +53,7 @@ const zlibInflate = (buf: zlib.InputType) => { }; const zlibDeflate = (buf: zlib.InputType, options: zlib.ZlibOptions) => { - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => { zlib.deflate(buf, options, (error, result) => { if (error) return reject(error); resolve(result); @@ -77,8 +77,8 @@ function loadSnappy() { // Facilitate compressing a message using an agreed compressor export async function compress( options: OpCompressesRequestOptions, - dataToBeCompressed: Buffer -): Promise { + dataToBeCompressed: Uint8Array +): Promise { const zlibOptions = {} as zlib.ZlibOptions; switch (options.agreedCompressor) { case 'snappy': { @@ -107,7 +107,10 @@ export async function compress( } // Decompress a message using the given compressor -export async function decompress(compressorID: number, compressedData: Buffer): Promise { +export async function decompress( + compressorID: number, + compressedData: Uint8Array +): Promise { if ( compressorID !== Compressor.snappy && compressorID !== Compressor.zstd && @@ -160,7 +163,7 @@ const MESSAGE_HEADER_SIZE = 16; export async function compressCommand( command: WriteProtocolMessageType, description: { agreedCompressor?: CompressorName; zlibCompressionLevel?: number } 
-): Promise { +): Promise { const finalCommand = description.agreedCompressor === 'none' || !OpCompressedRequest.canCompress(command) ? command @@ -179,7 +182,7 @@ export async function compressCommand( * * This method does not parse the response's BSON. */ -export async function decompressResponse(message: Buffer): Promise { +export async function decompressResponse(message: Uint8Array): Promise { const messageHeader: MessageHeader = { length: readInt32LE(message, 0), requestId: readInt32LE(message, 4), diff --git a/src/cmap/wire_protocol/on_data.ts b/src/cmap/wire_protocol/on_data.ts index 408c4a51492..cf42f7b0234 100644 --- a/src/cmap/wire_protocol/on_data.ts +++ b/src/cmap/wire_protocol/on_data.ts @@ -9,7 +9,7 @@ import { addAbortListener, kDispose, List, promiseWithResolvers } from '../../ut * An object holding references to a promise's resolve and reject functions. */ type PendingPromises = Omit< - ReturnType>>, + ReturnType>>, 'promise' >; @@ -32,7 +32,7 @@ export function onData( * value from the event in this list. Next time they call .next() * we pull the first value out of this list and resolve a promise with it. */ - const unconsumedEvents = new List(); + const unconsumedEvents = new List(); /** * When there has not yet been an event, a new promise will be created * and implicitly stored in this list. When an event occurs we take the first @@ -49,7 +49,7 @@ export function onData( /** Set to true only after event listeners have been removed. 
*/ let finished = false; - const iterator: AsyncGenerator & AsyncDisposable = { + const iterator: AsyncGenerator & AsyncDisposable = { next() { // First, we consume all unread events const value = unconsumedEvents.shift(); @@ -71,7 +71,7 @@ export function onData( if (finished) return closeHandler(); // Wait until an event happens - const { promise, resolve, reject } = promiseWithResolvers>(); + const { promise, resolve, reject } = promiseWithResolvers>(); unconsumedPromises.push({ resolve, reject }); return promise; }, @@ -107,7 +107,7 @@ export function onData( return iterator; - function eventHandler(value: Buffer) { + function eventHandler(value: Uint8Array) { const promise = unconsumedPromises.shift(); if (promise != null) promise.resolve({ value, done: false }); else unconsumedEvents.push(value); diff --git a/src/deps.ts b/src/deps.ts index f4c0b0f9cad..fc92beda8bd 100644 --- a/src/deps.ts +++ b/src/deps.ts @@ -50,12 +50,12 @@ type ZStandardLib = { * Compress using zstd. * @param buf - Buffer to be compressed. */ - compress(buf: Buffer, level?: number): Promise; + compress(buf: Uint8Array, level?: number): Promise; /** * Decompress using zstd. 
*/ - decompress(buf: Buffer): Promise; + decompress(buf: Uint8Array): Promise; }; export type ZStandard = ZStandardLib | { kModuleError: MongoMissingDependencyError }; @@ -144,13 +144,13 @@ export type SnappyLib = { * In order to support both we must check the return value of the function * @param buf - Buffer to be compressed */ - compress(buf: Buffer): Promise; + compress(buf: Uint8Array): Promise; /** * In order to support both we must check the return value of the function * @param buf - Buffer to be compressed */ - uncompress(buf: Buffer, opt: { asBuffer: true }): Promise; + uncompress(buf: Uint8Array, opt: { asBuffer: true }): Promise; }; export function getSnappy(): SnappyLib | { kModuleError: MongoMissingDependencyError } { diff --git a/src/gridfs/download.ts b/src/gridfs/download.ts index 563678c3175..fb6691cb8cd 100644 --- a/src/gridfs/download.ts +++ b/src/gridfs/download.ts @@ -1,6 +1,6 @@ import { Readable } from 'stream'; -import type { Document, ObjectId } from '../bson'; +import { ByteUtils, type Document, type ObjectId } from '../bson'; import type { Collection } from '../collection'; import { CursorTimeoutMode } from '../cursor/abstract_cursor'; import type { FindCursor } from '../cursor/find_cursor'; @@ -248,7 +248,7 @@ function doRead(stream: GridFSBucketReadStream): void { ); } - let buf = Buffer.isBuffer(doc.data) ? doc.data : doc.data.buffer; + let buf = ByteUtils.isUint8Array(doc.data) ? 
doc.data : doc.data.buffer; if (buf.byteLength !== expectedLength) { if (bytesRemaining <= 0) { diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index 96ecef1c730..0585467b427 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -1,6 +1,6 @@ import { Writable } from 'stream'; -import { allocateBuffer, type Document, ObjectId } from '../bson'; +import { allocateBuffer, ByteUtils, type Document, fromUTF8, ObjectId } from '../bson'; import type { Collection } from '../collection'; import { CursorTimeoutMode } from '../cursor/abstract_cursor'; import { @@ -62,7 +62,7 @@ export class GridFSBucketWriteStream extends Writable { /** The number of bytes that each chunk will be limited to */ chunkSizeBytes: number; /** Space used to store a chunk currently being inserted */ - bufToStore: Buffer; + bufToStore: Uint8Array; /** Accumulates the number of bytes inserted as the stream uploads chunks */ length: number; /** Accumulates the number of chunks inserted as the stream uploads file contents */ @@ -178,7 +178,7 @@ export class GridFSBucketWriteStream extends Writable { * @param callback - Function to call when the chunk was added to the buffer, or if the entire chunk was persisted to MongoDB if this chunk caused a flush. 
*/ override _write( - chunk: Buffer | string, + chunk: Uint8Array | string, encoding: BufferEncoding, callback: Callback ): void { @@ -227,7 +227,7 @@ function handleError(stream: GridFSBucketWriteStream, error: Error, callback: Ca queueMicrotask(() => callback(error)); } -function createChunkDoc(filesId: ObjectId, n: number, data: Buffer): GridFSChunk { +function createChunkDoc(filesId: ObjectId, n: number, data: Uint8Array): GridFSChunk { return { _id: new ObjectId(), files_id: filesId, @@ -409,7 +409,7 @@ function createFilesDoc( function doWrite( stream: GridFSBucketWriteStream, - chunk: Buffer | string, + chunk: Uint8Array | string, encoding: BufferEncoding, callback: Callback ): void { @@ -417,13 +417,16 @@ function doWrite( return; } - const inputBuf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding); + const chunkString = chunk as string; + const inputBuf = chunkString + ? fromUTF8(chunkString) + : ByteUtils.toLocalBufferType(chunk as Uint8Array); stream.length += inputBuf.length; // Input is small enough to fit in our buffer if (stream.pos + inputBuf.length < stream.chunkSizeBytes) { - inputBuf.copy(stream.bufToStore, stream.pos); + inputBuf.set(stream.bufToStore, stream.pos); stream.pos += inputBuf.length; queueMicrotask(callback); return; @@ -437,12 +440,12 @@ function doWrite( let outstandingRequests = 0; while (inputBufRemaining > 0) { const inputBufPos = inputBuf.length - inputBufRemaining; - inputBuf.copy(stream.bufToStore, stream.pos, inputBufPos, inputBufPos + numToCopy); + inputBuf.set(stream.bufToStore.subarray(inputBufPos, inputBufPos + numToCopy), stream.pos); stream.pos += numToCopy; spaceRemaining -= numToCopy; let doc: GridFSChunk; if (spaceRemaining === 0) { - doc = createChunkDoc(stream.id, stream.n, Buffer.from(stream.bufToStore)); + doc = createChunkDoc(stream.id, stream.n, stream.bufToStore); const remainingTimeMS = stream.timeoutContext?.remainingTimeMS; if (remainingTimeMS != null && remainingTimeMS <= 0) { @@ -496,7 
+499,7 @@ function writeRemnant(stream: GridFSBucketWriteStream, callback: Callback): void // Create a new buffer to make sure the buffer isn't bigger than it needs // to be. const remnant = allocateBuffer(stream.pos); - stream.bufToStore.copy(remnant, 0, 0, stream.pos); + remnant.set(stream.bufToStore.subarray(0, stream.pos), 0); const doc = createChunkDoc(stream.id, stream.n, remnant); // If the stream was aborted, do not write remnant diff --git a/src/sessions.ts b/src/sessions.ts index 2135c5dceda..775e1d6a61b 100644 --- a/src/sessions.ts +++ b/src/sessions.ts @@ -1,6 +1,13 @@ import { setTimeout } from 'timers/promises'; -import { allocateUnsafeBuffer, Binary, type Document, Long, type Timestamp } from './bson'; +import { + allocateUnsafeBuffer, + Binary, + ByteUtils, + type Document, + Long, + type Timestamp +} from './bson'; import type { CommandOptions, Connection } from './cmap/connection'; import { ConnectionPoolMetrics } from './cmap/metrics'; import { type MongoDBResponse } from './cmap/wire_protocol/responses'; @@ -37,7 +44,6 @@ import { TxnState } from './transactions'; import { - ByteUtils, calculateDurationInMs, commandSupportsReadConcern, isPromiseLike, diff --git a/src/utils.ts b/src/utils.ts index ec0e9a5d6d3..7da12750c63 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -9,12 +9,12 @@ import { clearTimeout, setTimeout } from 'timers'; import { allocateBuffer, allocateUnsafeBuffer, + ByteUtils as BSONByteUtils, deserialize, type Document, getInt32LE, ObjectId, - resolveBSONOptions, - toLocalBufferType + resolveBSONOptions } from './bson'; import type { Connection } from './cmap/connection'; import { MAX_SUPPORTED_WIRE_VERSION } from './cmap/wire_protocol/constants'; @@ -54,7 +54,14 @@ export type Callback = (error?: AnyError, result?: T) => void; export type AnyOptions = Document; export const ByteUtils = { - toLocalBufferType: toLocalBufferType, + toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { + if 
(BSONByteUtils.isUint8Array(buffer)) { + // eslint-disable-next-line no-restricted-globals + return Buffer.from(buffer as Uint8Array); + } else { + return buffer as Buffer; + } + }, equals(this: void, seqA: Uint8Array, seqB: Uint8Array) { return ByteUtils.toLocalBufferType(seqA).equals(seqB); @@ -323,7 +330,7 @@ export function* makeCounter(seed = 0): Generator { * Synchronously Generate a UUIDv4 * @internal */ -export function uuidV4(): Buffer { +export function uuidV4(): Uint8Array { const result = crypto.randomBytes(16); result[6] = (result[6] & 0x0f) | 0x40; result[8] = (result[8] & 0x3f) | 0x80; @@ -798,7 +805,7 @@ export class List { * @internal */ export class BufferPool { - private buffers: List; + private buffers: List; private totalByteLength: number; constructor() { @@ -811,7 +818,7 @@ export class BufferPool { } /** Adds a buffer to the internal buffer pool list */ - append(buffer: Buffer): void { + append(buffer: Uint8Array): void { this.buffers.push(buffer); this.totalByteLength += buffer.length; } @@ -842,7 +849,7 @@ export class BufferPool { } /** Reads the requested number of bytes, optionally consuming them */ - read(size: number): Buffer { + read(size: number): Uint8Array { if (typeof size !== 'number' || size < 0) { throw new MongoInvalidArgumentError('Argument "size" must be a non-negative number'); } @@ -1245,8 +1252,8 @@ export function squashError(_error: unknown) { } export const randomBytes = (size: number) => { - return new Promise((resolve, reject) => { - crypto.randomBytes(size, (error: Error | null, buf: Buffer) => { + return new Promise((resolve, reject) => { + crypto.randomBytes(size, (error: Error | null, buf: Uint8Array) => { if (error) return reject(error); resolve(buf); }); @@ -1336,10 +1343,10 @@ export function decorateDecryptionResult( ): void { if (isTopLevelDecorateCall) { // The original value could have been either a JS object or a BSON buffer - if (Buffer.isBuffer(original)) { - original = deserialize(original); + if 
(BSONByteUtils.isUint8Array(original)) { + original = deserialize(ByteUtils.toLocalBufferType(original as Uint8Array)); } - if (Buffer.isBuffer(decrypted)) { + if (BSONByteUtils.isUint8Array(decrypted)) { throw new MongoRuntimeError('Expected result of decryption to be deserialized BSON object'); } } diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index f7f087b2acf..c0504323f95 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -19,6 +19,7 @@ import { TopologyType, type WriteConcernSettings } from '../../../src'; +import { ByteUtils } from '../../../src/bson'; import { type CompressorName } from '../../../src/cmap/wire_protocol/compression'; import { HostAddress } from '../../../src/utils'; import { getEnvironmentalOptions } from '../utils'; @@ -493,7 +494,7 @@ export class TestConfiguration { // @ts-expect-error: toExtendedJSON internal on double but not on long return { number: new Double(value).toExtendedJSON() }; } - if (Buffer.isBuffer(value)) + if (ByteUtils.isUint8Array(value)) return { [value.constructor.name]: Buffer.prototype.base64Slice.call(value) }; if (value === undefined) return { undefined: 'key was set but equal to undefined' }; return value; diff --git a/test/unit/cmap/commands.test.ts b/test/unit/cmap/commands.test.ts index 7666a4a6552..5762411fe7b 100644 --- a/test/unit/cmap/commands.test.ts +++ b/test/unit/cmap/commands.test.ts @@ -1,6 +1,7 @@ import * as BSON from 'bson'; import { expect } from 'chai'; +import { ByteUtils, readInt32LE } from '../../../src/bson'; import { DocumentSequence, OpMsgRequest, OpReply } from '../../../src/cmap/commands'; describe('commands', function () { @@ -41,12 +42,12 @@ describe('commands', function () { it('sets the length of the document sequence', function () { // Bytes starting at index 1 is a 4 byte length. 
- expect(buffers[3].readInt32LE(1)).to.equal(25); + expect(readInt32LE(buffers[3], 1)).to.equal(25); }); it('sets the name of the first field to be replaced', function () { // Bytes starting at index 5 is the field name. - expect(buffers[3].toString('utf8', 5, 10)).to.equal('field'); + expect(ByteUtils.toUTF8(buffers[3], 5, 10, true)).to.equal('field'); }); }); @@ -81,12 +82,12 @@ describe('commands', function () { it('sets the length of the first document sequence', function () { // Bytes starting at index 1 is a 4 byte length. - expect(buffers[3].readInt32LE(1)).to.equal(28); + expect(readInt32LE(buffers[3], 1)).to.equal(28); }); it('sets the name of the first field to be replaced', function () { // Bytes starting at index 5 is the field name. - expect(buffers[3].toString('utf8', 5, 13)).to.equal('fieldOne'); + expect(ByteUtils.toUTF8(buffers[3], 5, 13, true)).to.equal('fieldOne'); }); it('sets the document sequence sections second type to 1', function () { @@ -96,12 +97,12 @@ describe('commands', function () { it('sets the length of the second document sequence', function () { // Bytes starting at index 1 is a 4 byte length. - expect(buffers[3].readInt32LE(30)).to.equal(28); + expect(readInt32LE(buffers[3], 30)).to.equal(28); }); it('sets the name of the second field to be replaced', function () { // Bytes starting at index 33 is the field name. 
- expect(buffers[3].toString('utf8', 34, 42)).to.equal('fieldTwo'); + expect(ByteUtils.toUTF8(buffers[3], 34, 42, true)).to.equal('fieldTwo'); }); }); }); From 74fc9e3def53d8e916311fec1e9c5d6a0df2def6 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 14 Jan 2026 12:07:25 -0800 Subject: [PATCH 04/18] test fixes --- src/utils.ts | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/utils.ts b/src/utils.ts index 7da12750c63..c48c789d779 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -55,12 +55,7 @@ export type AnyOptions = Document; export const ByteUtils = { toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { - if (BSONByteUtils.isUint8Array(buffer)) { - // eslint-disable-next-line no-restricted-globals - return Buffer.from(buffer as Uint8Array); - } else { - return buffer as Buffer; - } + return BSONByteUtils.toLocalBufferType(buffer) as Buffer; }, equals(this: void, seqA: Uint8Array, seqB: Uint8Array) { From 732b12eb0ecc20afdfb7e4e4f911b2310ba93042 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 14 Jan 2026 12:17:12 -0800 Subject: [PATCH 05/18] remove onDemand accessors --- src/bson.ts | 22 +++++++++++----------- src/cmap/auth/aws4.ts | 8 ++++---- src/cmap/commands.ts | 2 +- src/cmap/connection.ts | 2 +- src/cmap/handshake/client_metadata.ts | 2 +- 5 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/bson.ts b/src/bson.ts index eda6746bf8f..bb8d96a4398 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -38,32 +38,32 @@ export function parseToElementsToArray(bytes: Uint8Array, offset?: number): BSON return Array.isArray(res) ? 
res : [...res]; } -export const getInt32LE = BSON.onDemand.NumberUtils.getInt32LE; -export const getFloat64LE = BSON.onDemand.NumberUtils.getFloat64LE; -export const getBigInt64LE = BSON.onDemand.NumberUtils.getBigInt64LE; -export const toUTF8 = BSON.onDemand.ByteUtils.toUTF8; +export const getInt32LE = BSON.NumberUtils.getInt32LE; +export const getFloat64LE = BSON.NumberUtils.getFloat64LE; +export const getBigInt64LE = BSON.NumberUtils.getBigInt64LE; +export const toUTF8 = BSON.ByteUtils.toUTF8; // BSON wrappers // writeInt32LE, same order of arguments as Buffer.writeInt32LE export const writeInt32LE = (destination: Uint8Array, value: number, offset: number) => - BSON.onDemand.NumberUtils.setInt32LE(destination, offset, value); + BSON.NumberUtils.setInt32LE(destination, offset, value); // various wrappers that consume and return local buffer types export const fromUTF8 = (text: string) => - ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromUTF8(text)); + ByteUtils.toLocalBufferType(BSON.ByteUtils.fromUTF8(text)); export const fromBase64 = (b64: string) => - ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromBase64(b64)); + ByteUtils.toLocalBufferType(BSON.ByteUtils.fromBase64(b64)); export const fromNumberArray = (array: number[]) => - ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.fromNumberArray(array)); + ByteUtils.toLocalBufferType(BSON.ByteUtils.fromNumberArray(array)); export const concatBuffers = (list: Uint8Array[]) => { - return ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.concat(list)); + return ByteUtils.toLocalBufferType(BSON.ByteUtils.concat(list)); }; export const allocateBuffer = (size: number) => - ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.allocate(size)); + ByteUtils.toLocalBufferType(BSON.ByteUtils.allocate(size)); export const allocateUnsafeBuffer = (size: number) => - ByteUtils.toLocalBufferType(BSON.onDemand.ByteUtils.allocateUnsafe(size)); + ByteUtils.toLocalBufferType(BSON.ByteUtils.allocateUnsafe(size)); // 
validates buffer inputs, used for read operations const validateBufferInputs = (buffer: Uint8Array, offset: number, length: number) => { diff --git a/src/cmap/auth/aws4.ts b/src/cmap/auth/aws4.ts index 912cdbdcaa5..53bb0d356fe 100644 --- a/src/cmap/auth/aws4.ts +++ b/src/cmap/auth/aws4.ts @@ -31,7 +31,7 @@ export type SignedHeaders = { const getHexSha256 = async (str: string): Promise => { const data = stringToBuffer(str); const hashBuffer = await crypto.subtle.digest('SHA-256', data); - const hashHex = BSON.onDemand.ByteUtils.toHex(new Uint8Array(hashBuffer)); + const hashHex = BSON.ByteUtils.toHex(new Uint8Array(hashBuffer)); return hashHex; }; @@ -81,8 +81,8 @@ const convertHeaderValue = (value: string | number) => { * @returns Uint8Array containing the UTF-8 encoded string. */ function stringToBuffer(str: string): Uint8Array { - const data = new Uint8Array(BSON.onDemand.ByteUtils.utf8ByteLength(str)); - BSON.onDemand.ByteUtils.encodeUTF8Into(data, str, 0); + const data = new Uint8Array(BSON.ByteUtils.utf8ByteLength(str)); + BSON.ByteUtils.encodeUTF8Into(data, str, 0); return data; } @@ -189,7 +189,7 @@ export async function aws4Sign( // 5. Calculate the signature const signatureBuffer = await getHmacSha256(signingKey, stringToSign); - const signature = BSON.onDemand.ByteUtils.toHex(signatureBuffer); + const signature = BSON.ByteUtils.toHex(signatureBuffer); // 6. 
Add the signature to the request // Calculate the Authorization header diff --git a/src/cmap/commands.ts b/src/cmap/commands.ts index 5dda9c872d2..fe80a2a0625 100644 --- a/src/cmap/commands.ts +++ b/src/cmap/commands.ts @@ -40,7 +40,7 @@ const QUERY_FAILURE = 2; const SHARD_CONFIG_STALE = 4; const AWAIT_CAPABLE = 8; -const encodeUTF8Into = BSON.onDemand.ByteUtils.encodeUTF8Into; +const encodeUTF8Into = BSON.ByteUtils.encodeUTF8Into; /** @internal */ export type WriteProtocolMessageType = OpQueryRequest | OpMsgRequest; diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index 09f8498723f..d812384847e 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -176,7 +176,7 @@ function streamIdentifier(stream: Stream, options: ConnectionOptions): string { return HostAddress.fromHostPort(remoteAddress, remotePort).toString(); } - return BSON.onDemand.ByteUtils.toHex(uuidV4()); + return BSON.ByteUtils.toHex(uuidV4()); } /** @internal */ diff --git a/src/cmap/handshake/client_metadata.ts b/src/cmap/handshake/client_metadata.ts index 8bc6cf4ed82..deb2c749cd3 100644 --- a/src/cmap/handshake/client_metadata.ts +++ b/src/cmap/handshake/client_metadata.ts @@ -114,7 +114,7 @@ export async function makeClientMetadata( // Add app name first, it must be sent if (appName.length > 0) { const name = - BSON.onDemand.ByteUtils.utf8ByteLength(appName) <= 128 + BSON.ByteUtils.utf8ByteLength(appName) <= 128 ? 
appName : toUTF8(fromUTF8(appName), 0, 128, false); metadataDocument.ifItFitsItSits('application', { name }); From 0ee1a170f4eedc1f96d712196b502ee467719db3 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 14 Jan 2026 13:30:53 -0800 Subject: [PATCH 06/18] update based on isUint8Array signature change --- src/client-side-encryption/auto_encrypter.ts | 10 ++++------ src/client-side-encryption/client_encryption.ts | 10 +++------- src/utils.ts | 2 +- 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/src/client-side-encryption/auto_encrypter.ts b/src/client-side-encryption/auto_encrypter.ts index 806c5f758a1..f3fe1ffb283 100644 --- a/src/client-side-encryption/auto_encrypter.ts +++ b/src/client-side-encryption/auto_encrypter.ts @@ -257,7 +257,7 @@ export class AutoEncrypter { }; if (options.schemaMap) { if (ByteUtils.isUint8Array(options.schemaMap)) { - mongoCryptOptions.schemaMap = options.schemaMap as Uint8Array; + mongoCryptOptions.schemaMap = options.schemaMap; } else { mongoCryptOptions.schemaMap = serialize(options.schemaMap); } @@ -265,14 +265,14 @@ export class AutoEncrypter { if (options.encryptedFieldsMap) { if (ByteUtils.isUint8Array(options.encryptedFieldsMap)) { - mongoCryptOptions.encryptedFieldsMap = options.encryptedFieldsMap as Uint8Array; + mongoCryptOptions.encryptedFieldsMap = options.encryptedFieldsMap; } else { mongoCryptOptions.encryptedFieldsMap = serialize(options.encryptedFieldsMap); } } if (ByteUtils.isUint8Array(this._kmsProviders)) { - mongoCryptOptions.kmsProviders = this._kmsProviders as any as Uint8Array; + mongoCryptOptions.kmsProviders = this._kmsProviders; } else { mongoCryptOptions.kmsProviders = serialize(this._kmsProviders); } @@ -402,9 +402,7 @@ export class AutoEncrypter { return cmd; } - const commandBuffer: Uint8Array = ByteUtils.isUint8Array(cmd) - ? (cmd as Uint8Array) - : serialize(cmd, options); + const commandBuffer: Uint8Array = ByteUtils.isUint8Array(cmd) ? 
cmd : serialize(cmd, options); const context = this._mongocrypt.makeEncryptionContext( MongoDBCollectionNamespace.fromString(ns).db, commandBuffer diff --git a/src/client-side-encryption/client_encryption.ts b/src/client-side-encryption/client_encryption.ts index 851c4200b22..13e0914a443 100644 --- a/src/client-side-encryption/client_encryption.ts +++ b/src/client-side-encryption/client_encryption.ts @@ -142,15 +142,11 @@ export class ClientEncryption { throw new MongoCryptInvalidArgumentError('Missing required option `keyVaultNamespace`'); } - let kmsProviders; - if (!ByteUtils.isUint8Array(this._kmsProviders)) { - kmsProviders = serialize(this._kmsProviders); - } else { - kmsProviders = this._kmsProviders as any as Uint8Array; - } const mongoCryptOptions: MongoCryptOptions = { ...options, - kmsProviders, + kmsProviders: !ByteUtils.isUint8Array(this._kmsProviders) + ? (serialize(this._kmsProviders) as Buffer) + : this._kmsProviders, errorWrapper: defaultErrorWrapper }; diff --git a/src/utils.ts b/src/utils.ts index c48c789d779..55bcd1cf34b 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1339,7 +1339,7 @@ export function decorateDecryptionResult( if (isTopLevelDecorateCall) { // The original value could have been either a JS object or a BSON buffer if (BSONByteUtils.isUint8Array(original)) { - original = deserialize(ByteUtils.toLocalBufferType(original as Uint8Array)); + original = deserialize(ByteUtils.toLocalBufferType(original)); } if (BSONByteUtils.isUint8Array(decrypted)) { throw new MongoRuntimeError('Expected result of decryption to be deserialized BSON object'); From 5ddfae46cbc84ccf0e66694c9cbf1f97fa824c90 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 14 Jan 2026 14:29:51 -0800 Subject: [PATCH 07/18] remove unnecessary toLocalBufferType calls --- src/utils.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/utils.ts b/src/utils.ts index 55bcd1cf34b..c8efa485d79 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -876,7 
+876,7 @@ export class BufferPool { } } - return ByteUtils.toLocalBufferType(result); + return result; } } @@ -1339,7 +1339,7 @@ export function decorateDecryptionResult( if (isTopLevelDecorateCall) { // The original value could have been either a JS object or a BSON buffer if (BSONByteUtils.isUint8Array(original)) { - original = deserialize(ByteUtils.toLocalBufferType(original)); + original = deserialize(original); } if (BSONByteUtils.isUint8Array(decrypted)) { throw new MongoRuntimeError('Expected result of decryption to be deserialized BSON object'); From 45ebf1e9895717fa90eb4b094f8caf0688695b01 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Thu, 15 Jan 2026 13:05:28 -0800 Subject: [PATCH 08/18] point to bson PR https://github.com/mongodb/js-bson/pull/860/ --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 887e4c95232..6d9c032a014 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,7 @@ "license": "Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": "^7.0.0", + "bson": "github:mongodb/js-bson#e6b99b374732afe33e4682e0180cc23bd7e1573e", "mongodb-connection-string-url": "^7.0.0" }, "devDependencies": { @@ -3924,8 +3924,8 @@ }, "node_modules/bson": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/bson/-/bson-7.0.0.tgz", - "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==", + "resolved": "git+ssh://git@github.com/mongodb/js-bson.git#e6b99b374732afe33e4682e0180cc23bd7e1573e", + "integrity": "sha512-qScwH9QFFA4DpSW+WGQDz6uooAJM3b5YthYP8HNDqVnyywhJWZEv5mWyHS2dthOsQk3TXaGVcgAp63/0kKmV2g==", "license": "Apache-2.0", "engines": { "node": ">=20.19.0" diff --git a/package.json b/package.json index 3f7c7f83666..0ebd411ef5d 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ }, "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": 
"^7.0.0", + "bson": "github:mongodb/js-bson#e6b99b374732afe33e4682e0180cc23bd7e1573e", "mongodb-connection-string-url": "^7.0.0" }, "peerDependencies": { From 79d0d19d3de4cf14ed00e352ee5e33edc2d21b20 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Fri, 16 Jan 2026 13:19:56 -0800 Subject: [PATCH 09/18] use bson 7.1.0 --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6d9c032a014..dfbcde23f26 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,7 @@ "license": "Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": "github:mongodb/js-bson#e6b99b374732afe33e4682e0180cc23bd7e1573e", + "bson": "^7.1.0", "mongodb-connection-string-url": "^7.0.0" }, "devDependencies": { @@ -3923,9 +3923,9 @@ } }, "node_modules/bson": { - "version": "7.0.0", - "resolved": "git+ssh://git@github.com/mongodb/js-bson.git#e6b99b374732afe33e4682e0180cc23bd7e1573e", - "integrity": "sha512-qScwH9QFFA4DpSW+WGQDz6uooAJM3b5YthYP8HNDqVnyywhJWZEv5mWyHS2dthOsQk3TXaGVcgAp63/0kKmV2g==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/bson/-/bson-7.1.0.tgz", + "integrity": "sha512-ffDaLH5Qw04SEv2nEtki5XwKeXVpljZMDDUYRxYj68vJhLWTYPOROg/Y+YMuAvRSrCAj1qzfmGDKsIT2QSkGYA==", "license": "Apache-2.0", "engines": { "node": ">=20.19.0" diff --git a/package.json b/package.json index 0ebd411ef5d..5fdedd4117a 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ }, "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": "github:mongodb/js-bson#e6b99b374732afe33e4682e0180cc23bd7e1573e", + "bson": "^7.1.0", "mongodb-connection-string-url": "^7.0.0" }, "peerDependencies": { From 82a34bb4607671f939d2e507e9d17776fe854220 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Tue, 20 Jan 2026 09:59:43 -0800 Subject: [PATCH 10/18] pick up BSON 7.1.1 --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) 
diff --git a/package-lock.json b/package-lock.json index dfbcde23f26..cbf0eb13e7d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,7 @@ "license": "Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": "^7.1.0", + "bson": "^7.1.1", "mongodb-connection-string-url": "^7.0.0" }, "devDependencies": { @@ -3923,9 +3923,9 @@ } }, "node_modules/bson": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/bson/-/bson-7.1.0.tgz", - "integrity": "sha512-ffDaLH5Qw04SEv2nEtki5XwKeXVpljZMDDUYRxYj68vJhLWTYPOROg/Y+YMuAvRSrCAj1qzfmGDKsIT2QSkGYA==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/bson/-/bson-7.1.1.tgz", + "integrity": "sha512-TtJgBB+QyOlWjrbM+8bRgH84VM/xrDjyBFgSgGrfZF4xvt6gbEDtcswm27Tn9F9TWsjQybxT8b8VpCP/oJK4Dw==", "license": "Apache-2.0", "engines": { "node": ">=20.19.0" diff --git a/package.json b/package.json index 5fdedd4117a..0315d3fd215 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ }, "dependencies": { "@mongodb-js/saslprep": "^1.3.0", - "bson": "^7.1.0", + "bson": "^7.1.1", "mongodb-connection-string-url": "^7.0.0" }, "peerDependencies": { From cbc488c4689d9aa574ce5e4a2c504546fd45857f Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Tue, 20 Jan 2026 13:13:45 -0800 Subject: [PATCH 11/18] resolve conflicts --- src/cmap/handshake/client_metadata.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/cmap/handshake/client_metadata.ts b/src/cmap/handshake/client_metadata.ts index deb2c749cd3..7e26f7b6f3b 100644 --- a/src/cmap/handshake/client_metadata.ts +++ b/src/cmap/handshake/client_metadata.ts @@ -336,17 +336,18 @@ declare const Bun: { (): void; version?: string } | undefined; * with a future change to these global objects. */ function getRuntimeInfo(): string { + const endianness = BSON.NumberUtils.isBigEndian ? 'BE' : 'LE'; if ('Deno' in globalThis) { const version = typeof Deno?.version?.deno === 'string' ? 
Deno?.version?.deno : '0.0.0-unknown'; - return `Deno v${version}, ${os.endianness()}`; + return `Deno v${version}, ${endianness}`; } if ('Bun' in globalThis) { const version = typeof Bun?.version === 'string' ? Bun?.version : '0.0.0-unknown'; - return `Bun v${version}, ${os.endianness()}`; + return `Bun v${version}, ${endianness}`; } - return `Node.js ${process.version}, ${os.endianness()}`; + return `Node.js ${process.version}, ${endianness}`; } From 12ff51fee967058716597bb35ced88f9ee6c03db Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Tue, 20 Jan 2026 17:33:35 -0800 Subject: [PATCH 12/18] add an easy-to-use copyBuffer method --- src/bson.ts | 17 +++++++++++++++++ src/gridfs/upload.ts | 24 ++++++++++++++++++++---- 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/src/bson.ts b/src/bson.ts index f2edc654550..2072efdc30f 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -65,6 +65,23 @@ export const allocateBuffer = (size: number) => ByteUtils.toLocalBufferType(BSON.ByteUtils.allocate(size)); export const allocateUnsafeBuffer = (size: number) => ByteUtils.toLocalBufferType(BSON.ByteUtils.allocateUnsafe(size)); +export const copyBuffer = (input: { + source: Uint8Array; + target: Uint8Array; + targetStart?: number; + sourceStart?: number; + sourceEnd?: number; +}): number => { + const { source, target, targetStart = 0, sourceStart = 0, sourceEnd } = input; + const sourceEndActual = sourceEnd ?? 
source.length; + const srcSlice = source.subarray(sourceStart, sourceEndActual); + const maxLen = Math.min(srcSlice.length, target.length - targetStart); + if (maxLen <= 0) { + return 0; + } + target.set(srcSlice.subarray(0, maxLen), targetStart); + return maxLen; +}; // validates buffer inputs, used for read operations const validateBufferInputs = (buffer: Uint8Array, offset: number, length: number) => { diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index 0585467b427..a27960455a5 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -1,6 +1,6 @@ import { Writable } from 'stream'; -import { allocateBuffer, ByteUtils, type Document, fromUTF8, ObjectId } from '../bson'; +import { allocateBuffer, ByteUtils, copyBuffer, type Document, fromUTF8, ObjectId } from '../bson'; import type { Collection } from '../collection'; import { CursorTimeoutMode } from '../cursor/abstract_cursor'; import { @@ -426,7 +426,11 @@ function doWrite( // Input is small enough to fit in our buffer if (stream.pos + inputBuf.length < stream.chunkSizeBytes) { - inputBuf.set(stream.bufToStore, stream.pos); + copyBuffer({ + source: inputBuf, + target: stream.bufToStore, + targetStart: stream.pos + }); stream.pos += inputBuf.length; queueMicrotask(callback); return; @@ -440,7 +444,13 @@ function doWrite( let outstandingRequests = 0; while (inputBufRemaining > 0) { const inputBufPos = inputBuf.length - inputBufRemaining; - inputBuf.set(stream.bufToStore.subarray(inputBufPos, inputBufPos + numToCopy), stream.pos); + copyBuffer({ + source: inputBuf, + target: stream.bufToStore, + targetStart: stream.pos, + sourceStart: inputBufPos, + sourceEnd: inputBufPos + numToCopy + }); stream.pos += numToCopy; spaceRemaining -= numToCopy; let doc: GridFSChunk; @@ -499,7 +509,13 @@ function writeRemnant(stream: GridFSBucketWriteStream, callback: Callback): void // Create a new buffer to make sure the buffer isn't bigger than it needs // to be. 
const remnant = allocateBuffer(stream.pos); - remnant.set(stream.bufToStore.subarray(0, stream.pos), 0); + copyBuffer({ + source: stream.bufToStore, + target: remnant, + targetStart: 0, + sourceStart: 0, + sourceEnd: stream.pos + }); const doc = createChunkDoc(stream.id, stream.n, remnant); // If the stream was aborted, do not write remnant From 983571bf3adacab89262a1f0c5dbb21569957d2b Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Wed, 21 Jan 2026 10:47:39 -0800 Subject: [PATCH 13/18] minor fix: create copy of data --- src/gridfs/upload.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index a27960455a5..b23100edcce 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -455,7 +455,7 @@ function doWrite( spaceRemaining -= numToCopy; let doc: GridFSChunk; if (spaceRemaining === 0) { - doc = createChunkDoc(stream.id, stream.n, stream.bufToStore); + doc = createChunkDoc(stream.id, stream.n, new Uint8Array(stream.bufToStore)); const remainingTimeMS = stream.timeoutContext?.remainingTimeMS; if (remainingTimeMS != null && remainingTimeMS <= 0) { From e637852e4fe0c02060d8d53be413e3008c90be20 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Thu, 22 Jan 2026 09:25:59 -0800 Subject: [PATCH 14/18] remove unnecessary ByteUtils.toLocalBufferType calls --- src/bson.ts | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/bson.ts b/src/bson.ts index 2072efdc30f..0b3782a97e9 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -1,5 +1,5 @@ /* eslint-disable no-restricted-imports */ -import { BSON, ByteUtils, type DeserializeOptions, type SerializeOptions } from 'bson'; +import { BSON, type DeserializeOptions, type SerializeOptions } from 'bson'; export { Binary, @@ -43,28 +43,19 @@ export const getInt32LE = BSON.NumberUtils.getInt32LE; export const getFloat64LE = BSON.NumberUtils.getFloat64LE; export const getBigInt64LE = BSON.NumberUtils.getBigInt64LE; export 
const toUTF8 = BSON.ByteUtils.toUTF8; - -// BSON wrappers +export const fromUTF8 = BSON.ByteUtils.fromUTF8; +export const fromBase64 = BSON.ByteUtils.fromBase64; +export const fromNumberArray = BSON.ByteUtils.fromNumberArray; +export const concatBuffers = BSON.ByteUtils.concat; +export const allocateBuffer = BSON.ByteUtils.allocate; +export const allocateUnsafeBuffer = BSON.ByteUtils.allocateUnsafe; // writeInt32LE, same order of arguments as Buffer.writeInt32LE export const writeInt32LE = (destination: Uint8Array, value: number, offset: number) => BSON.NumberUtils.setInt32LE(destination, offset, value); -// various wrappers that consume and return local buffer types - -export const fromUTF8 = (text: string) => - ByteUtils.toLocalBufferType(BSON.ByteUtils.fromUTF8(text)); -export const fromBase64 = (b64: string) => - ByteUtils.toLocalBufferType(BSON.ByteUtils.fromBase64(b64)); -export const fromNumberArray = (array: number[]) => - ByteUtils.toLocalBufferType(BSON.ByteUtils.fromNumberArray(array)); -export const concatBuffers = (list: Uint8Array[]) => { - return ByteUtils.toLocalBufferType(BSON.ByteUtils.concat(list)); -}; -export const allocateBuffer = (size: number) => - ByteUtils.toLocalBufferType(BSON.ByteUtils.allocate(size)); -export const allocateUnsafeBuffer = (size: number) => - ByteUtils.toLocalBufferType(BSON.ByteUtils.allocateUnsafe(size)); +// copyBuffer: copies from source buffer to target buffer, returns number of bytes copied +// inputs are explicitly named to avoid confusion export const copyBuffer = (input: { source: Uint8Array; target: Uint8Array; @@ -92,6 +83,8 @@ const validateBufferInputs = (buffer: Uint8Array, offset: number, length: number } }; +// readInt32LE, reads a 32-bit integer from buffer at given offset +// throws if offset is out of bounds export const readInt32LE = (buffer: Uint8Array, offset: number): number => { validateBufferInputs(buffer, offset, 4); return getInt32LE(buffer, offset); From 
736ba62e05ffea276a9b0efa4ee3bb0cb0284ed1 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Thu, 22 Jan 2026 13:21:05 -0800 Subject: [PATCH 15/18] pr feedback --- src/client-side-encryption/auto_encrypter.ts | 2 +- src/client-side-encryption/client_encryption.ts | 4 +--- src/gridfs/upload.ts | 5 +---- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/src/client-side-encryption/auto_encrypter.ts b/src/client-side-encryption/auto_encrypter.ts index f3fe1ffb283..53426609f90 100644 --- a/src/client-side-encryption/auto_encrypter.ts +++ b/src/client-side-encryption/auto_encrypter.ts @@ -402,7 +402,7 @@ export class AutoEncrypter { return cmd; } - const commandBuffer: Uint8Array = ByteUtils.isUint8Array(cmd) ? cmd : serialize(cmd, options); + const commandBuffer: Uint8Array = serialize(cmd, options); const context = this._mongocrypt.makeEncryptionContext( MongoDBCollectionNamespace.fromString(ns).db, commandBuffer diff --git a/src/client-side-encryption/client_encryption.ts b/src/client-side-encryption/client_encryption.ts index 13e0914a443..0b70f766aae 100644 --- a/src/client-side-encryption/client_encryption.ts +++ b/src/client-side-encryption/client_encryption.ts @@ -144,9 +144,7 @@ export class ClientEncryption { const mongoCryptOptions: MongoCryptOptions = { ...options, - kmsProviders: !ByteUtils.isUint8Array(this._kmsProviders) - ? (serialize(this._kmsProviders) as Buffer) - : this._kmsProviders, + kmsProviders: serialize(this._kmsProviders), errorWrapper: defaultErrorWrapper }; diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index b23100edcce..6aaab6465c2 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -417,10 +417,7 @@ function doWrite( return; } - const chunkString = chunk as string; - const inputBuf = chunkString - ? fromUTF8(chunkString) - : ByteUtils.toLocalBufferType(chunk as Uint8Array); + const inputBuf = typeof chunk === 'string' ? 
fromUTF8(chunk) : ByteUtils.toLocalBufferType(chunk); stream.length += inputBuf.length; From 722379ae1c674bcac774edabb0bcb0d3a2c99e3f Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Thu, 22 Jan 2026 13:39:56 -0800 Subject: [PATCH 16/18] lint fix --- src/client-side-encryption/client_encryption.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/client-side-encryption/client_encryption.ts b/src/client-side-encryption/client_encryption.ts index 0b70f766aae..948f27256fb 100644 --- a/src/client-side-encryption/client_encryption.ts +++ b/src/client-side-encryption/client_encryption.ts @@ -6,7 +6,6 @@ import type { import { type Binary, - ByteUtils, deserialize, type Document, type Int32, From 45c9875fcbd523e16daff09c17b78bd2873c8d46 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Fri, 23 Jan 2026 07:43:13 -0800 Subject: [PATCH 17/18] Apply suggestion from @addaleax Co-authored-by: Anna Henningsen --- .eslintrc.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.eslintrc.json b/.eslintrc.json index ff50d840307..e1092b9befa 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -290,7 +290,7 @@ "error", { "name": "Buffer", - "message": "Use Uin8Array instead" + "message": "Use Uint8Array instead" } ] } From ecc19f22d1edb5105f3fffd6ff762c2b72c166b7 Mon Sep 17 00:00:00 2001 From: Pavel Safronov Date: Mon, 26 Jan 2026 11:10:51 -0800 Subject: [PATCH 18/18] pr feedback: undo test changes and remove ByteUtils --- src/utils.ts | 24 +++--------------------- test/tools/runner/config.ts | 3 +-- test/unit/cmap/commands.test.ts | 13 ++++++------- test/unit/utils.test.ts | 3 +-- 4 files changed, 11 insertions(+), 32 deletions(-) diff --git a/src/utils.ts b/src/utils.ts index c8efa485d79..ea0721f515b 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -9,7 +9,7 @@ import { clearTimeout, setTimeout } from 'timers'; import { allocateBuffer, allocateUnsafeBuffer, - ByteUtils as BSONByteUtils, + ByteUtils, deserialize, type Document, getInt32LE, @@ -53,24 +53,6 @@ 
export type Callback = (error?: AnyError, result?: T) => void; export type AnyOptions = Document; -export const ByteUtils = { - toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { - return BSONByteUtils.toLocalBufferType(buffer) as Buffer; - }, - - equals(this: void, seqA: Uint8Array, seqB: Uint8Array) { - return ByteUtils.toLocalBufferType(seqA).equals(seqB); - }, - - compare(this: void, seqA: Uint8Array, seqB: Uint8Array) { - return ByteUtils.toLocalBufferType(seqA).compare(seqB); - }, - - toBase64(this: void, uint8array: Uint8Array) { - return ByteUtils.toLocalBufferType(uint8array).toString('base64'); - } -}; - /** * Returns true if value is a Uint8Array or a Buffer * @param value - any value that may be a Uint8Array @@ -1338,10 +1320,10 @@ export function decorateDecryptionResult( ): void { if (isTopLevelDecorateCall) { // The original value could have been either a JS object or a BSON buffer - if (BSONByteUtils.isUint8Array(original)) { + if (ByteUtils.isUint8Array(original)) { original = deserialize(original); } - if (BSONByteUtils.isUint8Array(decrypted)) { + if (ByteUtils.isUint8Array(decrypted)) { throw new MongoRuntimeError('Expected result of decryption to be deserialized BSON object'); } } diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index c0504323f95..f7f087b2acf 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -19,7 +19,6 @@ import { TopologyType, type WriteConcernSettings } from '../../../src'; -import { ByteUtils } from '../../../src/bson'; import { type CompressorName } from '../../../src/cmap/wire_protocol/compression'; import { HostAddress } from '../../../src/utils'; import { getEnvironmentalOptions } from '../utils'; @@ -494,7 +493,7 @@ export class TestConfiguration { // @ts-expect-error: toExtendedJSON internal on double but not on long return { number: new Double(value).toExtendedJSON() }; } - if (ByteUtils.isUint8Array(value)) + if (Buffer.isBuffer(value)) return { 
[value.constructor.name]: Buffer.prototype.base64Slice.call(value) }; if (value === undefined) return { undefined: 'key was set but equal to undefined' }; return value; diff --git a/test/unit/cmap/commands.test.ts b/test/unit/cmap/commands.test.ts index 5762411fe7b..7666a4a6552 100644 --- a/test/unit/cmap/commands.test.ts +++ b/test/unit/cmap/commands.test.ts @@ -1,7 +1,6 @@ import * as BSON from 'bson'; import { expect } from 'chai'; -import { ByteUtils, readInt32LE } from '../../../src/bson'; import { DocumentSequence, OpMsgRequest, OpReply } from '../../../src/cmap/commands'; describe('commands', function () { @@ -42,12 +41,12 @@ describe('commands', function () { it('sets the length of the document sequence', function () { // Bytes starting at index 1 is a 4 byte length. - expect(readInt32LE(buffers[3], 1)).to.equal(25); + expect(buffers[3].readInt32LE(1)).to.equal(25); }); it('sets the name of the first field to be replaced', function () { // Bytes starting at index 5 is the field name. - expect(ByteUtils.toUTF8(buffers[3], 5, 10, true)).to.equal('field'); + expect(buffers[3].toString('utf8', 5, 10)).to.equal('field'); }); }); @@ -82,12 +81,12 @@ describe('commands', function () { it('sets the length of the first document sequence', function () { // Bytes starting at index 1 is a 4 byte length. - expect(readInt32LE(buffers[3], 1)).to.equal(28); + expect(buffers[3].readInt32LE(1)).to.equal(28); }); it('sets the name of the first field to be replaced', function () { // Bytes starting at index 5 is the field name. - expect(ByteUtils.toUTF8(buffers[3], 5, 13, true)).to.equal('fieldOne'); + expect(buffers[3].toString('utf8', 5, 13)).to.equal('fieldOne'); }); it('sets the document sequence sections second type to 1', function () { @@ -97,12 +96,12 @@ describe('commands', function () { it('sets the length of the second document sequence', function () { // Bytes starting at index 1 is a 4 byte length. 
- expect(readInt32LE(buffers[3], 30)).to.equal(28); + expect(buffers[3].readInt32LE(30)).to.equal(28); }); it('sets the name of the second field to be replaced', function () { // Bytes starting at index 33 is the field name. - expect(ByteUtils.toUTF8(buffers[3], 34, 42, true)).to.equal('fieldTwo'); + expect(buffers[3].toString('utf8', 34, 42)).to.equal('fieldTwo'); }); }); }); diff --git a/test/unit/utils.test.ts b/test/unit/utils.test.ts index dc393fe0990..6914bb6ce3d 100644 --- a/test/unit/utils.test.ts +++ b/test/unit/utils.test.ts @@ -1,6 +1,6 @@ import { setTimeout } from 'node:timers'; -import { ObjectId } from 'bson'; +import { ByteUtils, ObjectId } from 'bson'; import { expect } from 'chai'; import * as sinon from 'sinon'; @@ -10,7 +10,6 @@ import { decorateWithExplain, Explain } from '../../src/explain'; import { abortable, BufferPool, - ByteUtils, checkParentDomainMatch, compareObjectId, hasAtomicOperators,