diff --git a/README.md b/README.md
index 273fa356..0e7a675e 100644
--- a/README.md
+++ b/README.md
@@ -238,12 +238,13 @@ Serialize a Javascript object using a predefined Buffer and index into the buffe
| buffer | Buffer
| | the buffer containing the serialized set of BSON documents. |
| [options.evalFunctions] | Object
| false
| evaluate functions in the BSON document scoped to the object deserialized. |
| [options.cacheFunctions] | Object
| false
| cache evaluated functions for reuse. |
+| [options.useBigInt64] | boolean
| false
| when deserializing a Long will return a BigInt. |
| [options.promoteLongs] | Object
| true
| when deserializing a Long will fit it into a Number if it's smaller than 53 bits |
| [options.promoteBuffers] | Object
| false
| when deserializing a Binary will return it as a node.js Buffer instance. |
| [options.promoteValues] | Object
| false
| when deserializing will promote BSON values to their Node.js closest equivalent types. |
| [options.fieldsAsRaw] | Object
|
| allow to specify if there what fields we wish to return as unserialized raw buffer. |
| [options.bsonRegExp] | Object
| false
| return BSON regular expressions as BSONRegExp instances. |
-| [options.allowObjectSmallerThanBufferSize] | boolean
| false
| allows the buffer to be larger than the parsed BSON object |
+| [options.allowObjectSmallerThanBufferSize] | boolean
| false
| allows the buffer to be larger than the parsed BSON object. |
Deserialize data as BSON.
diff --git a/src/parser/deserializer.ts b/src/parser/deserializer.ts
index 1a7d7690..50d7a8d8 100644
--- a/src/parser/deserializer.ts
+++ b/src/parser/deserializer.ts
@@ -14,12 +14,14 @@ import { ObjectId } from '../objectid';
import { BSONRegExp } from '../regexp';
import { BSONSymbol } from '../symbol';
import { Timestamp } from '../timestamp';
-import { ByteUtils } from '../utils/byte_utils';
+import { BSONDataView, ByteUtils } from '../utils/byte_utils';
import { validateUtf8 } from '../validate_utf8';
/** @public */
export interface DeserializeOptions {
- /** when deserializing a Long will fit it into a Number if it's smaller than 53 bits */
+  /** when deserializing a Long will return it as a BigInt. */
+ useBigInt64?: boolean;
+ /** when deserializing a Long will fit it into a Number if it's smaller than 53 bits. */
promoteLongs?: boolean;
/** when deserializing a Binary will return it as a node.js Buffer instance. */
promoteBuffers?: boolean;
@@ -29,7 +31,7 @@ export interface DeserializeOptions {
fieldsAsRaw?: Document;
/** return BSON regular expressions as BSONRegExp instances. */
bsonRegExp?: boolean;
- /** allows the buffer to be larger than the parsed BSON object */
+ /** allows the buffer to be larger than the parsed BSON object. */
allowObjectSmallerThanBufferSize?: boolean;
/** Offset into buffer to begin reading document from */
index?: number;
@@ -96,7 +98,7 @@ export function internalDeserialize(
);
}
- // Start deserializtion
+ // Start deserialization
return deserializeObject(buffer, index, options, isArray);
}
@@ -117,9 +119,18 @@ function deserializeObject(
const bsonRegExp = typeof options['bsonRegExp'] === 'boolean' ? options['bsonRegExp'] : false;
// Controls the promotion of values vs wrapper classes
- const promoteBuffers = options['promoteBuffers'] == null ? false : options['promoteBuffers'];
- const promoteLongs = options['promoteLongs'] == null ? true : options['promoteLongs'];
- const promoteValues = options['promoteValues'] == null ? true : options['promoteValues'];
+ const promoteBuffers = options.promoteBuffers ?? false;
+ const promoteLongs = options.promoteLongs ?? true;
+ const promoteValues = options.promoteValues ?? true;
+ const useBigInt64 = options.useBigInt64 ?? false;
+
+ if (useBigInt64 && !promoteValues) {
+ throw new BSONError('Must either request bigint or Long for int64 deserialization');
+ }
+
+ if (useBigInt64 && !promoteLongs) {
+ throw new BSONError('Must either request bigint or Long for int64 deserialization');
+ }
// Ensures default validation option if none given
const validation = options.validation == null ? { utf8: true } : options.validation;
@@ -323,6 +334,8 @@ function deserializeObject(
value = null;
} else if (elementType === constants.BSON_DATA_LONG) {
// Unpack the low and high bits
+ const dataview = BSONDataView.fromUint8Array(buffer.subarray(index, index + 8));
+
const lowBits =
buffer[index++] |
(buffer[index++] << 8) |
@@ -334,8 +347,10 @@ function deserializeObject(
(buffer[index++] << 16) |
(buffer[index++] << 24);
const long = new Long(lowBits, highBits);
- // Promote the long if possible
- if (promoteLongs && promoteValues === true) {
+ if (useBigInt64) {
+ value = dataview.getBigInt64(0, true);
+ } else if (promoteLongs && promoteValues === true) {
+ // Promote the long if possible
value =
long.lessThanOrEqual(JS_INT_MAX_LONG) && long.greaterThanOrEqual(JS_INT_MIN_LONG)
? long.toNumber()
diff --git a/test/node/bigint.test.ts b/test/node/bigint.test.ts
index fe771ce3..fcc95790 100644
--- a/test/node/bigint.test.ts
+++ b/test/node/bigint.test.ts
@@ -1,166 +1,266 @@
-import { BSON } from '../register-bson';
+import { BSON, BSONError } from '../register-bson';
import { bufferFromHexArray } from './tools/utils';
+import { expect } from 'chai';
import { BSON_DATA_LONG } from '../../src/constants';
import { BSONDataView } from '../../src/utils/byte_utils';
-describe('BSON BigInt serialization Support', function () {
- // Index for the data type byte of a BSON document with a
- // NOTE: These offsets only apply for documents with the shape {a : }
- // where n is a BigInt
- type SerializedDocParts = {
- dataType: number;
- key: string;
- value: bigint;
- };
- /**
- * NOTE: this function operates on serialized BSON documents with the shape { : }
- * where n is some int64. This function assumes that keys are properly encoded
- * with the necessary null byte at the end and only at the end of the key string
- */
- function getSerializedDocParts(serializedDoc: Uint8Array): SerializedDocParts {
- const DATA_TYPE_OFFSET = 4;
- const KEY_OFFSET = 5;
-
- const dataView = BSONDataView.fromUint8Array(serializedDoc);
- const keySlice = serializedDoc.slice(KEY_OFFSET);
-
- let keyLength = 0;
- while (keySlice[keyLength++] !== 0);
-
- const valueOffset = KEY_OFFSET + keyLength;
- const key = Buffer.from(serializedDoc.slice(KEY_OFFSET, KEY_OFFSET + keyLength)).toString(
- 'utf8'
- );
-
- return {
- dataType: dataView.getInt8(DATA_TYPE_OFFSET),
- key: key.slice(0, keyLength - 1),
- value: dataView.getBigInt64(valueOffset, true)
+describe('BSON BigInt support', function () {
+ describe('BSON.deserialize()', function () {
+    type DeserializationOptions = {
+      useBigInt64: boolean | undefined;
+      promoteValues: boolean | undefined;
+      promoteLongs: boolean | undefined;
    };
-  }
+    type TestTableEntry = {
+      options: DeserializationOptions;
+      shouldThrow: boolean;
+      expectedResult: BSON.Document;
+      expectedErrorMessage: string;
+    };
+ const testSerializedDoc = bufferFromHexArray(['12', '6100', '2300000000000000']); // key 'a', value 0x23 as int64
+ const useBigInt64Values = [true, false, undefined];
+ const promoteLongsValues = [true, false, undefined];
+ const promoteValuesValues = [true, false, undefined];
- it('serializes bigints with the correct BSON type', function () {
- const testDoc = { a: 0n };
- const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
- expect(serializedDoc.dataType).to.equal(BSON_DATA_LONG);
- });
+ const testTable = useBigInt64Values.flatMap(useBigInt64 => {
+ return promoteLongsValues.flatMap(promoteLongs => {
+ return promoteValuesValues.flatMap(promoteValues => {
+ const useBigInt64Set = useBigInt64 ?? false;
+ const promoteLongsSet = promoteLongs ?? true;
+ const promoteValuesSet = promoteValues ?? true;
+ const shouldThrow = useBigInt64Set && (!promoteValuesSet || !promoteLongsSet);
+ let expectedResult: BSON.Document;
+ if (useBigInt64Set) {
+ expectedResult = { a: 0x23n };
+ } else if (promoteLongsSet && promoteValuesSet) {
+ expectedResult = { a: 0x23 };
+ } else {
+ expectedResult = { a: new BSON.Long(0x23) };
+ }
+ const expectedErrorMessage = shouldThrow
+ ? 'Must either request bigint or Long for int64 deserialization'
+ : '';
+ return [
+ {
+ options: { useBigInt64, promoteValues, promoteLongs },
+ shouldThrow,
+ expectedResult,
+ expectedErrorMessage
+ }
+ ];
+ });
+ });
+ });
- it('serializes bigints into little-endian byte order', function () {
- const testDoc = { a: 0x1234567812345678n };
- const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
- const expectedResult = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64 type
- '6100', // 'a' key with null terminator
- '7856341278563412'
- ])
- );
+ it('meta test: generates 27 tests with exactly 5 error cases and 22 success cases', () => {
+ expect(testTable).to.have.lengthOf(27);
+ expect(testTable.filter(t => t.shouldThrow)).to.have.lengthOf(5);
+ expect(testTable.filter(t => !t.shouldThrow)).to.have.lengthOf(22);
+ });
- expect(expectedResult.value).to.equal(serializedDoc.value);
- });
+ function generateTestDescription(entry: TestTableEntry): string {
+ const options = entry.options;
+ const promoteValues = `promoteValues ${
+ options.promoteValues === undefined ? 'is default' : `is ${options.promoteValues}`
+ }`;
+ const promoteLongs = `promoteLongs ${
+ options.promoteLongs === undefined ? 'is default' : `is ${options.promoteLongs}`
+ }`;
+ const useBigInt64 = `useBigInt64 ${
+ options.useBigInt64 === undefined ? 'is default' : `is ${options.useBigInt64}`
+ }`;
+ const flagString = `${useBigInt64}, ${promoteValues}, and ${promoteLongs}`;
+ if (entry.shouldThrow) {
+ return `throws when ${flagString}`;
+ } else {
+ return `deserializes int64 to ${entry.expectedResult.a.constructor.name} when ${flagString}`;
+ }
+ }
- it('serializes a BigInt that can be safely represented as a Number', function () {
- const testDoc = { a: 0x23n };
- const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
- const expectedResult = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64 type
- '6100', // 'a' key with null terminator
- '2300000000000000' // little endian int64
- ])
- );
- expect(serializedDoc).to.deep.equal(expectedResult);
- });
+ function generateTest(test: TestTableEntry) {
+ const options = test.options;
+ const deserialize = () => {
+ return BSON.deserialize(testSerializedDoc, options);
+ };
+ if (test.shouldThrow) {
+ return () => {
+ expect(deserialize).to.throw(BSONError, test.expectedErrorMessage);
+ };
+ } else {
+ return () => {
+ const deserializedDoc = deserialize();
+ expect(deserializedDoc).to.deep.equal(test.expectedResult);
+ };
+ }
+ }
- it('serializes a BigInt in the valid range [-2^63, 2^63 - 1]', function () {
- const testDoc = { a: 0xfffffffffffffff1n };
- const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
- const expectedResult = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64
- '6100', // 'a' key with null terminator
- 'f1ffffffffffffff'
- ])
- );
- expect(serializedDoc).to.deep.equal(expectedResult);
- });
+ for (const tableEntry of testTable) {
+ const test = generateTest(tableEntry);
+ const description = generateTestDescription(tableEntry);
- it('wraps to negative on a BigInt that is larger than (2^63 -1)', function () {
- const maxIntPlusOne = { a: 2n ** 63n };
- const serializedMaxIntPlusOne = getSerializedDocParts(BSON.serialize(maxIntPlusOne));
- const expectedResultForMaxIntPlusOne = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64
- '6100', // 'a' key with null terminator
- '0000000000000080'
- ])
- );
- expect(serializedMaxIntPlusOne).to.deep.equal(expectedResultForMaxIntPlusOne);
+ it(description, test);
+ }
});
- it('serializes BigInts at the edges of the valid range [-2^63, 2^63 - 1]', function () {
- const maxPositiveInt64 = { a: 2n ** 63n - 1n };
- const serializedMaxPositiveInt64 = getSerializedDocParts(BSON.serialize(maxPositiveInt64));
- const expectedSerializationForMaxPositiveInt64 = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64
- '6100', // 'a' key with null terminator
- 'ffffffffffffff7f'
- ])
- );
- expect(serializedMaxPositiveInt64).to.deep.equal(expectedSerializationForMaxPositiveInt64);
-
- const minPositiveInt64 = { a: -(2n ** 63n) };
- const serializedMinPositiveInt64 = getSerializedDocParts(BSON.serialize(minPositiveInt64));
- const expectedSerializationForMinPositiveInt64 = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64
- '6100', // 'a' key with null terminator
- '0000000000000080'
- ])
- );
- expect(serializedMinPositiveInt64).to.deep.equal(expectedSerializationForMinPositiveInt64);
- });
+ describe('BSON.serialize()', function () {
+ // Index for the data type byte of a BSON document with a
+ // NOTE: These offsets only apply for documents with the shape {a : }
+ // where n is a BigInt
+ type SerializedDocParts = {
+ dataType: number;
+ key: string;
+ value: bigint;
+ };
+ /**
+ * NOTE: this function operates on serialized BSON documents with the shape { : }
+ * where n is some int64. This function assumes that keys are properly encoded
+ * with the necessary null byte at the end and only at the end of the key string
+ */
+ function getSerializedDocParts(serializedDoc: Uint8Array): SerializedDocParts {
+ const DATA_TYPE_OFFSET = 4;
+ const KEY_OFFSET = 5;
- it('truncates a BigInt that is larger than a 64-bit int', function () {
- const testDoc = { a: 2n ** 64n + 1n };
- const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
- const expectedSerialization = getSerializedDocParts(
- bufferFromHexArray([
- '12', //int64
- '6100', // 'a' key with null terminator
- '0100000000000000'
- ])
- );
- expect(serializedDoc).to.deep.equal(expectedSerialization);
- });
+ const dataView = BSONDataView.fromUint8Array(serializedDoc);
+ const keySlice = serializedDoc.slice(KEY_OFFSET);
- it('serializes array of BigInts', function () {
- const testArr = { a: [1n] };
- const serializedArr = BSON.serialize(testArr);
- const expectedSerialization = bufferFromHexArray([
- '04', // array
- '6100', // 'a' key with null terminator
- bufferFromHexArray([
- '12', // int64
- '3000', // '0' key with null terminator
- '0100000000000000' // 1n (little-endian)
- ]).toString('hex')
- ]);
- expect(serializedArr).to.deep.equal(expectedSerialization);
- });
+ let keyLength = 0;
+ while (keySlice[keyLength++] !== 0);
+
+ const valueOffset = KEY_OFFSET + keyLength;
+ const key = Buffer.from(serializedDoc.slice(KEY_OFFSET, KEY_OFFSET + keyLength)).toString(
+ 'utf8'
+ );
+
+ return {
+ dataType: dataView.getInt8(DATA_TYPE_OFFSET),
+ key: key.slice(0, keyLength - 1),
+ value: dataView.getBigInt64(valueOffset, true)
+ };
+ }
+
+ it('serializes bigints with the correct BSON type', function () {
+ const testDoc = { a: 0n };
+ const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
+ expect(serializedDoc.dataType).to.equal(BSON_DATA_LONG);
+ });
+
+ it('serializes bigints into little-endian byte order', function () {
+ const testDoc = { a: 0x1234567812345678n };
+ const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
+ const expectedResult = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64 type
+ '6100', // 'a' key with null terminator
+ '7856341278563412'
+ ])
+ );
+
+ expect(expectedResult.value).to.equal(serializedDoc.value);
+ });
- it('serializes Map with BigInt values', function () {
- const testMap = new Map();
- testMap.set('a', 1n);
- const serializedMap = getSerializedDocParts(BSON.serialize(testMap));
- const expectedSerialization = getSerializedDocParts(
- bufferFromHexArray([
- '12', // int64
+ it('serializes a BigInt that can be safely represented as a Number', function () {
+ const testDoc = { a: 0x23n };
+ const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
+ const expectedResult = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64 type
+ '6100', // 'a' key with null terminator
+ '2300000000000000' // little endian int64
+ ])
+ );
+ expect(serializedDoc).to.deep.equal(expectedResult);
+ });
+
+ it('serializes a BigInt in the valid range [-2^63, 2^63 - 1]', function () {
+ const testDoc = { a: 0xfffffffffffffff1n };
+ const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
+ const expectedResult = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64
+ '6100', // 'a' key with null terminator
+ 'f1ffffffffffffff'
+ ])
+ );
+ expect(serializedDoc).to.deep.equal(expectedResult);
+ });
+
+ it('wraps to negative on a BigInt that is larger than (2^63 -1)', function () {
+ const maxIntPlusOne = { a: 2n ** 63n };
+ const serializedMaxIntPlusOne = getSerializedDocParts(BSON.serialize(maxIntPlusOne));
+ const expectedResultForMaxIntPlusOne = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64
+ '6100', // 'a' key with null terminator
+ '0000000000000080'
+ ])
+ );
+ expect(serializedMaxIntPlusOne).to.deep.equal(expectedResultForMaxIntPlusOne);
+ });
+
+ it('serializes BigInts at the edges of the valid range [-2^63, 2^63 - 1]', function () {
+ const maxPositiveInt64 = { a: 2n ** 63n - 1n };
+ const serializedMaxPositiveInt64 = getSerializedDocParts(BSON.serialize(maxPositiveInt64));
+ const expectedSerializationForMaxPositiveInt64 = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64
+ '6100', // 'a' key with null terminator
+ 'ffffffffffffff7f'
+ ])
+ );
+ expect(serializedMaxPositiveInt64).to.deep.equal(expectedSerializationForMaxPositiveInt64);
+
+ const minPositiveInt64 = { a: -(2n ** 63n) };
+ const serializedMinPositiveInt64 = getSerializedDocParts(BSON.serialize(minPositiveInt64));
+ const expectedSerializationForMinPositiveInt64 = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64
+ '6100', // 'a' key with null terminator
+ '0000000000000080'
+ ])
+ );
+ expect(serializedMinPositiveInt64).to.deep.equal(expectedSerializationForMinPositiveInt64);
+ });
+
+ it('truncates a BigInt that is larger than a 64-bit int', function () {
+ const testDoc = { a: 2n ** 64n + 1n };
+ const serializedDoc = getSerializedDocParts(BSON.serialize(testDoc));
+ const expectedSerialization = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', //int64
+ '6100', // 'a' key with null terminator
+ '0100000000000000'
+ ])
+ );
+ expect(serializedDoc).to.deep.equal(expectedSerialization);
+ });
+
+ it('serializes array of BigInts', function () {
+ const testArr = { a: [1n] };
+ const serializedArr = BSON.serialize(testArr);
+ const expectedSerialization = bufferFromHexArray([
+ '04', // array
'6100', // 'a' key with null terminator
- '0100000000000000'
- ])
- );
- expect(serializedMap).to.deep.equal(expectedSerialization);
+ bufferFromHexArray([
+ '12', // int64
+ '3000', // '0' key with null terminator
+ '0100000000000000' // 1n (little-endian)
+ ]).toString('hex')
+ ]);
+ expect(serializedArr).to.deep.equal(expectedSerialization);
+ });
+
+ it('serializes Map with BigInt values', function () {
+ const testMap = new Map();
+ testMap.set('a', 1n);
+ const serializedMap = getSerializedDocParts(BSON.serialize(testMap));
+ const expectedSerialization = getSerializedDocParts(
+ bufferFromHexArray([
+ '12', // int64
+ '6100', // 'a' key with null terminator
+ '0100000000000000'
+ ])
+ );
+ expect(serializedMap).to.deep.equal(expectedSerialization);
+ });
});
});