From b2d3580367997314f8449c9c5e7c89c86e5d666c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andi=20Pa=CC=88tzold?= Date: Sat, 2 Dec 2023 02:30:31 +0100 Subject: [PATCH] feat: use webcrypto --- README.md | 2 - global.d.ts | 1 - package-lock.json | 330 +++------------ package.json | 4 +- src/Database.ts | 14 +- src/MDBReader.ts | 64 +-- src/Table.ts | 393 ++++-------------- .../handlers/office/agile/index.ts | 35 +- .../handlers/office/rc4-cryptoapi.ts | 18 +- src/codec-handler/types.ts | 4 +- src/crypto/blockDecrypt.ts | 12 +- src/crypto/deriveKey.ts | 17 +- src/crypto/hash.ts | 28 +- src/data/index.ts | 10 +- src/data/memo.ts | 6 +- src/data/ole.ts | 6 +- src/environment/browser.ts | 5 +- src/environment/node.ts | 2 +- src/index.ts | 1 - src/table/getColumnDefinitions.ts | 69 +++ src/table/getDataFromPage.ts | 113 +++++ src/table/getDataPage.ts | 13 + src/table/getRecordOffsets.ts | 27 ++ src/types.ts | 25 +- src/usage-map.ts | 8 +- src/util.ts | 12 + test/bigint.spec.ts | 6 +- test/currency.spec.ts | 6 +- test/empty.spec.ts | 4 +- test/encryption/encryption.spec.ts | 2 +- test/longtext.spec.ts | 5 +- test/numeric.spec.ts | 12 +- test/ole.spec.ts | 6 +- test/reader.spec.ts | 17 +- test/table.spec.ts | 60 +-- tsconfig.browser.json | 2 +- typings/browserify-aes.d.ts | 5 - typings/create-hash.d.ts | 6 - 38 files changed, 576 insertions(+), 774 deletions(-) delete mode 100644 global.d.ts create mode 100644 src/table/getColumnDefinitions.ts create mode 100644 src/table/getDataFromPage.ts create mode 100644 src/table/getDataPage.ts create mode 100644 src/table/getRecordOffsets.ts delete mode 100644 typings/browserify-aes.d.ts delete mode 100644 typings/create-hash.d.ts diff --git a/README.md b/README.md index e9a8f3f8..5acf4994 100644 --- a/README.md +++ b/README.md @@ -46,8 +46,6 @@ yarn add mdb-reader To decrypt databases, this library requires a few dependencies: -- [`browserify-aes`](https://www.npmjs.com/browserify-aes): Only imported when running in browser -- 
[`create-hash`](https://www.npmjs.com/create-hash): Only imported when running in browser - [`fast-xml-parser`](https://www.npmjs.com/fast-xml-parser) ## Usage diff --git a/global.d.ts b/global.d.ts deleted file mode 100644 index 9bc71c21..00000000 --- a/global.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "create-hash"; diff --git a/package-lock.json b/package-lock.json index aa5fc152..4abbbc18 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,8 +9,6 @@ "version": "3.0.0", "license": "MIT", "dependencies": { - "browserify-aes": "^1.0.0", - "create-hash": "^1.0.0", "fast-xml-parser": "^4.0.0" }, "devDependencies": { @@ -19,12 +17,14 @@ "@tsconfig/node18": "18.2.2", "@tsconfig/strictest": "2.0.2", "@types/chai": "4.3.11", + "@types/chai-as-promised": "7.1.6", "@types/mocha": "10.0.6", "@types/mocha-each": "2.0.4", "@types/node": "18.18.13", "@typescript-eslint/eslint-plugin": "6.13.1", "@typescript-eslint/parser": "6.13.1", "chai": "4.3.10", + "chai-as-promised": "7.1.1", "eslint": "8.54.0", "mocha": "10.2.0", "mocha-each": "2.0.1", @@ -1198,6 +1198,15 @@ "integrity": "sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==", "dev": true }, + "node_modules/@types/chai-as-promised": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/@types/chai-as-promised/-/chai-as-promised-7.1.6.tgz", + "integrity": "sha512-cQLhk8fFarRVZAXUQV1xEnZgMoPxqKojBvRkqPCKPQCzEhpbbSKl1Uu75kDng7k5Ln6LQLUmNBjLlFthCgm1NA==", + "dev": true, + "dependencies": { + "@types/chai": "*" + } + }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -1682,24 +1691,6 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, - "node_modules/browserify-aes": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", - 
"integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", - "dependencies": { - "buffer-xor": "^1.0.3", - "cipher-base": "^1.0.0", - "create-hash": "^1.1.0", - "evp_bytestokey": "^1.0.3", - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/buffer-xor": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", - "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" - }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -1740,6 +1731,18 @@ "node": ">=4" } }, + "node_modules/chai-as-promised": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", + "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "dev": true, + "dependencies": { + "check-error": "^1.0.2" + }, + "peerDependencies": { + "chai": ">= 2.1.2 < 5" + } + }, "node_modules/chalk": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", @@ -1804,15 +1807,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, "node_modules/clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", @@ -1975,18 +1969,6 @@ "url": "https://github.com/sponsors/d-fischer" } }, - "node_modules/create-hash": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", - "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", - "dependencies": { - 
"cipher-base": "^1.0.1", - "inherits": "^2.0.1", - "md5.js": "^1.3.4", - "ripemd160": "^2.0.1", - "sha.js": "^2.4.0" - } - }, "node_modules/create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -2594,15 +2576,6 @@ "node": ">=0.10.0" } }, - "node_modules/evp_bytestokey": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", - "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", - "dependencies": { - "md5.js": "^1.3.4", - "safe-buffer": "^5.1.1" - } - }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -3158,38 +3131,6 @@ "node": ">=8" } }, - "node_modules/hash-base": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", - "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", - "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/hash-base/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -3379,7 +3320,8 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true }, "node_modules/ini": { "version": "1.3.8", @@ -3870,16 +3812,6 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/md5.js": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", - "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", - "dependencies": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, "node_modules/meow": { "version": "12.1.1", "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz", @@ -8196,19 +8128,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -8334,15 +8253,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/ripemd160": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", - "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", - "dependencies": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -8369,7 +8279,8 @@ "node_modules/safe-buffer": { "version": "5.1.2", 
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true }, "node_modules/semantic-release": { "version": "22.0.8", @@ -8782,18 +8693,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/sha.js": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", - "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - }, - "bin": { - "sha.js": "bin.js" - } - }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -9026,33 +8925,6 @@ "safe-buffer": "~5.1.0" } }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -9455,7 +9327,8 @@ "node_modules/util-deprecate": { "version": 
"1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", @@ -10516,6 +10389,15 @@ "integrity": "sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==", "dev": true }, + "@types/chai-as-promised": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/@types/chai-as-promised/-/chai-as-promised-7.1.6.tgz", + "integrity": "sha512-cQLhk8fFarRVZAXUQV1xEnZgMoPxqKojBvRkqPCKPQCzEhpbbSKl1Uu75kDng7k5Ln6LQLUmNBjLlFthCgm1NA==", + "dev": true, + "requires": { + "@types/chai": "*" + } + }, "@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -10853,24 +10735,6 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, - "browserify-aes": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", - "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", - "requires": { - "buffer-xor": "^1.0.3", - "cipher-base": "^1.0.0", - "create-hash": "^1.1.0", - "evp_bytestokey": "^1.0.3", - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, - "buffer-xor": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", - "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" - }, "callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -10902,6 +10766,15 @@ "type-detect": "^4.0.8" } }, + "chai-as-promised": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", + "integrity": 
"sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "dev": true, + "requires": { + "check-error": "^1.0.2" + } + }, "chalk": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", @@ -10943,15 +10816,6 @@ "readdirp": "~3.6.0" } }, - "cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", - "requires": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, "clean-stack": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", @@ -11079,18 +10943,6 @@ "path-type": "^4.0.0" } }, - "create-hash": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", - "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", - "requires": { - "cipher-base": "^1.0.1", - "inherits": "^2.0.1", - "md5.js": "^1.3.4", - "ripemd160": "^2.0.1", - "sha.js": "^2.4.0" - } - }, "create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -11520,15 +11372,6 @@ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true }, - "evp_bytestokey": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", - "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", - "requires": { - "md5.js": "^1.3.4", - "safe-buffer": "^5.1.1" - } - }, "execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -11950,23 +11793,6 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 
"dev": true }, - "hash-base": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", - "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", - "requires": { - "inherits": "^2.0.4", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, "he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -12099,7 +11925,8 @@ "inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true }, "ini": { "version": "1.3.8", @@ -12479,16 +12306,6 @@ } } }, - "md5.js": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", - "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", - "requires": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, "meow": { "version": "12.1.1", "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz", @@ -15395,16 +15212,6 @@ "type-fest": "^4.2.0" } }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, "readdirp": { "version": "3.6.0", 
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -15492,15 +15299,6 @@ } } }, - "ripemd160": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", - "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", - "requires": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1" - } - }, "run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -15513,7 +15311,8 @@ "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true }, "semantic-release": { "version": "22.0.8", @@ -15782,15 +15581,6 @@ "randombytes": "^2.1.0" } }, - "sha.js": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", - "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", - "requires": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, "shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -15991,21 +15781,6 @@ } } }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -16278,7 +16053,8 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true }, "v8-compile-cache-lib": { "version": "3.0.1", diff --git a/package.json b/package.json index de617107..5e8dfd57 100644 --- a/package.json +++ b/package.json @@ -39,8 +39,6 @@ }, "homepage": "https://github.com/andipaetzold/mdb-reader#readme", "dependencies": { - "browserify-aes": "^1.0.0", - "create-hash": "^1.0.0", "fast-xml-parser": "^4.0.0" }, "devDependencies": { @@ -49,12 +47,14 @@ "@tsconfig/node18": "18.2.2", "@tsconfig/strictest": "2.0.2", "@types/chai": "4.3.11", + "@types/chai-as-promised": "7.1.6", "@types/mocha": "10.0.6", "@types/mocha-each": "2.0.4", "@types/node": "18.18.13", "@typescript-eslint/eslint-plugin": "6.13.1", "@typescript-eslint/parser": "6.13.1", "chai": "4.3.10", + "chai-as-promised": "7.1.1", "eslint": "8.54.0", "mocha": "10.2.0", "mocha-each": "2.0.1", diff --git a/src/Database.ts b/src/Database.ts index 6eab25ce..d9eba113 100644 --- a/src/Database.ts +++ b/src/Database.ts @@ -28,9 +28,9 @@ export class Database { decryptHeader(this.#databaseDefinitionPage, this.#format); this.#codecHandler = createCodecHandler(this.#databaseDefinitionPage, password); - if (!this.#codecHandler.verifyPassword()) { - throw new Error("Wrong password"); - } + // if (!this.#codecHandler.verifyPassword()) { + // throw new Error("Wrong password"); + // } } get format(): JetFormat { @@ -105,7 +105,7 @@ export class Database { return Object.freeze({ value, version }); } - getPage(page: number): Buffer { + async getPage(page: number): Promise { if (page === 0) { // already 
decrypted return this.#databaseDefinitionPage; @@ -117,7 +117,7 @@ export class Database { } const pageBuffer = this.#buffer.slice(offset, offset + this.#format.pageSize); - return this.#codecHandler.decryptPage(pageBuffer, page); + return await this.#codecHandler.decryptPage(pageBuffer, page); } /** @@ -125,11 +125,11 @@ export class Database { * * @see https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/data.c#L102-L124 */ - findPageRow(pageRow: number): Buffer { + async findPageRow(pageRow: number): Promise { const page = pageRow >> 8; const row = pageRow & 0xff; - const pageBuffer = this.getPage(page); + const pageBuffer = await this.getPage(page); return this.findRow(pageBuffer, row); } diff --git a/src/MDBReader.ts b/src/MDBReader.ts index 8941b45a..8b2f4d1a 100644 --- a/src/MDBReader.ts +++ b/src/MDBReader.ts @@ -1,8 +1,9 @@ import { Database } from "./Database.js"; import { PageType, assertPageType } from "./PageType.js"; +import { createTable } from "./Table.js"; +import { memoPromise } from "./util.js"; import { type SysObject, isSysObjectType, isSystemObject, SysObjectTypes } from "./SysObject.js"; -import { Table } from "./Table.js"; -import type { SortOrder } from "./types.js"; +import type { SortOrder, Table } from "./types.js"; const MSYS_OBJECTS_TABLE = "MSysObjects"; const MSYS_OBJECTS_PAGE = 2; @@ -13,7 +14,6 @@ export interface Options { export default class MDBReader { #buffer: Buffer; - #sysObjects: SysObject[]; #database: Database; /** @@ -25,27 +25,10 @@ export default class MDBReader { assertPageType(this.#buffer, PageType.DatabaseDefinitionPage); this.#database = new Database(this.#buffer, password ?? 
""); - - const mSysObjectsTable = new Table(MSYS_OBJECTS_TABLE, this.#database, MSYS_OBJECTS_PAGE).getData<{ - Id: number; - Name: string; - Type: number; - Flags: number; - }>({ - columns: ["Id", "Name", "Type", "Flags"], - }); - - this.#sysObjects = mSysObjectsTable.map((mSysObject) => { - const objectType = mSysObject.Type & 0x7f; - return { - objectName: mSysObject.Name, - objectType: isSysObjectType(objectType) ? objectType : null, - tablePage: mSysObject.Id & 0x00ffffff, - flags: mSysObject.Flags, - }; - }); } + #getSysObjects = memoPromise(() => getSysObjects(this.#database)); + /** * Date when the database was created */ @@ -74,7 +57,7 @@ export default class MDBReader { * @param systemTables Includes system tables. Default false. * @param linkedTables Includes linked tables. Default false. */ - getTableNames({ + async getTableNames({ normalTables = true, systemTables = false, linkedTables = false, @@ -82,9 +65,9 @@ export default class MDBReader { normalTables?: boolean | undefined; systemTables?: boolean | undefined; linkedTables?: boolean | undefined; - } = {}): string[] { + } = {}): Promise { const filteredSysObjects: SysObject[] = []; - for (const sysObject of this.#sysObjects) { + for (const sysObject of await this.#getSysObjects()) { if (sysObject.objectType === SysObjectTypes.Table) { if (!isSystemObject(sysObject)) { if (normalTables) { @@ -106,15 +89,36 @@ export default class MDBReader { * * @param name Name of the table. Case sensitive. 
*/ - getTable(name: string): Table { - const sysObject = this.#sysObjects - .filter((o) => o.objectType === SysObjectTypes.Table) - .find((o) => o.objectName === name); + async getTable(name: string): Promise { + const sysObjects = await this.#getSysObjects(); + const sysObject = sysObjects.filter((o) => o.objectType === SysObjectTypes.Table).find((o) => o.objectName === name); if (!sysObject) { throw new Error(`Could not find table with name ${name}`); } - return new Table(name, this.#database, sysObject.tablePage); + return await createTable(name, this.#database, sysObject.tablePage); } } + +async function getSysObjects(database: Database): Promise { + const table = await createTable(MSYS_OBJECTS_TABLE, database, MSYS_OBJECTS_PAGE); + const tableData = await table.getData<{ + Id: number; + Name: string; + Type: number; + Flags: number; + }>({ + columns: ["Id", "Name", "Type", "Flags"], + }); + + return tableData.map((mSysObject) => { + const objectType = mSysObject.Type & 0x7f; + return { + objectName: mSysObject.Name, + objectType: isSysObjectType(objectType) ? 
objectType : null, + tablePage: mSysObject.Id & 0x00ffffff, + flags: mSysObject.Flags, + }; + }); +} diff --git a/src/Table.ts b/src/Table.ts index bf578c75..104eb26a 100644 --- a/src/Table.ts +++ b/src/Table.ts @@ -1,109 +1,52 @@ -import { ColumnTypes } from "./index.js"; -import { type Column, type ColumnDefinition, getColumnType, parseColumnFlags } from "./column.js"; -import { readFieldValue } from "./data/index.js"; import { Database } from "./Database.js"; import { PageType, assertPageType } from "./PageType.js"; -import type { Value } from "./types.js"; -import { uncompressText } from "./unicodeCompression.js"; +import type { Column } from "./column.js"; +import { getColumnDefinitions } from "./table/getColumnDefinitions.js"; +import { getDataFromPage } from "./table/getDataFromPage.js"; +import { getDataPage } from "./table/getDataPage.js"; +import { getRecordOffsets } from "./table/getRecordOffsets.js"; +import type { Table, Value } from "./types.js"; import { findMapPages } from "./usage-map.js"; -import { getBitmapValue, roundToFullByte } from "./util.js"; -export class Table { - #name: string; - #database: Database; - #firstDefinitionPage: number; +export async function createTable(name: string, database: Database, firstDefinitionPage: number): Promise
{ + // Concat all table definition pages + let nextDefinitionPage = firstDefinitionPage; + let buffer: Buffer | undefined; + while (nextDefinitionPage > 0) { + const curBuffer = await database.getPage(nextDefinitionPage); + assertPageType(curBuffer, PageType.TableDefinition); - #definitionBuffer: Buffer; - #dataPages: number[]; - - /** - * Number of rows. - */ - #rowCount: number; - - /** - * Number of columns. - */ - #columnCount: number; - - #variableColumnCount: number; - // #fixedColumnCount: number; - - // #logicalIndexCount: number; - #realIndexCount: number; - - /** - * @param name Table name. As this is stored in a MSysObjects, it has to be passed in - * @param database - * @param firstDefinitionPage The first page of the table definition referenced in the corresponding MSysObject - */ - constructor(name: string, database: Database, firstDefinitionPage: number) { - this.#name = name; - this.#database = database; - this.#firstDefinitionPage = firstDefinitionPage; - - // Concat all table definition pages - let nextDefinitionPage = this.#firstDefinitionPage; - let buffer: Buffer | undefined; - while (nextDefinitionPage > 0) { - const curBuffer = this.#database.getPage(nextDefinitionPage); - assertPageType(curBuffer, PageType.TableDefinition); - - if (!buffer) { - buffer = curBuffer; - } else { - buffer = Buffer.concat([buffer, curBuffer.slice(8)]); - } - - nextDefinitionPage = curBuffer.readUInt32LE(4); - } if (!buffer) { - throw new Error("Could not find table definition page"); + buffer = curBuffer; + } else { + buffer = Buffer.concat([buffer, curBuffer.slice(8)]); } - this.#definitionBuffer = buffer; - // Read row, column, and index counts - this.#rowCount = this.#definitionBuffer.readUInt32LE(this.#database.format.tableDefinitionPage.rowCountOffset); - - this.#columnCount = this.#definitionBuffer.readUInt16LE(this.#database.format.tableDefinitionPage.columnCountOffset); - this.#variableColumnCount = this.#definitionBuffer.readUInt16LE( - 
this.#database.format.tableDefinitionPage.variableColumnCountOffset - ); - // this.#fixedColumnCount = this.#columnCount - this.#variableColumnCount; - - // this.#logicalIndexCount = this.#definitionBuffer.readInt32LE( - // this.#database.format.tableDefinitionPage.logicalIndexCountOffset - // ); - this.#realIndexCount = this.#definitionBuffer.readInt32LE( - this.#database.format.tableDefinitionPage.realIndexCountOffset - ); - - // Usage Map - const usageMapBuffer = this.#database.findPageRow( - this.#definitionBuffer.readUInt32LE(this.#database.format.tableDefinitionPage.usageMapOffset) - ); - this.#dataPages = findMapPages(usageMapBuffer, this.#database); + nextDefinitionPage = curBuffer.readUInt32LE(4); } - - get name(): string { - return this.#name; + if (!buffer) { + throw new Error("Could not find table definition page"); } + const definitionBuffer = buffer; - get rowCount(): number { - return this.#rowCount; - } + // Read row, column, and index counts + const rowCount = definitionBuffer.readUInt32LE(database.format.tableDefinitionPage.rowCountOffset); - get columnCount(): number { - return this.#columnCount; - } + const columnCount = definitionBuffer.readUInt16LE(database.format.tableDefinitionPage.columnCountOffset); + const variableColumnCount = definitionBuffer.readUInt16LE(database.format.tableDefinitionPage.variableColumnCountOffset); + // const fixedColumnCount = columnCount - variableColumnCount; - /** - * Returns a column definition by its name. - * - * @param name Name of the column. Case sensitive. 
- */ - getColumn(name: string): Column { - const column = this.getColumns().find((c) => c.name === name); + // const logicalIndexCount = definitionBuffer.readInt32LE(database.format.tableDefinitionPage.logicalIndexCountOffset); + const realIndexCount = definitionBuffer.readInt32LE(database.format.tableDefinitionPage.realIndexCountOffset); + + // Usage Map + const usageMapBuffer = await database.findPageRow( + definitionBuffer.readUInt32LE(database.format.tableDefinitionPage.usageMapOffset) + ); + const dataPages = await findMapPages(usageMapBuffer, database); + + function getColumn(name: string): Column { + const column = getColumns().find((c) => c.name === name); if (column === undefined) { throw new Error(`Could not find column with name ${name}`); @@ -115,8 +58,13 @@ export class Table { /** * Returns an ordered array of all column definitions. */ - getColumns(): Column[] { - const columnDefinitions = this.#getColumnDefinitions(); + function getColumns(): Column[] { + const columnDefinitions = getColumnDefinitions({ + database, + realIndexCount, + columnCount, + definitionBuffer, + }); columnDefinitions.sort((a, b) => a.index - b.index); @@ -124,76 +72,8 @@ export class Table { return columnDefinitions.map(({ index, variableIndex, fixedIndex, ...rest }) => rest); } - #getColumnDefinitions(): ColumnDefinition[] { - const columns: ColumnDefinition[] = []; - - let curDefinitionPos = - this.#database.format.tableDefinitionPage.realIndexStartOffset + - this.#realIndexCount * this.#database.format.tableDefinitionPage.realIndexEntrySize; - - let namesCursorPos = - curDefinitionPos + this.#columnCount * this.#database.format.tableDefinitionPage.columnsDefinition.entrySize; - - for (let i = 0; i < this.#columnCount; ++i) { - const columnBuffer = this.#definitionBuffer.slice( - curDefinitionPos, - curDefinitionPos + this.#database.format.tableDefinitionPage.columnsDefinition.entrySize - ); - - const type = getColumnType( - this.#definitionBuffer.readUInt8( - 
curDefinitionPos + this.#database.format.tableDefinitionPage.columnsDefinition.typeOffset - ) - ); - - const nameLength = this.#definitionBuffer.readUIntLE( - namesCursorPos, - this.#database.format.tableDefinitionPage.columnNames.nameLengthSize - ); - namesCursorPos += this.#database.format.tableDefinitionPage.columnNames.nameLengthSize; - const name = uncompressText( - this.#definitionBuffer.slice(namesCursorPos, namesCursorPos + nameLength), - this.#database.format - ); - namesCursorPos += nameLength; - - const column: ColumnDefinition = { - name, - type, - index: columnBuffer.readUInt8(this.#database.format.tableDefinitionPage.columnsDefinition.indexOffset), - variableIndex: columnBuffer.readUInt8( - this.#database.format.tableDefinitionPage.columnsDefinition.variableIndexOffset - ), - size: - type === ColumnTypes.Boolean - ? 0 - : columnBuffer.readUInt16LE(this.#database.format.tableDefinitionPage.columnsDefinition.sizeOffset), - fixedIndex: columnBuffer.readUInt16LE( - this.#database.format.tableDefinitionPage.columnsDefinition.fixedIndexOffset - ), - ...parseColumnFlags( - columnBuffer.readUInt8(this.#database.format.tableDefinitionPage.columnsDefinition.flagsOffset) - ), - }; - - if (type === ColumnTypes.Numeric) { - column.precision = columnBuffer.readUInt8(11); - column.scale = columnBuffer.readUInt8(12); - } - - columns.push(column); - - curDefinitionPos += this.#database.format.tableDefinitionPage.columnsDefinition.entrySize; - } - - return columns; - } - - /** - * Returns an ordered array of all column names. - */ - getColumnNames(): string[] { - return this.getColumns().map((column) => column.name); + function getColumnNames(): string[] { + return getColumns().map((column) => column.name); } /** @@ -203,7 +83,7 @@ export class Table { * @param rowOffset Index of the first row to be returned. 0-based. Defaults to 0. * @param rowLimit Maximum number of rows to be returned. Defaults to Infinity. 
*/ - getData( + async function getData( options: | { columns?: ReadonlyArray | undefined; @@ -211,8 +91,14 @@ export class Table { rowLimit?: number | undefined; } | undefined = {} - ): TRow[] { - const columnDefinitions = this.#getColumnDefinitions(); + ): Promise { + + const columnDefinitions = getColumnDefinitions({ + database, + realIndexCount, + columnCount, + definitionBuffer, + }); const data = []; @@ -221,14 +107,14 @@ export class Table { let rowsToSkip = options?.rowOffset ?? 0; let rowsToRead = options?.rowLimit ?? Infinity; - for (const dataPage of this.#dataPages) { + for (const dataPage of dataPages) { if (rowsToRead <= 0) { // All required data was loaded break; } - const pageBuffer = this.#getDataPage(dataPage); - const recordOffsets = this.#getRecordOffsets(pageBuffer); + const pageBuffer = await getDataPage(name, database, firstDefinitionPage, dataPage); + const recordOffsets = getRecordOffsets(database, pageBuffer); if (recordOffsets.length <= rowsToSkip) { // All records can be skipped @@ -237,7 +123,13 @@ export class Table { } const recordOffsetsToLoad = recordOffsets.slice(rowsToSkip, rowsToSkip + rowsToRead); - const recordsOnPage = this.#getDataFromPage(pageBuffer, recordOffsetsToLoad, columns); + const recordsOnPage = await getDataFromPage( + database, + variableColumnCount, + pageBuffer, + recordOffsetsToLoad, + columns + ); data.push(...recordsOnPage); @@ -248,146 +140,19 @@ export class Table { return data as TRow[]; } - #getDataPage(page: number) { - const pageBuffer = this.#database.getPage(page); - assertPageType(pageBuffer, PageType.DataPage); - - if (pageBuffer.readUInt32LE(4) !== this.#firstDefinitionPage) { - throw new Error(`Data page ${page} does not belong to table ${this.#name}`); - } - - return pageBuffer; - } - - #getRecordOffsets(pageBuffer: Buffer): RecordOffset[] { - const recordCount = pageBuffer.readUInt16LE(this.#database.format.dataPage.recordCountOffset); - const recordOffsets: RecordOffset[] = []; - for (let record 
= 0; record < recordCount; ++record) { - const offsetMask = 0x1fff; - - let recordStart = pageBuffer.readUInt16LE(this.#database.format.dataPage.record.countOffset + 2 + record * 2); - if (recordStart & 0x4000) { - // deleted record - continue; - } - recordStart &= offsetMask; // remove flags - - const nextStart = - record === 0 - ? this.#database.format.pageSize - : pageBuffer.readUInt16LE(this.#database.format.dataPage.record.countOffset + record * 2) & offsetMask; - const recordLength = nextStart - recordStart; - const recordEnd = recordStart + recordLength - 1; - - recordOffsets.push([recordStart, recordEnd]); - } - return recordOffsets; - } - - #getDataFromPage( - pageBuffer: Buffer, - recordOffsets: RecordOffset[], - columns: ReadonlyArray - ): { [column: string]: Value }[] { - const lastColumnIndex = Math.max(...columns.map((c) => c.index), 0); - const data: { [column: string]: Value }[] = []; - for (const [recordStart, recordEnd] of recordOffsets) { - const rowColumnCount = pageBuffer.readUIntLE(recordStart, this.#database.format.dataPage.record.columnCountSize); - - const bitmaskSize = roundToFullByte(rowColumnCount); - - let rowVariableColumnCount = 0; - const variableColumnOffsets: number[] = []; - if (this.#variableColumnCount > 0) { - switch (this.#database.format.dataPage.record.variableColumnCountSize) { - case 1: { - rowVariableColumnCount = pageBuffer.readUInt8(recordEnd - bitmaskSize); - - // https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/write.c#L125-L147 - const recordLength = recordEnd - recordStart + 1; - let jumpCount = Math.floor((recordLength - 1) / 256); - const columnPointer = recordEnd - bitmaskSize - jumpCount - 1; - - /* If last jump is a dummy value, ignore it */ - if ((columnPointer - recordStart - rowVariableColumnCount) / 256 < jumpCount) { - --jumpCount; - } - - let jumpsUsed = 0; - for (let i = 0; i < rowVariableColumnCount + 1; ++i) { - while ( - jumpsUsed < jumpCount && - i === 
pageBuffer.readUInt8(recordEnd - bitmaskSize - jumpsUsed - 1) - ) { - ++jumpsUsed; - } - variableColumnOffsets.push(pageBuffer.readUInt8(columnPointer - i) + jumpsUsed * 256); - } - break; - } - case 2: { - rowVariableColumnCount = pageBuffer.readUInt16LE(recordEnd - bitmaskSize - 1); - - // https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/write.c#L115-L124 - for (let i = 0; i < rowVariableColumnCount + 1; ++i) { - variableColumnOffsets.push(pageBuffer.readUInt16LE(recordEnd - bitmaskSize - 3 - i * 2)); - } - break; - } - } - } - - const rowFixedColumnCount = rowColumnCount - rowVariableColumnCount; - - const nullMask = pageBuffer.slice( - recordEnd - bitmaskSize + 1, - recordEnd - bitmaskSize + 1 + roundToFullByte(lastColumnIndex + 1) - ); - let fixedColumnsFound = 0; - - const recordValues: { [column: string]: Value } = {}; - for (const column of [...columns].sort((a, b) => a.index - b.index)) { - /** - * undefined = will be set later. Undefined will never be returned to the user. - * null = actually null - */ - let value: Value | undefined = undefined; - let start: number; - let size: number; - - if (!getBitmapValue(nullMask, column.index)) { - value = null; - } - - if (column.fixedLength && fixedColumnsFound < rowFixedColumnCount) { - const colStart = column.fixedIndex + this.#database.format.dataPage.record.columnCountSize; - start = recordStart + colStart; - size = column.size; - ++fixedColumnsFound; - } else if (!column.fixedLength && column.variableIndex < rowVariableColumnCount) { - const colStart = variableColumnOffsets[column.variableIndex]!; - start = recordStart + colStart; - size = variableColumnOffsets[column.variableIndex + 1]! 
- colStart; - } else { - start = 0; - value = null; - size = 0; - } - - if (column.type === ColumnTypes.Boolean) { - value = value === undefined; - } else if (value !== null) { - value = readFieldValue(pageBuffer.slice(start, start + size), column, this.#database); - } - - recordValues[column.name] = value as Value; - } - - data.push(recordValues); - } - - return data; - } + return { + get name(): string { + return name; + }, + get rowCount(): number { + return rowCount; + }, + get columnCount(): number { + return columnCount; + }, + getColumn, + getColumns, + getColumnNames, + getData, + }; } - -type RecordOffset = [start: number, end: number]; diff --git a/src/codec-handler/handlers/office/agile/index.ts b/src/codec-handler/handlers/office/agile/index.ts index 11823550..ab6e9003 100644 --- a/src/codec-handler/handlers/office/agile/index.ts +++ b/src/codec-handler/handlers/office/agile/index.ts @@ -13,19 +13,20 @@ const ENC_VALUE_BLOCK = [0x14, 0x6e, 0x0b, 0xe7, 0xab, 0xac, 0xd0, 0xd6]; export function createAgileCodecHandler(encodingKey: Buffer, encryptionProvider: Buffer, password: Buffer): CodecHandler { const { keyData, passwordKeyEncryptor } = parseEncryptionDescriptor(encryptionProvider); - const key = decryptKeyValue(password, passwordKeyEncryptor); - const decryptPage: DecryptPage = (b, pageNumber) => { + const decryptPage: DecryptPage = async (b, pageNumber) => { + const key = await decryptKeyValue(password, passwordKeyEncryptor); + const pageEncodingKey = getPageEncodingKey(encodingKey, pageNumber); - const iv = hash(keyData.hash.algorithm, [keyData.salt, pageEncodingKey], keyData.blockSize); + const iv = await hash(keyData.hash.algorithm, [keyData.salt, pageEncodingKey], keyData.blockSize); - return blockDecrypt(keyData.cipher, key, iv, b); + return await blockDecrypt(keyData.cipher, key, iv, b); }; - const verifyPassword: VerifyPassword = () => { - const verifier = decryptVerifierHashInput(password, passwordKeyEncryptor); - const verifierHash = 
decryptVerifierHashValue(password, passwordKeyEncryptor); + const verifyPassword: VerifyPassword = async () => { + const verifier = await decryptVerifierHashInput(password, passwordKeyEncryptor); + const verifierHash = await decryptVerifierHashValue(password, passwordKeyEncryptor); - let testHash = hash(passwordKeyEncryptor.hash.algorithm, [verifier]); + let testHash = await hash(passwordKeyEncryptor.hash.algorithm, [verifier]); const blockSize = passwordKeyEncryptor.blockSize; if (testHash.length % blockSize != 0) { @@ -42,8 +43,8 @@ export function createAgileCodecHandler(encodingKey: Buffer, encryptionProvider: }; } -function decryptKeyValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Buffer { - const key = deriveKey( +async function decryptKeyValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Promise { + const key = await deriveKey( password, Buffer.from(ENC_VALUE_BLOCK), passwordKeyEncryptor.hash.algorithm, @@ -52,7 +53,7 @@ function decryptKeyValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncr roundToFullByte(passwordKeyEncryptor.keyBits) ); - return blockDecrypt( + return await blockDecrypt( passwordKeyEncryptor.cipher, key, passwordKeyEncryptor.salt, @@ -60,8 +61,8 @@ function decryptKeyValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncr ); } -function decryptVerifierHashInput(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Buffer { - const key = deriveKey( +async function decryptVerifierHashInput(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Promise { + const key = await deriveKey( password, Buffer.from(ENC_VERIFIER_INPUT_BLOCK), passwordKeyEncryptor.hash.algorithm, @@ -70,7 +71,7 @@ function decryptVerifierHashInput(password: Buffer, passwordKeyEncryptor: Passwo roundToFullByte(passwordKeyEncryptor.keyBits) ); - return blockDecrypt( + return await blockDecrypt( passwordKeyEncryptor.cipher, key, passwordKeyEncryptor.salt, @@ -78,8 +79,8 @@ function 
decryptVerifierHashInput(password: Buffer, passwordKeyEncryptor: Passwo ); } -function decryptVerifierHashValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Buffer { - const key = deriveKey( +async function decryptVerifierHashValue(password: Buffer, passwordKeyEncryptor: PasswordKeyEncryptor): Promise { + const key = await deriveKey( password, Buffer.from(ENC_VERIFIER_VALUE_BLOCK), passwordKeyEncryptor.hash.algorithm, @@ -88,7 +89,7 @@ function decryptVerifierHashValue(password: Buffer, passwordKeyEncryptor: Passwo roundToFullByte(passwordKeyEncryptor.keyBits) ); - return blockDecrypt( + return await blockDecrypt( passwordKeyEncryptor.cipher, key, passwordKeyEncryptor.salt, diff --git a/src/codec-handler/handlers/office/rc4-cryptoapi.ts b/src/codec-handler/handlers/office/rc4-cryptoapi.ts index 7ad39001..a6a09ff1 100644 --- a/src/codec-handler/handlers/office/rc4-cryptoapi.ts +++ b/src/codec-handler/handlers/office/rc4-cryptoapi.ts @@ -21,17 +21,17 @@ export function createRC4CryptoAPICodecHandler( const encryptionHeader = parseEncryptionHeader(headerBuffer, VALID_CRYPTO_ALGORITHMS, VALID_HASH_ALGORITHMS); const encryptionVerifier = parseEncryptionVerifier(encryptionProvider, encryptionHeader.cryptoAlgorithm); - const baseHash = hash("sha1", [encryptionVerifier.salt, password]); - - const decryptPage: DecryptPage = (pageBuffer, pageIndex) => { + const decryptPage: DecryptPage = async (pageBuffer, pageIndex) => { + const baseHash = await hash("sha1", [encryptionVerifier.salt, password]); const pageEncodingKey = getPageEncodingKey(encodingKey, pageIndex); - const encryptionKey = getEncryptionKey(encryptionHeader, baseHash, pageEncodingKey); + const encryptionKey = await getEncryptionKey(encryptionHeader, baseHash, pageEncodingKey); return decryptRC4(encryptionKey, pageBuffer); }; return { decryptPage, - verifyPassword: () => { - const encryptionKey = getEncryptionKey(encryptionHeader, baseHash, intToBuffer(0)); + verifyPassword: async () => { + 
const baseHash = await hash("sha1", [encryptionVerifier.salt, password]); + const encryptionKey = await getEncryptionKey(encryptionHeader, baseHash, intToBuffer(0)); const rc4Decrypter = createRC4Decrypter(encryptionKey); @@ -41,15 +41,15 @@ export function createRC4CryptoAPICodecHandler( encryptionVerifier.encryptionVerifierHashSize ); - const testHash = fixBufferLength(hash("sha1", [verifier]), encryptionVerifier.encryptionVerifierHashSize); + const testHash = fixBufferLength(await hash("sha1", [verifier]), encryptionVerifier.encryptionVerifierHashSize); return verifierHash.equals(testHash); }, }; } -function getEncryptionKey(header: EncryptionHeader, baseHash: Buffer, data: Buffer): Buffer { - const key = hash("sha1", [baseHash, data], roundToFullByte(header.keySize)); +async function getEncryptionKey(header: EncryptionHeader, baseHash: Buffer, data: Buffer): Promise { + const key = await hash("sha1", [baseHash, data], roundToFullByte(header.keySize)); if (header.keySize === 40) { return key.slice(0, roundToFullByte(128)); } diff --git a/src/codec-handler/types.ts b/src/codec-handler/types.ts index 19508707..764c97de 100644 --- a/src/codec-handler/types.ts +++ b/src/codec-handler/types.ts @@ -3,5 +3,5 @@ export interface CodecHandler { verifyPassword: VerifyPassword; } -export type DecryptPage = (pageBuffer: Buffer, pageIndex: number) => Buffer; -export type VerifyPassword = () => boolean; +export type DecryptPage = (pageBuffer: Buffer, pageIndex: number) => Promise | Buffer; +export type VerifyPassword = () => Promise | boolean; diff --git a/src/crypto/blockDecrypt.ts b/src/crypto/blockDecrypt.ts index a388254e..b86f461a 100644 --- a/src/crypto/blockDecrypt.ts +++ b/src/crypto/blockDecrypt.ts @@ -1,9 +1,9 @@ -import { createDecipheriv } from "../environment/index.js"; +import { webcrypto } from "../environment/index.js"; import type { Cipher } from "./types.js"; -export function blockDecrypt(cipher: Cipher, key: Buffer, iv: Buffer, data: Buffer): Buffer { - 
const algorithm = `${cipher.algorithm}-${key.length * 8}-${cipher.chaining.slice(-3)}`; - const decipher = createDecipheriv(algorithm, key, iv); - decipher.setAutoPadding(false); - return decipher.update(data); +export async function blockDecrypt(cipher: Cipher, key: Buffer, iv: Buffer, encryptedData: Buffer): Promise { + const algorithm = `${cipher.algorithm}-${cipher.chaining.slice(-3)}`.toUpperCase(); + const importedKey = await webcrypto.subtle.importKey("raw", key, algorithm, false, ["decrypt"]); + const result = await webcrypto.subtle.decrypt({ name: algorithm, iv }, importedKey, encryptedData); + return Buffer.from(result); } diff --git a/src/crypto/deriveKey.ts b/src/crypto/deriveKey.ts index 99f6928b..e05d4d58 100644 --- a/src/crypto/deriveKey.ts +++ b/src/crypto/deriveKey.ts @@ -1,27 +1,24 @@ import { fixBufferLength, intToBuffer } from "../util.js"; import { hash } from "./hash.js"; -/** - * Can probably be replaced with `crypto.webcrypto.subtle.derivekey(...)` once node 16 support is dropped - */ -export function deriveKey( +export async function deriveKey( password: Buffer, blockBytes: Buffer, algorithm: string, salt: Buffer, iterations: number, keyByteLength: number -): Buffer { - const baseHash = hash(algorithm, [salt, password]); - const iterHash = iterateHash(algorithm, baseHash, iterations); - const finalHash = hash(algorithm, [iterHash, blockBytes]); +): Promise { + const baseHash = await hash(algorithm, [salt, password]); + const iterHash = await iterateHash(algorithm, baseHash, iterations); + const finalHash = await hash(algorithm, [iterHash, blockBytes]); return fixBufferLength(finalHash, keyByteLength, 0x36); } -function iterateHash(algorithm: string, baseBuffer: Buffer, iterations: number): Buffer { +async function iterateHash(algorithm: string, baseBuffer: Buffer, iterations: number): Promise { let iterHash = baseBuffer; for (let i = 0; i < iterations; ++i) { - iterHash = hash(algorithm, [intToBuffer(i), iterHash]); + iterHash = await 
hash(algorithm, [intToBuffer(i), iterHash]); } return iterHash; } diff --git a/src/crypto/hash.ts b/src/crypto/hash.ts index 935772c5..3cc0ce28 100644 --- a/src/crypto/hash.ts +++ b/src/crypto/hash.ts @@ -1,16 +1,26 @@ -import { createHash } from "../environment/index.js"; +import { webcrypto } from "../environment/index.js"; import { fixBufferLength } from "../util.js"; -export function hash(algorithm: string, buffers: Buffer[], length?: number): Buffer { - const digest = createHash(algorithm); +const algorithmMap: Record = { + sha1: "SHA-1", + sha256: "SHA-256", + sha384: "SHA-384", + sha512: "SHA-512", +}; - for (const buffer of buffers) { - digest.update(buffer); +export async function hash(algorithm: string, buffers: Buffer[], length?: number): Promise { + const webcryptoAlgorithm = algorithmMap[algorithm.toLowerCase()]; + if (!webcryptoAlgorithm) { + throw new Error(`Unknown hashing algorithm: "${algorithm}"`); } - const result = digest.digest(); - if (length !== undefined) { - return fixBufferLength(result, length); + const concatBuffer = Buffer.concat(buffers); + const result = await webcrypto.subtle.digest(webcryptoAlgorithm, concatBuffer); + const resultAsBuffer = Buffer.from(result); + + if (length === undefined) { + return resultAsBuffer; } - return result; + + return fixBufferLength(resultAsBuffer, length); } diff --git a/src/data/index.ts b/src/data/index.ts index c879c160..f609e3a5 100644 --- a/src/data/index.ts +++ b/src/data/index.ts @@ -19,7 +19,7 @@ import { readText } from "./text.js"; const readFnByColType: { [type in Exclude]: - | ((buffer: Buffer, column: Column, database: Database) => ValueMap[type]) + | ((buffer: Buffer, column: Column, database: Database) => ValueMap[type] | Promise) | undefined; } = { [ColumnTypes.BigInt]: readBigInt, @@ -40,7 +40,11 @@ const readFnByColType: { [ColumnTypes.RepID]: readRepID, }; -export function readFieldValue(buffer: Buffer, column: ColumnDefinition, database: Database): Value | undefined { +export async 
function readFieldValue( + buffer: Buffer, + column: ColumnDefinition, + database: Database +): Promise { if (column.type === ColumnTypes.Boolean) { throw new Error("readFieldValue does not handle type boolean"); } @@ -50,5 +54,5 @@ export function readFieldValue(buffer: Buffer, column: ColumnDefinition, databas return `Column type ${column.type} is currently not supported`; } - return read(buffer, column, database); + return await read(buffer, column, database); } diff --git a/src/data/memo.ts b/src/data/memo.ts index 3bbdc265..6b57e4d1 100644 --- a/src/data/memo.ts +++ b/src/data/memo.ts @@ -9,7 +9,7 @@ const TYPE_OTHER_PAGES = 0x00; /** * @see https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/data.c#L690-L776 */ -export function readMemo(buffer: Buffer, _col: Column, database: Database): string { +export async function readMemo(buffer: Buffer, _col: Column, database: Database): Promise { const memoLength = buffer.readUIntLE(0, 3); const type = buffer.readUInt8(3); @@ -21,7 +21,7 @@ export function readMemo(buffer: Buffer, _col: Column, database: Database): stri case TYPE_OTHER_PAGE: { const pageRow = buffer.readUInt32LE(4); - const rowBuffer = database.findPageRow(pageRow); + const rowBuffer = await database.findPageRow(pageRow); const compressedText = rowBuffer.slice(0, memoLength); return uncompressText(compressedText, database.format); } @@ -30,7 +30,7 @@ export function readMemo(buffer: Buffer, _col: Column, database: Database): stri let pageRow = buffer.readInt32LE(4); let memoDataBuffer = Buffer.alloc(0); do { - const rowBuffer = database.findPageRow(pageRow); + const rowBuffer = await database.findPageRow(pageRow); if (memoDataBuffer.length + rowBuffer.length - 4 > memoLength) { break; diff --git a/src/data/ole.ts b/src/data/ole.ts index 21d26b1f..9eb3eaa4 100644 --- a/src/data/ole.ts +++ b/src/data/ole.ts @@ -4,7 +4,7 @@ import { Database } from "../Database.js"; /** * @see 
https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/data.c#L626-L688 */ -export function readOLE(buffer: Buffer, _col: Column, database: Database): Buffer { +export async function readOLE(buffer: Buffer, _col: Column, database: Database): Promise { const memoLength = buffer.readUIntLE(0, 3); const bitmask = buffer.readUInt8(3); @@ -15,7 +15,7 @@ export function readOLE(buffer: Buffer, _col: Column, database: Database): Buffe } else if (bitmask & 0x40) { // single page const pageRow = buffer.readUInt32LE(4); - const rowBuffer = database.findPageRow(pageRow); + const rowBuffer = await database.findPageRow(pageRow); return rowBuffer.slice(0, memoLength); } else if (bitmask === 0) { // multi page @@ -23,7 +23,7 @@ export function readOLE(buffer: Buffer, _col: Column, database: Database): Buffe let memoDataBuffer = Buffer.alloc(0); do { - const rowBuffer = database.findPageRow(pageRow); + const rowBuffer = await database.findPageRow(pageRow); if (memoDataBuffer.length + rowBuffer.length - 4 > memoLength) { break; diff --git a/src/environment/browser.ts b/src/environment/browser.ts index e3529589..fff96445 100644 --- a/src/environment/browser.ts +++ b/src/environment/browser.ts @@ -1,3 +1,2 @@ -import browserifyAES from "browserify-aes/browser.js"; -export const createDecipheriv = browserifyAES.createDecipheriv; -export { default as createHash } from "create-hash"; +// @ts-expect-error "Element implicitly has an 'any' type because type 'typeof globalThis' has no index signature." 
+export const webcrypto = globalThis.crypto; diff --git a/src/environment/node.ts b/src/environment/node.ts index c9680eba..7f37bcd6 100644 --- a/src/environment/node.ts +++ b/src/environment/node.ts @@ -1 +1 @@ -export { createDecipheriv, createHash } from "crypto"; +export { createHash, webcrypto } from "node:crypto"; diff --git a/src/index.ts b/src/index.ts index bec77ce3..45d8778a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,3 @@ export type { Column } from "./column.js"; export { default, type Options } from "./MDBReader.js"; -export type { Table } from "./Table.js"; export * from "./types.js"; diff --git a/src/table/getColumnDefinitions.ts b/src/table/getColumnDefinitions.ts new file mode 100644 index 00000000..d74c6eda --- /dev/null +++ b/src/table/getColumnDefinitions.ts @@ -0,0 +1,69 @@ +import { Database } from "../Database.js"; +import { type ColumnDefinition, getColumnType, parseColumnFlags } from "../column.js"; +import { ColumnTypes } from "../types.js"; +import { uncompressText } from "../unicodeCompression.js"; + +interface Options { + database: Database; + realIndexCount: number; + columnCount: number; + definitionBuffer: Buffer; +} + +export function getColumnDefinitions({ + database, + realIndexCount, + columnCount, + definitionBuffer, +}: Options): ColumnDefinition[] { + const columns: ColumnDefinition[] = []; + + let curDefinitionPos = + database.format.tableDefinitionPage.realIndexStartOffset + + realIndexCount * database.format.tableDefinitionPage.realIndexEntrySize; + + let namesCursorPos = curDefinitionPos + columnCount * database.format.tableDefinitionPage.columnsDefinition.entrySize; + + for (let i = 0; i < columnCount; ++i) { + const columnBuffer = definitionBuffer.slice( + curDefinitionPos, + curDefinitionPos + database.format.tableDefinitionPage.columnsDefinition.entrySize + ); + + const type = getColumnType( + definitionBuffer.readUInt8(curDefinitionPos + database.format.tableDefinitionPage.columnsDefinition.typeOffset) + 
); + + const nameLength = definitionBuffer.readUIntLE( + namesCursorPos, + database.format.tableDefinitionPage.columnNames.nameLengthSize + ); + namesCursorPos += database.format.tableDefinitionPage.columnNames.nameLengthSize; + const name = uncompressText(definitionBuffer.slice(namesCursorPos, namesCursorPos + nameLength), database.format); + namesCursorPos += nameLength; + + const column: ColumnDefinition = { + name, + type, + index: columnBuffer.readUInt8(database.format.tableDefinitionPage.columnsDefinition.indexOffset), + variableIndex: columnBuffer.readUInt8(database.format.tableDefinitionPage.columnsDefinition.variableIndexOffset), + size: + type === ColumnTypes.Boolean + ? 0 + : columnBuffer.readUInt16LE(database.format.tableDefinitionPage.columnsDefinition.sizeOffset), + fixedIndex: columnBuffer.readUInt16LE(database.format.tableDefinitionPage.columnsDefinition.fixedIndexOffset), + ...parseColumnFlags(columnBuffer.readUInt8(database.format.tableDefinitionPage.columnsDefinition.flagsOffset)), + }; + + if (type === ColumnTypes.Numeric) { + column.precision = columnBuffer.readUInt8(11); + column.scale = columnBuffer.readUInt8(12); + } + + columns.push(column); + + curDefinitionPos += database.format.tableDefinitionPage.columnsDefinition.entrySize; + } + + return columns; +} diff --git a/src/table/getDataFromPage.ts b/src/table/getDataFromPage.ts new file mode 100644 index 00000000..b36faece --- /dev/null +++ b/src/table/getDataFromPage.ts @@ -0,0 +1,113 @@ +import { Database } from "../Database.js"; +import type { ColumnDefinition } from "../column.js"; +import { readFieldValue } from "../data/index.js"; +import { ColumnTypes, type Value } from "../types.js"; +import { getBitmapValue, roundToFullByte } from "../util.js"; + +type RecordOffset = [start: number, end: number]; +export async function getDataFromPage( + database: Database, + variableColumnCount: number, + pageBuffer: Buffer, + recordOffsets: RecordOffset[], + columns: ReadonlyArray +): Promise<{ 
[column: string]: Value }[]> { + const lastColumnIndex = Math.max(...columns.map((c) => c.index), 0); + const data: { [column: string]: Value }[] = []; + for (const [recordStart, recordEnd] of recordOffsets) { + const rowColumnCount = pageBuffer.readUIntLE(recordStart, database.format.dataPage.record.columnCountSize); + + const bitmaskSize = roundToFullByte(rowColumnCount); + + let rowVariableColumnCount = 0; + const variableColumnOffsets: number[] = []; + if (variableColumnCount > 0) { + switch (database.format.dataPage.record.variableColumnCountSize) { + case 1: { + rowVariableColumnCount = pageBuffer.readUInt8(recordEnd - bitmaskSize); + + // https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/write.c#L125-L147 + const recordLength = recordEnd - recordStart + 1; + let jumpCount = Math.floor((recordLength - 1) / 256); + const columnPointer = recordEnd - bitmaskSize - jumpCount - 1; + + /* If last jump is a dummy value, ignore it */ + if ((columnPointer - recordStart - rowVariableColumnCount) / 256 < jumpCount) { + --jumpCount; + } + + let jumpsUsed = 0; + for (let i = 0; i < rowVariableColumnCount + 1; ++i) { + while ( + jumpsUsed < jumpCount && + i === pageBuffer.readUInt8(recordEnd - bitmaskSize - jumpsUsed - 1) + ) { + ++jumpsUsed; + } + variableColumnOffsets.push(pageBuffer.readUInt8(columnPointer - i) + jumpsUsed * 256); + } + break; + } + case 2: { + rowVariableColumnCount = pageBuffer.readUInt16LE(recordEnd - bitmaskSize - 1); + + // https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/write.c#L115-L124 + for (let i = 0; i < rowVariableColumnCount + 1; ++i) { + variableColumnOffsets.push(pageBuffer.readUInt16LE(recordEnd - bitmaskSize - 3 - i * 2)); + } + break; + } + } + } + + const rowFixedColumnCount = rowColumnCount - rowVariableColumnCount; + + const nullMask = pageBuffer.slice( + recordEnd - bitmaskSize + 1, + recordEnd - bitmaskSize + 1 + roundToFullByte(lastColumnIndex + 
1) + ); + let fixedColumnsFound = 0; + + const recordValues: { [column: string]: Value } = {}; + for (const column of [...columns].sort((a, b) => a.index - b.index)) { + /** + * undefined = will be set later. Undefined will never be returned to the user. + * null = actually null + */ + let value: Value | undefined = undefined; + let start: number; + let size: number; + + if (!getBitmapValue(nullMask, column.index)) { + value = null; + } + + if (column.fixedLength && fixedColumnsFound < rowFixedColumnCount) { + const colStart = column.fixedIndex + database.format.dataPage.record.columnCountSize; + start = recordStart + colStart; + size = column.size; + ++fixedColumnsFound; + } else if (!column.fixedLength && column.variableIndex < rowVariableColumnCount) { + const colStart = variableColumnOffsets[column.variableIndex]!; + start = recordStart + colStart; + size = variableColumnOffsets[column.variableIndex + 1]! - colStart; + } else { + start = 0; + value = null; + size = 0; + } + + if (column.type === ColumnTypes.Boolean) { + value = value === undefined; + } else if (value !== null) { + value = await readFieldValue(pageBuffer.slice(start, start + size), column, database); + } + + recordValues[column.name] = value as Value; + } + + data.push(recordValues); + } + + return data; +} diff --git a/src/table/getDataPage.ts b/src/table/getDataPage.ts new file mode 100644 index 00000000..7518eb21 --- /dev/null +++ b/src/table/getDataPage.ts @@ -0,0 +1,13 @@ +import { Database } from "../Database.js"; +import { PageType, assertPageType } from "../PageType.js"; + +export async function getDataPage(name: string, database: Database, firstDefinitionPage: number, page: number) { + const pageBuffer = await database.getPage(page); + assertPageType(pageBuffer, PageType.DataPage); + + if (pageBuffer.readUInt32LE(4) !== firstDefinitionPage) { + throw new Error(`Data page ${page} does not belong to table ${name}`); + } + + return pageBuffer; +} diff --git a/src/table/getRecordOffsets.ts 
b/src/table/getRecordOffsets.ts new file mode 100644 index 00000000..2e98b9f7 --- /dev/null +++ b/src/table/getRecordOffsets.ts @@ -0,0 +1,27 @@ +import { Database } from "../Database.js"; + +export type RecordOffset = [start: number, end: number]; +export function getRecordOffsets(database: Database, pageBuffer: Buffer): RecordOffset[] { + const recordCount = pageBuffer.readUInt16LE(database.format.dataPage.recordCountOffset); + const recordOffsets: RecordOffset[] = []; + for (let record = 0; record < recordCount; ++record) { + const offsetMask = 0x1fff; + + let recordStart = pageBuffer.readUInt16LE(database.format.dataPage.record.countOffset + 2 + record * 2); + if (recordStart & 0x4000) { + // deleted record + continue; + } + recordStart &= offsetMask; // remove flags + + const nextStart = + record === 0 + ? database.format.pageSize + : pageBuffer.readUInt16LE(database.format.dataPage.record.countOffset + record * 2) & offsetMask; + const recordLength = nextStart - recordStart; + const recordEnd = recordStart + recordLength - 1; + + recordOffsets.push([recordStart, recordEnd]); + } + return recordOffsets; +} diff --git a/src/types.ts b/src/types.ts index 44448136..f2879a02 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,3 +1,5 @@ +import type { Column } from "./column.js"; + export const ColumnTypes = { Boolean: "boolean", Byte: "byte", @@ -18,7 +20,7 @@ export const ColumnTypes = { DateTimeExtended: "datetimextended", } as const; -export type ColumnType = typeof ColumnTypes[keyof typeof ColumnTypes]; +export type ColumnType = (typeof ColumnTypes)[keyof typeof ColumnTypes]; export type ValueMap = { [ColumnTypes.Binary]: Buffer; @@ -46,3 +48,24 @@ export interface SortOrder { value: number; version: number; } + +export type Table = { + get name(): string; + get rowCount(): number; + get columnCount(): number; + + getColumn(name: string): Column; + getColumns(): Column[]; + getColumnNames(): string[]; + + getData< + TRow extends { + [column in TColumn]: Value; 
+ }, + TColumn extends string = string + >(options?: { + columns?: ReadonlyArray; + rowOffset?: number; + rowLimit?: number; + }): Promise; +}; diff --git a/src/usage-map.ts b/src/usage-map.ts index 2c75c915..2264ef56 100644 --- a/src/usage-map.ts +++ b/src/usage-map.ts @@ -5,12 +5,12 @@ import { PageType, assertPageType } from "./PageType.js"; /** * @see https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/HACKING#L556-L622 */ -export function findMapPages(buffer: Buffer, database: Database): number[] { +export async function findMapPages(buffer: Buffer, database: Database): Promise { switch (buffer[0]) { case 0x00: return findMapPages0(buffer); case 0x01: - return findMapPages1(buffer, database); + return await findMapPages1(buffer, database); default: throw new Error("Unknown usage map type"); } @@ -28,7 +28,7 @@ function findMapPages0(buffer: Buffer): number[] { /** * @see https://github.com/brianb/mdbtools/blob/d6f5745d949f37db969d5f424e69b54f0da60b9b/src/libmdb/map.c#L44-L84 */ -function findMapPages1(buffer: Buffer, database: Database): number[] { +async function findMapPages1(buffer: Buffer, database: Database): Promise { const bitmapLength = (database.format.pageSize - 4) * 8; const mapCount = Math.floor((buffer.length - 1) / 4); @@ -39,7 +39,7 @@ function findMapPages1(buffer: Buffer, database: Database): number[] { continue; } - const pageBuffer = database.getPage(page); + const pageBuffer = await database.getPage(page); assertPageType(pageBuffer, PageType.PageUsageBitmaps); const bitmap = pageBuffer.slice(4); diff --git a/src/util.ts b/src/util.ts index 0b37a8a9..34854a5d 100644 --- a/src/util.ts +++ b/src/util.ts @@ -58,3 +58,15 @@ export function fixBufferLength(buffer: Buffer, length: number, padByte = 0): Bu export function isInRange(from: number, to: number, value: number): boolean { return from <= value && value <= to; } + +export function memoPromise(fn: () => Promise): () => Promise { + let promise: Promise | 
undefined; + + return async () => { + if (!promise) { + promise = fn(); + } + + return await promise; + }; +} diff --git a/test/bigint.spec.ts b/test/bigint.spec.ts index a53f1777..8c690617 100644 --- a/test/bigint.spec.ts +++ b/test/bigint.spec.ts @@ -17,10 +17,10 @@ describe("BigInt", () => { /** * @see https://github.com/jahlborn/jackcess/blob/3f75e95a21d9a9e3486519511cdd6178e3c2e3e4/src/test/java/com/healthmarketscience/jackcess/DatabaseTest.java#L471-L516 */ - it("getData(): returns correct big int data", () => { + it("getData(): returns correct big int data", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table"); - const rows = table.getData(); + const table = await reader.getTable("Table"); + const rows = await table.getData(); expect(rows.length).to.eq(1); diff --git a/test/currency.spec.ts b/test/currency.spec.ts index 9c9af19e..e7dfe102 100644 --- a/test/currency.spec.ts +++ b/test/currency.spec.ts @@ -14,10 +14,10 @@ describe("Currency", () => { buffer = readFileSync(path); }); - it("getData(): returns correct currency data", () => { + it("getData(): returns correct currency data", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table"); - const rows = table.getData(); + const table = await reader.getTable("Table"); + const rows = await table.getData(); expect(rows.length).to.eq(1); diff --git a/test/empty.spec.ts b/test/empty.spec.ts index 312eae1e..c1cb5bb0 100644 --- a/test/empty.spec.ts +++ b/test/empty.spec.ts @@ -16,9 +16,9 @@ describe("empty", () => { buffer = readFileSync(filename); }); - it("should have no tables", () => { + it("should have no tables", async () => { const reader = new MDBReader(buffer); - const tableNames = reader.getTableNames(); + const tableNames = await reader.getTableNames(); expect(tableNames).to.deep.eq([]); }); }); diff --git a/test/encryption/encryption.spec.ts b/test/encryption/encryption.spec.ts index 63e6a210..34d5b3b4 100644 --- 
a/test/encryption/encryption.spec.ts +++ b/test/encryption/encryption.spec.ts @@ -20,7 +20,7 @@ describe("Encryption", () => { it("should be able to read a page", function () { this.timeout(5000); // node 20 in CI is slow const reader = new MDBReader(buffer, { password }); - expect(reader.getTableNames()).to.deep.eq(["Table1"]); + return expect(reader.getTableNames()).to.eventually.deep.eq(["Table1"]); }); }); diff --git a/test/longtext.spec.ts b/test/longtext.spec.ts index dd422db2..0b0e41c6 100644 --- a/test/longtext.spec.ts +++ b/test/longtext.spec.ts @@ -4,11 +4,12 @@ import { resolve } from "path"; import MDBReader from "../src/index.js"; describe("LongText", () => { - it("multiple pages", () => { + it("multiple pages", async () => { const path = resolve("test/data/V2016/longtext.accdb"); const buffer = readFileSync(path); const reader = new MDBReader(buffer); - const data = reader.getTable("Table1").getData(); + const table = await reader.getTable("Table1"); + const data = await table.getData(); expect(data[0]!['LongText']).to.have.length(5000); }); }); diff --git a/test/numeric.spec.ts b/test/numeric.spec.ts index a514fc76..8b4d0e91 100644 --- a/test/numeric.spec.ts +++ b/test/numeric.spec.ts @@ -10,19 +10,19 @@ describe("Numeric", () => { (filename) => { const path = resolve("test/data", filename); - let buffer: Buffer + let buffer: Buffer; beforeEach(() => { buffer = readFileSync(path); }); - + /** * @see https://github.com/jahlborn/jackcess/blob/3f75e95a21d9a9e3486519511cdd6178e3c2e3e4/src/test/java/com/healthmarketscience/jackcess/DatabaseTest.java#L471-L516 - */ - it("getData(): returns correct numeric data", () => { + */ + it("getData(): returns correct numeric data", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("test"); - const rows = table.getData(); + const table = await reader.getTable("test"); + const rows = await table.getData(); expect(rows.length).to.eq(1); diff --git a/test/ole.spec.ts b/test/ole.spec.ts index 
95ecd136..0b1c07fd 100644 --- a/test/ole.spec.ts +++ b/test/ole.spec.ts @@ -10,10 +10,10 @@ describe("OLE", () => { buffer = readFileSync(path); }); - it("reads ole data ", () => { + it("reads ole data ", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table1"); - table.getData(); + const table = await reader.getTable("Table1"); + await table.getData(); // TODO: check for correct values }); }); diff --git a/test/reader.spec.ts b/test/reader.spec.ts index 79ff9671..4bc1d757 100644 --- a/test/reader.spec.ts +++ b/test/reader.spec.ts @@ -2,7 +2,10 @@ import { resolve } from "path"; import { readFileSync } from "fs"; import MDBReader from "../src/index.js"; import forEach from "mocha-each"; -import { expect } from "chai"; +import chai from "chai"; +import chaiAsPromised from "chai-as-promised"; +chai.use(chaiAsPromised); +const { expect } = chai; describe("MDBReader", () => { forEach([ @@ -21,22 +24,22 @@ describe("MDBReader", () => { }); describe("getTable()", () => { - it("returns table", () => { + it("returns table", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table1"); + const table = await reader.getTable("Table1"); expect(table.name).to.eq("Table1"); expect(table.rowCount).to.eq(2); }); - it("throws error for unknown table", () => { + it("throws error for unknown table", async () => { const reader = new MDBReader(buffer); - expect(() => reader.getTable("unknown")).to.throw(); + await expect(reader.getTable("unknown")).to.eventually.be.rejected; }); }); - it("getTableNames()", () => { + it("getTableNames()", async () => { const reader = new MDBReader(buffer); - const tableNames = reader.getTableNames(); + const tableNames = await reader.getTableNames(); expect(tableNames).to.deep.eq(["Table1", "Table2", "Table3", "Table4"]); }); diff --git a/test/table.spec.ts b/test/table.spec.ts index b3379e3b..ca00a3a3 100644 --- a/test/table.spec.ts +++ b/test/table.spec.ts @@ -21,10 +21,10 @@ 
describe("Table", () => { buffer = readFileSync(path); }); - it("getData()", () => { + it("getData()", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table1"); - const rows = table.getData(); + const table = await reader.getTable("Table1"); + const rows = await table.getData(); if (reverseRows) { rows.reverse(); @@ -52,9 +52,9 @@ describe("Table", () => { }); describe("getColumns()", () => { - it("returns correct data types", () => { + it("returns correct data types", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table1"); + const table = await reader.getTable("Table1"); const columns = table.getColumns(); expect(columns[0]!.name).to.eq("A"); @@ -87,9 +87,9 @@ describe("Table", () => { expect(columns[8]!.type).to.eq("boolean"); }); - it("can handle many columns", () => { + it("can handle many columns", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table2"); + const table = await reader.getTable("Table2"); const columns = table.getColumns(); expect(columns.length).to.eq(89); @@ -101,10 +101,10 @@ describe("Table", () => { }); }); - it("getColumnNames()", () => { + it("getColumnNames()", async () => { const reader = new MDBReader(buffer); - const table = reader.getTable("Table1"); - const columnNames = table.getColumnNames(); + const table = await reader.getTable("Table1"); + const columnNames = await table.getColumnNames(); expect(columnNames).to.deep.eq(["A", "B", "C", "D", "E", "F", "G", "H", "I"]); }); @@ -115,54 +115,54 @@ describe("Table", () => { const path = resolve("test/data/real/ASampleDatabase.accdb"); let table: Table; - before(() => { + before(async () => { const buffer = readFileSync(path); const reader = new MDBReader(buffer); - table = reader.getTable("Asset Items"); + table = await reader.getTable("Asset Items"); }); - it("no options", () => { - const rows = table.getData(); + it("no options", async () => { + const rows = await 
table.getData(); expect(rows.length).to.eq(65); const assetNumbers = rows.map((row) => row["Asset No"]); expect(new Set(assetNumbers).size).to.eq(65); }); - it("with rowOffset", () => { - const rows = table.getData({ rowOffset: 30 }); + it("with rowOffset", async () => { + const rows = await table.getData({ rowOffset: 30 }); expect(rows.length).to.eq(35); const assetNumbers = rows.map((row) => row["Asset No"]); expect(new Set(assetNumbers).size).to.eq(35); }); - it("with rowOffset > rowCount", () => { - const rows = table.getData({ rowOffset: 100 }); + it("with rowOffset > rowCount", async () => { + const rows = await table.getData({ rowOffset: 100 }); expect(rows.length).to.eq(0); const assetNumbers = rows.map((row) => row["Asset No"]); expect(new Set(assetNumbers).size).to.eq(0); }); - it("with rowLimit", () => { - const rows = table.getData({ rowLimit: 40 }); + it("with rowLimit", async () => { + const rows = await table.getData({ rowLimit: 40 }); expect(rows.length).to.eq(40); const assetNumbers = rows.map((row) => row["Asset No"]); expect(new Set(assetNumbers).size).to.eq(40); }); - it("with rowLimit > rowCount", () => { - const rows = table.getData({ rowLimit: 100 }); + it("with rowLimit > rowCount", async () => { + const rows = await table.getData({ rowLimit: 100 }); expect(rows.length).to.eq(65); const assetNumbers = rows.map((row) => row["Asset No"]); expect(new Set(assetNumbers).size).to.eq(65); }); - it("with rowOffset & rowLimit", () => { - const rows = table.getData({ rowOffset: 30, rowLimit: 15 }); + it("with rowOffset & rowLimit", async () => { + const rows = await table.getData({ rowOffset: 30, rowLimit: 15 }); expect(rows.length).to.eq(15); const assetNumbers = rows.map((row) => row["Asset No"]); @@ -171,26 +171,26 @@ describe("Table", () => { }); describe("V2016/withdeletedcol.accdb", () => { - it("with offset column indices due to a column deletion", () => { + it("with offset column indices due to a column deletion", async () => { const 
withDeletedColPath = resolve("test/data/V2016/withdeletedcol.accdb"); const buffer = readFileSync(withDeletedColPath); const reader = new MDBReader(buffer); - const withDeletedColTable = reader.getTable("Table1"); + const withDeletedColTable = await reader.getTable("Table1"); - expect(withDeletedColTable.getData()).to.deep.eq([ + expect(await withDeletedColTable.getData()).to.deep.eq([ { col1: 0, col2: 1, col3: 2, col5: 4, col6: 5, col7: 6, col8: 7 }, ]); }); }); describe("V2016/withinsertedcol.accdb", () => { - it("with offset column indices due to a column insertion", () => { + it("with offset column indices due to a column insertion", async () => { const withInsertedColPath = resolve("test/data/V2016/withinsertedcol.accdb"); const buffer = readFileSync(withInsertedColPath); const reader = new MDBReader(buffer); - const withInsertedColTable = reader.getTable("Table1"); + const withInsertedColTable = await reader.getTable("Table1"); - expect(withInsertedColTable.getData()).to.deep.eq([{ col1: true, col2: true, col3: false }]); + expect(await withInsertedColTable.getData()).to.deep.eq([{ col1: true, col2: true, col3: false }]); }); }); }); diff --git a/tsconfig.browser.json b/tsconfig.browser.json index 5ccf6614..c34a7f61 100644 --- a/tsconfig.browser.json +++ b/tsconfig.browser.json @@ -7,5 +7,5 @@ "declaration": false, "verbatimModuleSyntax": true }, - "files": ["src/index.ts", "typings/browserify-aes.d.ts", "typings/create-hash.d.ts"] + "files": ["src/index.ts"] } diff --git a/typings/browserify-aes.d.ts b/typings/browserify-aes.d.ts deleted file mode 100644 index 850e2b5e..00000000 --- a/typings/browserify-aes.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -declare module "browserify-aes/browser.js" { - import { createDecipheriv as nodeCreateDecipheriv } from "crypto"; - - export const createDecipheriv: typeof nodeCreateDecipheriv; -} diff --git a/typings/create-hash.d.ts b/typings/create-hash.d.ts deleted file mode 100644 index 51d0b713..00000000 --- 
a/typings/create-hash.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -declare module "create-hash" { - import { createHash as nodeCreateHash } from "crypto"; - - const createHash: typeof nodeCreateHash; - export default createHash; -}