diff --git a/README.md b/README.md index 8da44abbc..1e48073a1 100644 --- a/README.md +++ b/README.md @@ -170,12 +170,13 @@ Each `Record` instance has the following instance properties: `id`, `attestation Each `Record` instance has the following instance methods: - **`data`** - _`object`_: an object with the following convenience methods that read out the data of the record entry in the following formats: - - **`text`** - _`function`_: produces a textual representation of the data. - - **`json`** - _`function`_: if the value is JSON data, this method will return a parsed JSON object. - - **`stream`** - _`function`_: returns the raw stream of bytes for the data. + - **`blob`** - _`function`_: returns the data as a [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). + - **`bytes`** - _`function`_: returns the data as a raw byte array in `Uint8Array` format. + - **`json`** - _`function`_: returns a parsed JSON object. + - **`stream`** - _`function`_: returns the data as a raw stream of bytes. + - **`text`** - _`function`_: returns the data as a string. - **`send`** - _`function`_: sends the record the instance represents to the DWeb Node endpoints of a provided DID. - **`update`** - _`function`_: takes in a new request object matching the expected method signature of a `write` and overwrites the record. This is a convenience method that allows you to easily overwrite records with less verbosity. -- **`delete`** - _`function`_: generates a `delete` entry tombstone for the record. This is a convenience method that allows you to easily delete records with less verbosity. 
### **`web5.dwn.records.query(request)`** diff --git a/package-lock.json b/package-lock.json index a0312fdb9..1bc7bef2c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -68,9 +68,9 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.4.tgz", - "integrity": "sha512-r1IONyb6Ia+jYR2vvIDhdWdlTGhqbBoFqLTQidzZ4kepUFH15ejXvFHxCVbtl7BOXIudsIubf4E81xeA3h3IXA==", + "version": "7.23.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz", + "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==", "dev": true, "dependencies": { "@babel/highlight": "^7.23.4", @@ -80,77 +80,6 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - 
"node_modules/@babel/code-frame/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/code-frame/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/helper-validator-identifier": { "version": "7.22.20", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", @@ -174,77 +103,6 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - 
"node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", @@ -660,9 +518,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", - "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -705,9 +563,9 @@ "dev": true }, "node_modules/@eslint/js": { - "version": "8.54.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.54.0.tgz", - "integrity": "sha512-ut5V+D+fOoWPgGGNj83GGjnntO39xDy6DWxO0wb7Jp3DcMX0TfIqdzHF85VTQkerdyGmuuMD9AKAo5KiNlf/AQ==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", + "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1201,9 +1059,9 @@ } }, "node_modules/@rollup/pluginutils": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.5.tgz", - "integrity": "sha512-6aEYR910NyP73oHiJglti74iRyOwgFU4x3meH/H8OJx6Ry0j6cOVZ5X/wTvub7G7Ao6qaHBEaNsV3GLJkSsF+Q==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.0.tgz", + "integrity": "sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==", "dev": true, "dependencies": { "@types/estree": 
"^1.0.0", @@ -1223,9 +1081,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.6.0.tgz", - "integrity": "sha512-keHkkWAe7OtdALGoutLY3utvthkGF+Y17ws9LYT8pxMBYXaCoH/8dXS2uzo6e8+sEhY7y/zi5RFo22Dy2lFpDw==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.6.1.tgz", + "integrity": "sha512-0WQ0ouLejaUCRsL93GD4uft3rOmB8qoQMU05Kb8CmMtMBe7XUDLAltxVZI1q6byNqEtU7N1ZX1Vw5lIpgulLQA==", "cpu": [ "arm" ], @@ -1236,9 +1094,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.6.0.tgz", - "integrity": "sha512-y3Kt+34smKQNWilicPbBz/MXEY7QwDzMFNgwEWeYiOhUt9MTWKjHqe3EVkXwT2fR7izOvHpDWZ0o2IyD9SWX7A==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.6.1.tgz", + "integrity": "sha512-1TKm25Rn20vr5aTGGZqo6E4mzPicCUD79k17EgTLAsXc1zysyi4xXKACfUbwyANEPAEIxkzwue6JZ+stYzWUTA==", "cpu": [ "arm64" ], @@ -1249,9 +1107,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.6.0.tgz", - "integrity": "sha512-oLzzxcUIHltHxOCmaXl+pkIlU+uhSxef5HfntW7RsLh1eHm+vJzjD9Oo4oUKso4YuP4PpbFJNlZjJuOrxo8dPg==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.6.1.tgz", + "integrity": "sha512-cEXJQY/ZqMACb+nxzDeX9IPLAg7S94xouJJCNVE5BJM8JUEP4HeTF+ti3cmxWeSJo+5D+o8Tc0UAWUkfENdeyw==", "cpu": [ "arm64" ], @@ -1262,9 +1120,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.6.0.tgz", - "integrity": 
"sha512-+ANnmjkcOBaV25n0+M0Bere3roeVAnwlKW65qagtuAfIxXF9YxUneRyAn/RDcIdRa7QrjRNJL3jR7T43ObGe8Q==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.6.1.tgz", + "integrity": "sha512-LoSU9Xu56isrkV2jLldcKspJ7sSXmZWkAxg7sW/RfF7GS4F5/v4EiqKSMCFbZtDu2Nc1gxxFdQdKwkKS4rwxNg==", "cpu": [ "x64" ], @@ -1275,9 +1133,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.6.0.tgz", - "integrity": "sha512-tBTSIkjSVUyrekddpkAqKOosnj1Fc0ZY0rJL2bIEWPKqlEQk0paORL9pUIlt7lcGJi3LzMIlUGXvtNi1Z6MOCQ==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.6.1.tgz", + "integrity": "sha512-EfI3hzYAy5vFNDqpXsNxXcgRDcFHUWSx5nnRSCKwXuQlI5J9dD84g2Usw81n3FLBNsGCegKGwwTVsSKK9cooSQ==", "cpu": [ "arm" ], @@ -1288,9 +1146,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.6.0.tgz", - "integrity": "sha512-Ed8uJI3kM11de9S0j67wAV07JUNhbAqIrDYhQBrQW42jGopgheyk/cdcshgGO4fW5Wjq97COCY/BHogdGvKVNQ==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.6.1.tgz", + "integrity": "sha512-9lhc4UZstsegbNLhH0Zu6TqvDfmhGzuCWtcTFXY10VjLLUe4Mr0Ye2L3rrtHaDd/J5+tFMEuo5LTCSCMXWfUKw==", "cpu": [ "arm64" ], @@ -1301,9 +1159,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.6.0.tgz", - "integrity": "sha512-mZoNQ/qK4D7SSY8v6kEsAAyDgznzLLuSFCA3aBHZTmf3HP/dW4tNLTtWh9+LfyO0Z1aUn+ecpT7IQ3WtIg3ViQ==", + "version": "4.6.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.6.1.tgz", + "integrity": "sha512-FfoOK1yP5ksX3wwZ4Zk1NgyGHZyuRhf99j64I5oEmirV8EFT7+OhUZEnP+x17lcP/QHJNWGsoJwrz4PJ9fBEXw==", "cpu": [ "arm64" ], @@ -1314,9 +1172,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.6.0.tgz", - "integrity": "sha512-rouezFHpwCqdEXsqAfNsTgSWO0FoZ5hKv5p+TGO5KFhyN/dvYXNMqMolOb8BkyKcPqjYRBeT+Z6V3aM26rPaYg==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.6.1.tgz", + "integrity": "sha512-DNGZvZDO5YF7jN5fX8ZqmGLjZEXIJRdJEdTFMhiyXqyXubBa0WVLDWSNlQ5JR2PNgDbEV1VQowhVRUh+74D+RA==", "cpu": [ "x64" ], @@ -1327,9 +1185,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.6.0.tgz", - "integrity": "sha512-Bbm+fyn3S6u51urfj3YnqBXg5vI2jQPncRRELaucmhBVyZkbWClQ1fEsRmdnCPpQOQfkpg9gZArvtMVkOMsh1w==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.6.1.tgz", + "integrity": "sha512-RkJVNVRM+piYy87HrKmhbexCHg3A6Z6MU0W9GHnJwBQNBeyhCJG9KDce4SAMdicQnpURggSvtbGo9xAWOfSvIQ==", "cpu": [ "x64" ], @@ -1340,9 +1198,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.6.0.tgz", - "integrity": "sha512-+MRMcyx9L2kTrTUzYmR61+XVsliMG4odFb5UmqtiT8xOfEicfYAGEuF/D1Pww1+uZkYhBqAHpvju7VN+GnC3ng==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.6.1.tgz", + "integrity": "sha512-v2FVT6xfnnmTe3W9bJXl6r5KwJglMK/iRlkKiIFfO6ysKs0rDgz7Cwwf3tjldxQUrHL9INT/1r4VA0n9L/F1vQ==", "cpu": [ "arm64" ], @@ -1353,9 
+1211,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.6.0.tgz", - "integrity": "sha512-rxfeE6K6s/Xl2HGeK6cO8SiQq3k/3BYpw7cfhW5Bk2euXNEpuzi2cc7llxx1si1QgwfjNtdRNTGqdBzGlFZGFw==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.6.1.tgz", + "integrity": "sha512-YEeOjxRyEjqcWphH9dyLbzgkF8wZSKAKUkldRY6dgNR5oKs2LZazqGB41cWJ4Iqqcy9/zqYgmzBkRoVz3Q9MLw==", "cpu": [ "ia32" ], @@ -1366,9 +1224,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.6.0.tgz", - "integrity": "sha512-QqmCsydHS172Y0Kc13bkMXvipbJSvzeglBncJG3LsYJSiPlxYACz7MmJBs4A8l1oU+jfhYEIC/+AUSlvjmiX/g==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.6.1.tgz", + "integrity": "sha512-0zfTlFAIhgz8V2G8STq8toAjsYYA6eci1hnXuyOTUFnymrtJwnS6uGKiv3v5UrPZkBlamLvrLV2iiaeqCKzb0A==", "cpu": [ "x64" ], @@ -1530,6 +1388,11 @@ } } }, + "node_modules/@tbd54566975/dwn-sdk-js/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "node_modules/@tbd54566975/dwn-sdk-js/node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -1538,6 +1401,20 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/@tbd54566975/dwn-sdk-js/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + 
"node_modules/@tbd54566975/dwn-sdk-js/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/@tootallnate/quickjs-emscripten": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", @@ -1800,9 +1677,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.10.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.1.tgz", - "integrity": "sha512-T2qwhjWwGH81vUEx4EXmBKsTJRXFXNZTL4v0gi01+zyBmCwzE6TyHszqX01m+QHTEq+EZNo13NeJIdEqf+Myrg==", + "version": "20.10.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.3.tgz", + "integrity": "sha512-XJavIpZqiXID5Yxnxv3RUDKTN5b81ddNC3ecsA0SoFXz/QU8OGBwZGMomiq0zw+uuqbL/krztv/DINAQ/EV4gg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -2347,18 +2224,6 @@ "node": ">=18.0.0" } }, - "node_modules/@web/dev-server-core/node_modules/isbinaryfile": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-5.0.0.tgz", - "integrity": "sha512-UDdnyGvMajJUWCkib7Cei/dvyJrrvo4FIrsvSFWdPpXSUorzXrDJ0S+X5Q4ZlasfPjca4yqCNNsjbCeiy8FFeg==", - "dev": true, - "engines": { - "node": ">= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/gjtorikian/" - } - }, "node_modules/@web/dev-server-core/node_modules/lru-cache": { "version": "8.0.5", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz", @@ -2368,27 +2233,6 @@ "node": ">=16.14" } }, - "node_modules/@web/dev-server-core/node_modules/ws": { - "version": "7.5.9", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", - "integrity": 
"sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", - "dev": true, - "engines": { - "node": ">=8.3.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/@web/dev-server-rollup": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/@web/dev-server-rollup/-/dev-server-rollup-0.6.1.tgz", @@ -2406,49 +2250,6 @@ "node": ">=18.0.0" } }, - "node_modules/@web/dev-server-rollup/node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/@web/dev-server-rollup/node_modules/tr46": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", - "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", - "dev": true, - "dependencies": { - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@web/dev-server-rollup/node_modules/webidl-conversions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", - "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/@web/dev-server-rollup/node_modules/whatwg-url": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", - "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", - "dev": true, - "dependencies": { - "tr46": "^3.0.0", - "webidl-conversions": "^7.0.0" - }, 
- "engines": { - "node": ">=12" - } - }, "node_modules/@web/parse5-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/@web/parse5-utils/-/parse5-utils-2.1.0.tgz", @@ -2522,18 +2323,6 @@ "node": ">=18.0.0" } }, - "node_modules/@web/test-runner-commands/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@web/test-runner-core": { "version": "0.13.0", "resolved": "https://registry.npmjs.org/@web/test-runner-core/-/test-runner-core-0.13.0.tgz", @@ -2571,15 +2360,6 @@ "node": ">=18.0.0" } }, - "node_modules/@web/test-runner-core/node_modules/source-map": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, "node_modules/@web/test-runner-coverage-v8": { "version": "0.8.0", "resolved": "https://registry.npmjs.org/@web/test-runner-coverage-v8/-/test-runner-coverage-v8-0.8.0.tgz", @@ -2631,15 +2411,6 @@ "node": ">=18.0.0" } }, - "node_modules/@web/test-runner/node_modules/source-map": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, "node_modules/@web5/agent": { "resolved": "packages/agent", "link": true @@ -3019,18 +2790,15 @@ } }, "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dev": true, "dependencies": { - "color-convert": "^2.0.1" + "color-convert": "^1.9.0" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "node": ">=4" } }, "node_modules/anymatch": { @@ -3529,9 +3297,9 @@ } }, "node_modules/browserslist": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.1.tgz", - "integrity": "sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ==", + "version": "4.22.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.2.tgz", + "integrity": "sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==", "dev": true, "funding": [ { @@ -3549,9 +3317,9 @@ ], "peer": true, "dependencies": { - "caniuse-lite": "^1.0.30001541", - "electron-to-chromium": "^1.4.535", - "node-releases": "^2.0.13", + "caniuse-lite": "^1.0.30001565", + "electron-to-chromium": "^1.4.601", + "node-releases": "^2.0.14", "update-browserslist-db": "^1.0.13" }, "bin": { @@ -3751,9 +3519,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001565", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001565.tgz", - "integrity": "sha512-xrE//a3O7TP0vaJ8ikzkD2c2NgcVUvsEe2IvFTntV4Yd1Z9FVzh+gW+enX96L0psrbaFMcVcH2l90xNuGDWc8w==", + "version": "1.0.30001566", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz", + "integrity": "sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA==", "dev": true, "funding": [ { @@ -3831,6 +3599,50 @@ } }, "node_modules/chalk": { 
+ "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk-template": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz", + "integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/chalk-template?sponsor=1" + } + }, + "node_modules/chalk-template/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/chalk-template/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", @@ -3846,19 +3658,43 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/chalk-template": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz", - "integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==", + "node_modules/chalk-template/node_modules/color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "dependencies": { - "chalk": "^4.1.2" + "color-name": "~1.1.4" }, "engines": { - "node": ">=12" + "node": ">=7.0.0" + } + }, + "node_modules/chalk-template/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/chalk-template/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/chalk-template/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" }, - "funding": { - "url": "https://github.com/chalk/chalk-template?sponsor=1" + "engines": { + "node": ">=8" } }, "node_modules/check-error": { @@ -3900,18 +3736,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/chrome-dgram": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/chrome-dgram/-/chrome-dgram-3.0.6.tgz", @@ -3961,6 +3785,18 @@ 
"node": ">=12.13.0" } }, + "node_modules/chrome-launcher/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/chrome-net": { "version": "3.3.4", "resolved": "https://registry.npmjs.org/chrome-net/-/chrome-net-3.3.4.tgz", @@ -4057,6 +3893,39 @@ "node": ">=12" } }, + "node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/cliui/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, "node_modules/cliui/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -4126,21 +3995,18 @@ } }, "node_modules/color-convert": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dev": true, "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" + "color-name": "1.1.3" } }, "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", "dev": true }, "node_modules/command-line-args": { @@ -4522,27 +4388,6 @@ "node": ">= 14" } }, - "node_modules/degenerator/node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "dev": true, - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -4620,9 +4465,9 @@ "integrity": "sha512-S6fWHvCXkZg2IhS4RcVHxwuyVejPR7c+a4Go0xbQ9ps5kILa8viiYQgrM4gfTyeTjJ0ekgJH9gk/BawTpmkbZA==" }, "node_modules/diff": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", + "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", "dev": true, "engines": { "node": ">=0.3.1" @@ -4726,9 +4571,9 @@ "dev": true }, "node_modules/electron-to-chromium": { - "version": "1.4.597", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.597.tgz", - "integrity": "sha512-0XOQNqHhg2YgRVRUrS4M4vWjFCFIP2ETXcXe/0KIQBjXE9Cpy+tgzzYfuq6HGai3hWq0YywtG+5XK8fyG08EjA==", + "version": "1.4.601", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.601.tgz", + "integrity": "sha512-SpwUMDWe9tQu8JX5QCO1+p/hChAi9AE9UpoC3rcHVc+gdCGlbT3SGb5I1klgb952HRIyvt9wZhSz9bNBYz9swA==", "dev": true, "peer": true }, @@ -4942,104 +4787,56 @@ "dev": true }, "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=0.8.0" } }, "node_modules/escodegen": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", - "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, "dependencies": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", - "esutils": "^2.0.2", - "optionator": "^0.8.1" + "estraverse": "^5.2.0", + "esutils": "^2.0.2" }, "bin": { "escodegen": "bin/escodegen.js", "esgenerate": "bin/esgenerate.js" }, "engines": { - "node": ">=4.0" + "node": ">=6.0" }, "optionalDependencies": { "source-map": "~0.6.1" } }, - "node_modules/escodegen/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/escodegen/node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", - "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, "engines": { - "node": ">= 0.8.0" + "node": ">=0.10.0" } }, - "node_modules/escodegen/node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - }, - "engines": { - 
"node": ">= 0.8.0" - } - }, - "node_modules/escodegen/node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/escodegen/node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", - "dependencies": { - "prelude-ls": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/eslint": { - "version": "8.54.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.54.0.tgz", - "integrity": "sha512-NY0DfAkM8BIZDVl6PgSa1ttZbx3xHgJzSNJKYcQglem6CppHyMhRIQkBVSSMaSRnLhig3jsDbEzOjwCVt4AmmA==", - "dev": true, - "peer": true, + "node_modules/eslint": { + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", + "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", + "dev": true, + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.54.0", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.55.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -5173,6 +4970,95 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "peer": true, + "dependencies": { + "color-convert": "^2.0.1" + }, 
+ "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "peer": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "peer": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/eslint/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "peer": true + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", 
+ "dev": true, + "peer": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/eslint/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, "node_modules/eslint/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -5180,6 +5066,19 @@ "dev": true, "peer": true }, + "node_modules/eslint/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/espree": { "version": "9.6.1", "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", @@ -5273,6 +5172,11 @@ "node": ">=6" } }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" + }, "node_modules/events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", @@ -5353,18 +5257,6 @@ "node": ">=8.6.0" } }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - 
"node": ">= 6" - } - }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", @@ -5560,9 +5452,9 @@ "dev": true }, "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "optional": true, @@ -5703,15 +5595,15 @@ } }, "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "dependencies": { - "is-glob": "^4.0.3" + "is-glob": "^4.0.1" }, "engines": { - "node": ">=10.13.0" + "node": ">= 6" } }, "node_modules/glob-to-regexp": { @@ -5892,12 +5784,12 @@ } }, "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true, "engines": { - "node": ">=8" + "node": ">=4" } }, "node_modules/has-property-descriptors": { @@ -6072,16 +5964,7 
@@ "node": ">= 0.8" } }, - "node_modules/http-assert/node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/http-assert/node_modules/http-errors": { + "node_modules/http-errors": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", @@ -6097,29 +5980,13 @@ "node": ">= 0.6" } }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dev": true, - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-errors/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "node_modules/http-errors/node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", "dev": true, "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/http-proxy-agent": { @@ -6783,6 +6650,18 @@ "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", "dev": true }, + "node_modules/isbinaryfile": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/isbinaryfile/-/isbinaryfile-5.0.0.tgz", + "integrity": "sha512-UDdnyGvMajJUWCkib7Cei/dvyJrrvo4FIrsvSFWdPpXSUorzXrDJ0S+X5Q4ZlasfPjca4yqCNNsjbCeiy8FFeg==", + "dev": true, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/gjtorikian/" + } + }, "node_modules/isexe": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", @@ -6824,6 +6703,27 @@ "node": ">=10" } }, + "node_modules/istanbul-lib-report/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/istanbul-reports": { "version": "3.1.6", "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.6.tgz", @@ -6966,6 +6866,16 @@ "node": ">= 10.13.0" } }, + "node_modules/jest-worker/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, "node_modules/jest-worker/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -7182,31 +7092,6 @@ "node": ">= 8" } }, - "node_modules/koa-send/node_modules/depd": { - "version": "1.1.2", - 
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/koa-send/node_modules/http-errors": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", - "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", - "dev": true, - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/koa-static": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/koa-static/-/koa-static-5.0.0.tgz", @@ -7229,31 +7114,6 @@ "ms": "^2.1.1" } }, - "node_modules/koa/node_modules/http-errors": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", - "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", - "dev": true, - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/koa/node_modules/http-errors/node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/last-one-wins": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/last-one-wins/-/last-one-wins-1.0.4.tgz", @@ -7392,6 +7252,92 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, + 
"node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/log-symbols/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/log-symbols/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-symbols/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/log-update": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", @@ -7410,6 +7356,39 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/log-update/node_modules/color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, "node_modules/log-update/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -7640,15 +7619,15 @@ "dev": true }, "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", "dev": true, - "dependencies": { - "minimist": "^1.2.6" - }, "bin": { "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/mkdirp-classic": { @@ -7697,6 +7676,21 @@ "url": "https://opencollective.com/mochajs" } }, + "node_modules/mocha/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/mocha/node_modules/cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -7708,12 +7702,51 @@ "wrap-ansi": "^7.0.0" } }, + "node_modules/mocha/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + 
"dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/mocha/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/mocha/node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/mocha/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, + "node_modules/mocha/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mocha/node_modules/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", @@ -7746,20 +7779,13 @@ "node": "*" } }, - "node_modules/mocha/node_modules/log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "node_modules/mocha/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8" } }, "node_modules/mocha/node_modules/minimatch": { @@ -8035,6 +8061,25 @@ } } }, + "node_modules/node-fetch/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/node-fetch/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/node-fetch/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/node-gyp-build": { "version": "4.7.1", "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.7.1.tgz", @@ -8046,9 +8091,9 @@ } }, "node_modules/node-releases": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", - "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", "dev": true, "peer": true }, @@ -8236,12 +8281,12 @@ } }, 
"node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", "has-symbols": "^1.0.3", "object-keys": "^1.1.1" }, @@ -8390,11 +8435,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-queue/node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" - }, "node_modules/p-timeout": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-5.1.0.tgz", @@ -8678,6 +8718,20 @@ "node": ">=16" } }, + "node_modules/playwright/node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/portfinder": { "version": "1.0.32", "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.32.tgz", @@ -8701,6 +8755,18 @@ "ms": "^2.1.1" } }, + "node_modules/portfinder/node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": 
"sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -8920,6 +8986,28 @@ } } }, + "node_modules/puppeteer-core/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, + "node_modules/puppeteer-core/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, + "node_modules/puppeteer-core/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/puppeteer-core/node_modules/ws": { "version": "8.13.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", @@ -8942,10 +9030,10 @@ } }, "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dev": true, + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz", + "integrity": "sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==", + "dev": true, "dependencies": { "side-channel": "^1.0.4" }, @@ -9058,6 +9146,31 
@@ "node": ">= 0.8" } }, + "node_modules/raw-body/node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dev": true, + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/raw-body/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/readable-stream": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", @@ -9341,9 +9454,9 @@ } }, "node_modules/rollup": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.6.0.tgz", - "integrity": "sha512-R8i5Her4oO1LiMQ3jKf7MUglYV/mhQ5g5OKeld5CnkmPdIGo79FDDQYqPhq/PCVuTQVuxsWgIbDy9F+zdHn80w==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.6.1.tgz", + "integrity": "sha512-jZHaZotEHQaHLgKr8JnQiDT1rmatjgKlMekyksz+yk9jt/8z9quNjnKNRoaM0wd9DC2QKXjmWWuDYtM3jfF8pQ==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -9353,18 +9466,18 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.6.0", - "@rollup/rollup-android-arm64": "4.6.0", - "@rollup/rollup-darwin-arm64": "4.6.0", - "@rollup/rollup-darwin-x64": "4.6.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.6.0", - "@rollup/rollup-linux-arm64-gnu": "4.6.0", - "@rollup/rollup-linux-arm64-musl": "4.6.0", - "@rollup/rollup-linux-x64-gnu": "4.6.0", - "@rollup/rollup-linux-x64-musl": "4.6.0", - "@rollup/rollup-win32-arm64-msvc": "4.6.0", - 
"@rollup/rollup-win32-ia32-msvc": "4.6.0", - "@rollup/rollup-win32-x64-msvc": "4.6.0", + "@rollup/rollup-android-arm-eabi": "4.6.1", + "@rollup/rollup-android-arm64": "4.6.1", + "@rollup/rollup-darwin-arm64": "4.6.1", + "@rollup/rollup-darwin-x64": "4.6.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.6.1", + "@rollup/rollup-linux-arm64-gnu": "4.6.1", + "@rollup/rollup-linux-arm64-musl": "4.6.1", + "@rollup/rollup-linux-x64-gnu": "4.6.1", + "@rollup/rollup-linux-x64-musl": "4.6.1", + "@rollup/rollup-win32-arm64-msvc": "4.6.1", + "@rollup/rollup-win32-ia32-msvc": "4.6.1", + "@rollup/rollup-win32-x64-msvc": "4.6.1", "fsevents": "~2.3.2" } }, @@ -9719,13 +9832,25 @@ "url": "https://opencollective.com/sinon" } }, - "node_modules/sinon/node_modules/diff": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", - "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", + "node_modules/sinon/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "engines": { - "node": ">=0.3.1" + "node": ">=8" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" } }, "node_modules/siphash24": { @@ -9762,6 +9887,39 @@ "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/slice-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, "node_modules/smart-buffer": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", @@ -9846,12 +10004,12 @@ } }, "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "devOptional": true, + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">= 8" } }, "node_modules/source-map-js": { @@ -9907,6 +10065,16 @@ "source-map": "^0.6.0" } }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/sparse-array": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/sparse-array/-/sparse-array-1.3.2.tgz", @@ -9952,6 +10120,91 @@ "escodegen": "^1.8.1" } }, + "node_modules/static-eval/node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/static-eval/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/static-eval/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/static-eval/node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + 
"prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/static-eval/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/static-eval/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/static-eval/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -10221,15 +10474,15 @@ } }, "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dev": true, "dependencies": { - "has-flag": "^4.0.0" + "has-flag": "^3.0.0" }, "engines": { - "node": ">=8" + "node": ">=4" } }, "node_modules/supports-preserve-symlinks-flag": { @@ -10316,9 +10569,9 @@ } }, "node_modules/terser": { 
- "version": "5.24.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.24.0.tgz", - "integrity": "sha512-ZpGR4Hy3+wBEzVEnHvstMvqpD/nABNelQn/z2r0fjVWGQsN3bpOLzQlqDxmb4CDZnXq5lpjnQ+mHQLAOpfM5iw==", + "version": "5.25.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.25.0.tgz", + "integrity": "sha512-we0I9SIsfvNUMP77zC9HG+MylwYYsGFSBG8qm+13oud2Yh+O104y614FRbyjpxys16jZwot72Fpi827YvGzuqg==", "dev": true, "peer": true, "dependencies": { @@ -10459,9 +10712,25 @@ } }, "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dev": true, + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/tr46/node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } }, "node_modules/ts-api-utils": { "version": "1.0.3", @@ -10816,21 +11085,6 @@ "qs": "^6.11.2" } }, - "node_modules/url/node_modules/qs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz", - "integrity": "sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==", - "dev": true, - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", @@ -10932,9 +11186,13 @@ } }, 
"node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "engines": { + "node": ">=12" + } }, "node_modules/webpack": { "version": "5.89.0", @@ -11026,12 +11284,16 @@ "peer": true }, "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "dev": true, "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" } }, "node_modules/which": { @@ -11140,19 +11402,52 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + 
"node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "emoji-regex": "^8.0.0", + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" }, @@ -11205,6 +11500,27 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", 
"dev": true }, + "node_modules/ws": { + "version": "7.5.9", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz", + "integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==", + "dev": true, + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/xsalsa20": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/xsalsa20/-/xsalsa20-1.2.0.tgz", @@ -11503,6 +11819,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/agent/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/agent/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/agent/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"packages/agent/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/agent/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/agent/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -11557,12 +11934,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/agent/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/agent/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/agent/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/agent/node_modules/supports-color": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/api": { "name": "@web5/api", "version": "0.8.2", @@ -11737,6 +12147,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/api/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/api/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/api/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/api/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + 
"packages/api/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/api/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -11791,19 +12262,53 @@ "url": "https://opencollective.com/eslint" } }, + "packages/api/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/api/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/api/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/api/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/common": { "name": 
"@web5/common", "version": "0.2.1", "license": "Apache-2.0", "dependencies": { "level": "8.0.0", - "multiformats": "11.0.2" + "multiformats": "11.0.2", + "readable-stream": "4.4.2" }, "devDependencies": { "@playwright/test": "1.40.1", @@ -11811,6 +12316,7 @@ "@types/chai-as-promised": "7.1.5", "@types/eslint": "8.44.2", "@types/mocha": "10.0.1", + "@types/readable-stream": "4.0.9", "@typescript-eslint/eslint-plugin": "6.4.0", "@typescript-eslint/parser": "6.4.0", "@web/test-runner": "0.18.0", @@ -11830,6 +12336,16 @@ "node": ">=18.0.0" } }, + "packages/common/node_modules/@types/readable-stream": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-4.0.9.tgz", + "integrity": "sha512-4cwuvrmNF96M4Nrx0Eep37RwPB1Mth+nCSezsGRv5+PsFyRvDdLd0pil6gVLcWD/bh69INNdwZ98dJwfHpLohA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "safe-buffer": "~5.1.1" + } + }, "packages/common/node_modules/@typescript-eslint/parser": { "version": "6.4.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.4.0.tgz", @@ -11914,23 +12430,84 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "packages/common/node_modules/eslint": { - "version": "8.47.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", - "integrity": "sha512-spUQWrdPt+pRVP1TTJLmfRNJJHHZryFmptzcafwSvHsceV81djHOdnEeDmkdotZyLNjDhrOasNK8nikkoG1O8Q==", + "packages/common/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "^8.47.0", - "@humanwhocodes/config-array": "^0.11.10", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": 
"^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/common/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/common/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/common/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/common/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "packages/common/node_modules/eslint": { + "version": "8.47.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", + "integrity": 
"sha512-spUQWrdPt+pRVP1TTJLmfRNJJHHZryFmptzcafwSvHsceV81djHOdnEeDmkdotZyLNjDhrOasNK8nikkoG1O8Q==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.2", + "@eslint/js": "^8.47.0", + "@humanwhocodes/config-array": "^0.11.10", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", @@ -11968,12 +12545,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/common/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/common/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/common/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/common/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } 
+ }, "packages/credentials": { "name": "@web5/credentials", "version": "0.3.2", @@ -12141,6 +12751,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/credentials/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/credentials/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/credentials/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/credentials/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/credentials/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/credentials/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -12195,12 +12866,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/credentials/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/credentials/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/credentials/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/credentials/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/crypto": { "name": "@web5/crypto", "version": "0.2.3", @@ -12325,6 +13029,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + 
"packages/crypto/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/crypto/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/crypto/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/crypto/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/crypto/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, 
"packages/crypto/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -12379,12 +13144,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/crypto/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/crypto/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/crypto/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/crypto/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/dids": { "name": "@web5/dids", "version": "0.2.2", @@ -12558,6 +13356,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/dids/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + 
"color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/dids/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/dids/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/dids/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/dids/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/dids/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -12612,12 +13471,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/dids/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/dids/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/dids/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/dids/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/identity-agent": { "name": "@web5/identity-agent", "version": "0.2.4", @@ -12741,46 +13633,107 @@ "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "packages/identity-agent/node_modules/@web5/crypto": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@web5/crypto/-/crypto-0.2.2.tgz", + "integrity": "sha512-vHFg0wXQSQXrwuBNQyDHnmSZchfTfO6/Sv+7rDsNkvofs+6lGTE8CZ02cwUYMeIwTRMLer12c+fMfzYrXokEUQ==", + "dependencies": { + "@noble/ciphers": "0.1.4", + 
"@noble/curves": "1.1.0", + "@noble/hashes": "1.3.1", + "@web5/common": "0.2.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "packages/identity-agent/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "packages/identity-agent/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/identity-agent/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "packages/identity-agent/node_modules/@web5/crypto": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/@web5/crypto/-/crypto-0.2.2.tgz", - "integrity": "sha512-vHFg0wXQSQXrwuBNQyDHnmSZchfTfO6/Sv+7rDsNkvofs+6lGTE8CZ02cwUYMeIwTRMLer12c+fMfzYrXokEUQ==", + 
"packages/identity-agent/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { - "@noble/ciphers": "0.1.4", - "@noble/curves": "1.1.0", - "@noble/hashes": "1.3.1", - "@web5/common": "0.2.1" + "color-name": "~1.1.4" }, "engines": { - "node": ">=18.0.0" + "node": ">=7.0.0" } }, - "packages/identity-agent/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "packages/identity-agent/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/identity-agent/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" + "engines": { + "node": ">=10" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" + "url": "https://github.com/sponsors/sindresorhus" } }, "packages/identity-agent/node_modules/eslint": { @@ -12837,12 +13790,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/identity-agent/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": 
"sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/identity-agent/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/identity-agent/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/identity-agent/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/proxy-agent": { "name": "@web5/proxy-agent", "version": "0.2.4", @@ -13007,6 +13993,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/proxy-agent/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/proxy-agent/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/proxy-agent/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/proxy-agent/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/proxy-agent/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/proxy-agent/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -13061,12 +14108,45 @@ "url": "https://opencollective.com/eslint" } }, + "packages/proxy-agent/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + 
"packages/proxy-agent/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "packages/proxy-agent/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "packages/proxy-agent/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "packages/user-agent": { "name": "@web5/user-agent", "version": "0.2.4", @@ -13231,6 +14311,67 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "packages/user-agent/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "packages/user-agent/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": 
{ + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "packages/user-agent/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "packages/user-agent/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "packages/user-agent/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "packages/user-agent/node_modules/eslint": { "version": "8.47.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz", @@ -13285,11 +14426,44 @@ "url": "https://opencollective.com/eslint" } }, + "packages/user-agent/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/user-agent/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + 
"dev": true, + "engines": { + "node": ">=8" + } + }, "packages/user-agent/node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true + }, + "packages/user-agent/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } } } } diff --git a/packages/api/README.md b/packages/api/README.md index 45b122ce9..72a7ff5d0 100644 --- a/packages/api/README.md +++ b/packages/api/README.md @@ -163,12 +163,13 @@ Each `Record` instance has the following instance properties: `id`, `attestation Each `Record` instance has the following instance methods: - **`data`** - _`object`_: an object with the following convenience methods that read out the data of the record entry in the following formats: - - **`text`** - _`function`_: produces a textual representation of the data. - - **`json`** - _`function`_: if the value is JSON data, this method will return a parsed JSON object. - - **`stream`** - _`function`_: returns the raw stream of bytes for the data. + - **`blob`** - _`function`_: returns the data as a [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). + - **`bytes`** - _`function`_: returns the data as a raw byte array in `Uint8Array` format. + - **`json`** - _`function`_: returns a parsed JSON object. + - **`stream`** - _`function`_: returns the data as a raw stream of bytes. + - **`text`** - _`function`_: returns the data as a string. - **`send`** - _`function`_: sends the record the instance represents to the DWeb Node endpoints of a provided DID. 
- **`update`** - _`function`_: takes in a new request object matching the expected method signature of a `write` and overwrites the record. This is a convenience method that allows you to easily overwrite records with less verbosity. -- **`delete`** - _`function`_: generates a `delete` entry tombstone for the record. This is a convenience method that allows you to easily delete records with less verbosity. ### **`web5.dwn.records.query(request)`** diff --git a/packages/api/src/dwn-api.ts b/packages/api/src/dwn-api.ts index c28ec50ec..b9ee7ce31 100644 --- a/packages/api/src/dwn-api.ts +++ b/packages/api/src/dwn-api.ts @@ -371,13 +371,20 @@ export class DwnApi { * Extract the `author` DID from the record entry since records may be signed by the * tenant owner or any other entity. */ - author : RecordsWrite.getAuthor(entry), + author : RecordsWrite.getAuthor(entry), /** * Set the `target` DID to currently connected DID so that subsequent calls to * {@link Record} instance methods, such as `record.update()` are executed on the * local DWN even if the record was returned by a query of a remote DWN. */ - target : this.connectedDid, + target : this.connectedDid, + /** + * If the record was returned by a query of a remote DWN, set the `remoteTarget` to + * the DID of the DWN that returned the record. The `remoteTarget` will be used to + * determine which DWN to send subsequent read requests to in the event the data payload + * exceeds the threshold for being returned with queries. + */ + remoteTarget : request.from, ...entry as RecordsWriteMessage }; const record = new Record(this.agent, recordOptions); @@ -424,13 +431,20 @@ export class DwnApi { * Extract the `author` DID from the record since records may be signed by the * tenant owner or any other entity. 
*/ - author : RecordsWrite.getAuthor(responseRecord), + author : RecordsWrite.getAuthor(responseRecord), /** * Set the `target` DID to currently connected DID so that subsequent calls to * {@link Record} instance methods, such as `record.update()` are executed on the * local DWN even if the record was read from a remote DWN. */ - target : this.connectedDid, + target : this.connectedDid, + /** + * If the record was returned by a query of a remote DWN, set the `remoteTarget` to + * the DID of the DWN that returned the record. The `remoteTarget` will be used to + * determine which DWN to send subsequent read requests to in the event the data payload + * exceeds the threshold for being returned with queries. + */ + remoteTarget : request.from, ...responseRecord, }; diff --git a/packages/api/src/record.ts b/packages/api/src/record.ts index b7df37753..8c742c29a 100644 --- a/packages/api/src/record.ts +++ b/packages/api/src/record.ts @@ -1,13 +1,13 @@ import type { Web5Agent } from '@web5/agent'; -import type { Readable } from 'readable-stream'; +import type { Readable } from '@web5/common'; import type { RecordsWriteMessage, RecordsWriteOptions, RecordsWriteDescriptor, } from '@tbd54566975/dwn-sdk-js'; -import { ReadableWebToNodeStream } from 'readable-web-to-node-stream'; -import { DataStream, DwnInterfaceName, DwnMethodName, Encoder } from '@tbd54566975/dwn-sdk-js'; +import { Convert, NodeStream, Stream } from '@web5/common'; +import { DwnInterfaceName, DwnMethodName } from '@tbd54566975/dwn-sdk-js'; import type { ResponseStatus } from './dwn-api.js'; @@ -23,6 +23,7 @@ export type RecordOptions = RecordsWriteMessage & { target: string; encodedData?: string | Blob; data?: Readable | ReadableStream; + remoteTarget?: string; }; /** @@ -54,8 +55,8 @@ export type RecordUpdateOptions = { } /** - * Record wrapper class with convenience methods to send, update, - * and delete itself, aside from manipulating and reading the record data. 
+ * Record wrapper class with convenience methods to send and update, + * aside from manipulating and reading the record data. * * Note: The `messageTimestamp` of the most recent RecordsWrite message is * logically equivalent to the date/time at which a Record was most @@ -69,23 +70,21 @@ export type RecordUpdateOptions = { export class Record implements RecordModel { // mutable properties - /** Record's author */ + /** Record's author DID */ author: string; - /** Record's target (for sent records) */ + /** Record's target DID */ target: string; - /** Record deleted status */ - isDeleted = false; - private _agent: Web5Agent; private _attestation?: RecordsWriteMessage['attestation']; private _contextId?: string; private _descriptor: RecordsWriteDescriptor; - private _encodedData?: string | Blob | null; + private _encodedData?: Blob; private _encryption?: RecordsWriteMessage['encryption']; - private _readableStream?: Readable | Promise; + private _readableStream?: Readable; private _recordId: string; + private _remoteTarget?: string; // Immutable DWN Record properties. @@ -151,10 +150,16 @@ export class Record implements RecordModel { constructor(agent: Web5Agent, options: RecordOptions) { this._agent = agent; - // Store the target and author DIDs that were used to create the message to use for subsequent reads, etc. + /** Store the target and author DIDs that were used to create the message to use for subsequent + * updates, reads, etc. */ this.author = options.author; this.target = options.target; + /** If the record was queried from a remote DWN, the `remoteTarget` DID will be defined. This + * value is used to send subsequent read requests to the same remote DWN in the event the + * record's data payload was too large to be returned in query results. */ + this._remoteTarget = options.remoteTarget; + // RecordsWriteMessage properties. 
this._attestation = options.attestation; this._contextId = options.contextId; @@ -162,15 +167,22 @@ export class Record implements RecordModel { this._encryption = options.encryption; this._recordId = options.recordId; + if (options.encodedData) { + // If `encodedData` is set, then it is expected that: + // type is Blob if the Record object was instantiated by dwn.records.create()/write(). + // type is Base64 URL encoded string if the Record object was instantiated by dwn.records.query(). + // If it is a string, we need to Base64 URL decode to bytes and instantiate a Blob. + this._encodedData = (typeof options.encodedData === 'string') ? + new Blob([Convert.base64Url(options.encodedData).toUint8Array()], { type: this.dataFormat }) : + options.encodedData; + } - // options.encodedData will either be a base64url encoded string (in the case of RecordsQuery) - // OR a Blob in the case of a RecordsWrite. - this._encodedData = options.encodedData ?? null; - - // If the record was created from a RecordsRead reply then it will have a `data` property. if (options.data) { - this._readableStream = Record.isReadableWebStream(options.data) ? - new ReadableWebToNodeStream(options.data) as Readable : options.data as Readable; + // If the record was created from a RecordsRead reply then it will have a `data` property. + // If the `data` property is a web ReadableStream, convert it to a Node.js Readable. + this._readableStream = Stream.isReadableStream(options.data) ? + NodeStream.fromWebReadable({ readableStream: options.data }) : + options.data; } } @@ -180,93 +192,105 @@ export class Record implements RecordModel { * @returns a data stream with convenience methods such as `blob()`, `json()`, `text()`, and `stream()`, similar to the fetch API response * @throws `Error` if the record has already been deleted. 
* + * @beta */ get data() { - if (this.isDeleted) throw new Error('Operation failed: Attempted to access `data` of a record that has already been deleted.'); - - if (!this._encodedData && !this._readableStream) { - // `encodedData` will be set if the Record was instantiated by dwn.records.create()/write(). - // `readableStream` will be set if Record was instantiated by dwn.records.read(). - // If neither of the above are true, then the record must be fetched from the DWN. - this._readableStream = this._agent.processDwnRequest({ - author : this.author, - messageOptions : { filter: { recordId: this.id } }, - messageType : DwnInterfaceName.Records + DwnMethodName.Read, - target : this.target, - }) - .then(response => response.reply) - .then(reply => reply.record.data) - .catch(error => { throw new Error(`Error encountered while attempting to read data: ${error.message}`); }); - } - - if (typeof this._encodedData === 'string') { - // If `encodedData` is set, then it is expected that: - // type is Blob if the Record object was instantiated by dwn.records.create()/write(). - // type is Base64 URL encoded string if the Record object was instantiated by dwn.records.query(). - // If it is a string, we need to Base64 URL decode to bytes and instantiate a Blob. - const dataBytes = Encoder.base64UrlToBytes(this._encodedData); - this._encodedData = new Blob([dataBytes], { type: this.dataFormat }); - } - - // Explicitly cast `encodedData` as a Blob since, if non-null, it has been converted from string to Blob. - const dataBlob = this._encodedData as Blob; - // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; // Capture the context of the `Record` instance. const dataObj = { + + /** + * Returns the data of the current record as a `Blob`. + * + * @returns A promise that resolves to a Blob containing the record's data. + * @throws If the record data is not available or cannot be converted to a `Blob`. 
+ * + * @beta + */ async blob(): Promise { - if (dataBlob) return dataBlob; - if (self._readableStream) return new Blob([await this.stream().then(DataStream.toBytes)], { type: self.dataFormat }); + return new Blob([await NodeStream.consumeToBytes({ readable: await this.stream() })], { type: self.dataFormat }); }, - async json() { - if (dataBlob) return this.text().then(JSON.parse); - if (self._readableStream) return this.text().then(JSON.parse); - return null; + + /** + * Returns the data of the current record as a `Uint8Array`. + * + * @returns A Promise that resolves to a `Uint8Array` containing the record's data bytes. + * @throws If the record data is not available or cannot be converted to a byte array. + * + * @beta + */ + async bytes(): Promise { + return await NodeStream.consumeToBytes({ readable: await this.stream() }); }, - async text() { - if (dataBlob) return dataBlob.text(); - if (self._readableStream) return this.stream().then(DataStream.toBytes).then(Encoder.bytesToString); - return null; + + /** + * Parses the data of the current record as JSON and returns it as a JavaScript object. + * + * @returns A Promise that resolves to a JavaScript object parsed from the record's JSON data. + * @throws If the record data is not available, not in JSON format, or cannot be parsed. + * + * @beta + */ + async json(): Promise { + return await NodeStream.consumeToJson({ readable: await this.stream() }); + }, + + /** + * Returns the data of the current record as a `string`. + * + * @returns A promise that resolves to a `string` containing the record's text data. + * @throws If the record data is not available or cannot be converted to text. 
+ * + * @beta + */ + async text(): Promise<string> { + return await NodeStream.consumeToText({ readable: await this.stream() }); }, - async stream() { - if (dataBlob) return new ReadableWebToNodeStream(dataBlob.stream()); - if (self._readableStream) return self._readableStream; - return null; + + /** + * Provides a `Readable` stream containing the record's data. + * + * @returns A promise that resolves to a Node.js `Readable` stream of the record's data. + * @throws If the record data is not available in-memory and cannot be fetched. + * + * @beta + */ + async stream(): Promise<Readable> { + if (self._encodedData) { + /** If `encodedData` is set, it indicates that the Record was instantiated by + * `dwn.records.create()`/`dwn.records.write()` or the record's data payload was small + * enough to be returned in `dwn.records.query()` results. In either case, the data is + * already available in-memory and can be returned as a Node.js `Readable` stream. */ + self._readableStream = NodeStream.fromWebReadable({ readableStream: self._encodedData.stream() }); + + } else if (!NodeStream.isReadable({ readable: self._readableStream })) { + /** If `encodedData` is not set, then the Record was instantiated by `dwn.records.read()` + * or was too large to be returned in `dwn.records.query()` results. In either case, the + * data is not available in-memory and must be fetched from either: */ + self._readableStream = self._remoteTarget ? + // 1. ...a remote DWN if the record was queried from a remote DWN. + await self.readRecordData({ target: self._remoteTarget, isRemote: true }) : + // 2. ...a local DWN if the record was queried from the local DWN. 
+ await self.readRecordData({ target: self.target, isRemote: false }); + } + + if (!self._readableStream) { + throw new Error('Record data is not available.'); + } + + return self._readableStream; }, + then(...callbacks) { return this.stream().then(...callbacks); }, + catch(callback) { return dataObj.then().catch(callback); }, }; - return dataObj; - } - /** - * Delete the current record from the DWN. - * @returns the status of the delete request - * @throws `Error` if the record has already been deleted. - */ - async delete(): Promise { - if (this.isDeleted) throw new Error('Operation failed: Attempted to call `delete()` on a record that has already been deleted.'); - - // Attempt to delete the record from the DWN. - const agentResponse = await this._agent.processDwnRequest({ - author : this.author, - messageOptions : { recordId: this.id }, - messageType : DwnInterfaceName.Records + DwnMethodName.Delete, - target : this.target, - }); - - const { reply: { status } } = agentResponse; - - if (status.code === 202) { - // If the record was successfully deleted, mark the instance as deleted to prevent further modifications. - this.setDeletedStatus(true); - } - - return { status }; + return dataObj; } /** @@ -275,10 +299,10 @@ export class Record implements RecordModel { * @param target - the DID to send the record to * @returns the status of the send record request * @throws `Error` if the record has already been deleted. 
+ * + * @beta */ async send(target: string): Promise { - if (this.isDeleted) throw new Error('Operation failed: Attempted to call `send()` on a record that has already been deleted.'); - const { reply: { status } } = await this._agent.sendDwnRequest({ messageType : DwnInterfaceName.Records + DwnMethodName.Write, author : this.author, @@ -343,10 +367,10 @@ export class Record implements RecordModel { * @param options - options to update the record, including the new data * @returns the status of the update request * @throws `Error` if the record has already been deleted. + * + * @beta */ async update(options: RecordUpdateOptions = {}): Promise { - if (this.isDeleted) throw new Error('Operation failed: Attempted to call `update()` on a record that has already been deleted.'); - // Map Record class `dateModified` property to DWN SDK `messageTimestamp`. const { dateModified, ...updateOptions } = options as Partial & RecordUpdateOptions; updateOptions.messageTimestamp = dateModified; @@ -356,8 +380,8 @@ export class Record implements RecordModel { let dataBlob: Blob; if (options.data !== undefined) { - // If `data` is being updated then `dataCid` and `dataSize` must be undefined and the `data` property is passed as - // a top-level property to `agent.processDwnRequest()`. + // If `data` is being updated then `dataCid` and `dataSize` must be undefined and the `data` + // property is passed as a top-level property to `agent.processDwnRequest()`. delete updateMessage.dataCid; delete updateMessage.dataSize; delete updateMessage.data; @@ -416,22 +440,60 @@ export class Record implements RecordModel { } /** - * TODO: Document method. + * Fetches the record's data from the source DWN. + * + * This private method is called when the record data is not available in-memory + * and needs to be fetched from either a local or a remote DWN. + * It makes a read request to the specified DWN and processes the response to provide + * a Node.js `Readable` stream of the record's data. 
+ * + * @param target - The DID of the DWN to fetch the data from. + * @param isRemote - Indicates whether the target DWN is a remote node. + * @returns A Promise that resolves to a Node.js `Readable` stream of the record's data. + * @throws If there is an error while fetching or processing the data from the DWN. + * + * @beta */ - private setDeletedStatus(status: boolean): void { - this.isDeleted = status; - } + private async readRecordData({ target, isRemote }: { target: string, isRemote: boolean }) { + const readRequest = { + author : this.author, + messageOptions : { filter: { recordId: this.id } }, + messageType : DwnInterfaceName.Records + DwnMethodName.Read, + target, + }; - /** - * TODO: Document method. - */ - private static isReadableWebStream(stream) { - // TODO: Improve robustness of the check modeled after node:stream. - return typeof stream._read !== 'function'; + const agentResponsePromise = isRemote ? + this._agent.sendDwnRequest(readRequest) : + this._agent.processDwnRequest(readRequest); + + try { + const { reply: { record }} = await agentResponsePromise; + const dataStream: ReadableStream | Readable = record.data; + // If the data stream is a web ReadableStream, convert it to a Node.js Readable. + const nodeReadable = Stream.isReadableStream(dataStream) ? + NodeStream.fromWebReadable({ readableStream: dataStream }) : + dataStream; + return nodeReadable; + + } catch (error) { + throw new Error(`Error encountered while attempting to read data: ${error.message}`); + } } /** - * TODO: Document method. + * Verifies if the properties to be mutated are mutable. + * + * This private method is used to ensure that only mutable properties of the `Record` instance + * are being changed. It checks whether the properties specified for mutation are among the + * set of properties that are allowed to be modified. If any of the properties to be mutated + * are not in the set of mutable properties, the method throws an error. 
+ * + * @param propertiesToMutate - An iterable of property names that are intended to be mutated. + * @param mutableDescriptorProperties - A set of property names that are allowed to be mutated. + * + * @throws If any of the properties in `propertiesToMutate` are not in `mutableDescriptorProperties`. + * + * @beta + */ private static verifyPermittedMutation(propertiesToMutate: Iterable<string>, mutableDescriptorProperties: Set<string>) { for (const property of propertiesToMutate) { diff --git a/packages/api/tests/dwn-api.spec.ts b/packages/api/tests/dwn-api.spec.ts index f6a99b234..d04705f20 100644 --- a/packages/api/tests/dwn-api.spec.ts +++ b/packages/api/tests/dwn-api.spec.ts @@ -812,7 +812,12 @@ describe('DwnApi', () => { expect(writeResult.status.detail).to.equal('Accepted'); expect(writeResult.record).to.exist; - await writeResult.record!.delete(); + // Delete the record + await dwnAlice.records.delete({ + message: { + recordId: writeResult.record!.id + } + }); const result = await dwnAlice.records.read({ message: { diff --git a/packages/api/tests/record.spec.ts b/packages/api/tests/record.spec.ts index 853375028..5e9f70163 100644 --- a/packages/api/tests/record.spec.ts +++ b/packages/api/tests/record.spec.ts @@ -8,14 +8,17 @@ import type { } from '@tbd54566975/dwn-sdk-js'; import chai, { expect } from 'chai'; +import { NodeStream } from '@web5/common'; import chaiAsPromised from 'chai-as-promised'; import { utils as didUtils } from '@web5/dids'; import { TestManagedAgent } from '@web5/agent'; import { + DataStream, DwnConstant, RecordsWrite, DwnMethodName, DwnInterfaceName, + PrivateKeySigner, EncryptionAlgorithm, KeyDerivationScheme, } from '@tbd54566975/dwn-sdk-js'; @@ -35,7 +38,6 @@ chai.use(chaiAsPromised); // NOTE: @noble/secp256k1 requires globalThis.crypto polyfill for node.js <=18: https://github.com/paulmillr/noble-secp256k1/blob/main/README.md#usage // Remove when we move off of node.js v18 to v20, earliest possible time would be Oct 2023: 
https://github.com/nodejs/release#release-schedule import { webcrypto } from 'node:crypto'; -import { PrivateKeySigner } from '@tbd54566975/dwn-sdk-js'; // @ts-ignore if (!globalThis.crypto) globalThis.crypto = webcrypto; @@ -206,85 +208,493 @@ describe('Record', () => { expect(record.dataFormat).to.equal(dataFormat); }); - describe('data.blob()', () => { - it('returns small data payloads after dwn.records.write()', async () => { - // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.blob() is executed. - const dataJson = TestDataGenerator.randomJson(500); - const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + describe('data', () => { + let dataText500Bytes: string; + let dataTextExceedingMaxSize: string; - // Write the 500B record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataJson }); + before(async () => { + dataText500Bytes = TestDataGenerator.randomString(500); + dataTextExceedingMaxSize = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + }); - expect(status.code).to.equal(202); + describe('data.blob()', () => { + it('returns small data payloads after dwn.records.write()', async () => { + // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched + // with a RecordsRead when record.data.blob() is executed. + const dataJson = TestDataGenerator.randomJson(500); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. - const readDataBlob = await record!.data.blob(); - expect(readDataBlob.size).to.equal(inputDataBytes.length); + // Write the 500B record to agent-connected DWN. 
+ const { record, status } = await dwn.records.write({ data: dataJson }); - // Convert the Blob into an array and ensure it matches the input data byte for byte. - const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); - expect(readDataBytes).to.deep.equal(inputDataBytes); + expect(status.code).to.equal(202); + + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const readDataBlob = await record!.data.blob(); + expect(readDataBlob.size).to.equal(inputDataBytes.length); + + // Convert the Blob into an array and ensure it matches the input data byte for byte. + const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns small data payloads after dwn.records.read()', async () => { + // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched + // with a RecordsRead when record.data.blob() is executed. + const dataJson = TestDataGenerator.randomJson(500); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const readDataBlob = await readRecord.data.blob(); + expect(readDataBlob.size).to.equal(inputDataBytes.length); + + // Convert the Blob into an array and ensure it matches the input data byte for byte. 
+ const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after dwn.records.write()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.blob() is executed. + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const readDataBlob = await record!.data.blob(); + expect(readDataBlob.size).to.equal(inputDataBytes.length); + + // Convert the Blob into an array and ensure it matches the input data byte for byte. + const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.query()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + expect(status.code).to.equal(202); + + // Query for the record that was just created. 
+ const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ + message: { filter: { recordId: record!.id }} + }); + expect(queryRecordStatus.code).to.equal(200); + + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const [ queryRecord ] = queryRecords; + const queriedDataBlob = await queryRecord.data.blob(); + expect(queriedDataBlob.size).to.equal(inputDataBytes.length); + + // Convert the Blob into an array and ensure it matches the input data, byte for byte. + const queriedDataBytes = new Uint8Array(await queriedDataBlob.arrayBuffer()); + expect(queriedDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.read()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.blob() is executed. + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + message: { filter: { recordId: record!.id }} + }); + + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const readDataBlob = await readRecord.data.blob(); + expect(readDataBlob.size).to.equal(inputDataBytes.length); + + // Convert the Blob into an array and ensure it matches the input data byte for byte. 
+ const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); }); - it('returns small data payloads after dwn.records.read()', async () => { + describe('data.json()', () => { + it('returns small data payloads after dwn.records.write()', async () => { + // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched + // with a RecordsRead when record.data.json() is executed. + const dataJson = TestDataGenerator.randomJson(500); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Confirm that the size, in bytes, of the data read as JSON matches the original input data. + const readDataJson = await record!.data.json(); + const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + expect(readDataBytes.length).to.equal(inputDataBytes.length); + + // Ensure the JSON returned matches the input data, byte for byte. + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns small data payloads after dwn.records.read()', async () => { // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.blob() is executed. - const dataJson = TestDataGenerator.randomJson(500); - const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + // with a RecordsRead when record.data.json() is executed. + const dataJson = TestDataGenerator.randomJson(500); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Write the 500B record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataJson }); + // Write the 500B record to agent-connected DWN. 
+ const { record, status } = await dwn.records.write({ data: dataJson }); - expect(status.code).to.equal(202); + expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); - expect(readRecordStatus.code).to.equal(200); + expect(readRecordStatus.code).to.equal(200); - // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. - const readDataBlob = await readRecord.data.blob(); - expect(readDataBlob.size).to.equal(inputDataBytes.length); + // Confirm that the size, in bytes, of the data read as JSON matches the original input data. + const readDataJson = await readRecord!.data.json(); + const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Convert the Blob into an array and ensure it matches the input data byte for byte. - const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); - expect(readDataBytes).to.deep.equal(inputDataBytes); + // Ensure the JSON returned matches the input data, byte for byte. + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after dwn.records.write()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.json() is executed. + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. 
+ const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Confirm that the size, in bytes, of the data read as JSON matches the original input data. + const readDataJson = await record!.data.json(); + const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + expect(readDataBytes.length).to.equal(inputDataBytes.length); + + // Ensure the JSON returned matches the input data, byte for byte. + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.query()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.json() is executed. */ + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + expect(status.code).to.equal(202); + + // Query for the record that was just created. + const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ + message: { filter: { recordId: record!.id }} + }); + expect(queryRecordStatus.code).to.equal(200); + + // Confirm that the size, in bytes, of the data read as JSON matches the original input data. + const [ queryRecord ] = queryRecords; + const queriedDataBlob = await queryRecord!.data.json(); + + // Convert the JSON to bytes and ensure it matches the input data, byte for byte. 
+ const queriedDataBytes = new TextEncoder().encode(JSON.stringify(queriedDataBlob)); + expect(queriedDataBytes.length).to.equal(inputDataBytes.length); + expect(queriedDataBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.read()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.json() is executed. + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + message: { filter: { recordId: record!.id }} + }); + + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the size, in bytes, of the data read as JSON matches the original input data. + const readDataJson = await readRecord!.data.json(); + const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + expect(readDataBytes.length).to.equal(inputDataBytes.length); + + // Ensure the JSON returned matches the input data, byte for byte. + expect(readDataBytes).to.deep.equal(inputDataBytes); + }); }); - it('returns large data payloads after dwn.records.write()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.blob() is executed. 
- const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); - const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + describe('data.stream()', () => { + it('returns small data payloads after dwn.records.write()', async () => { + // Use a data payload that is less than the encoded data limit to ensure that the data will + // not have to be fetched with a RecordsRead when record.data.text() is executed. + const inputDataBytes = new TextEncoder().encode(dataText500Bytes); - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataJson }); + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText500Bytes }); + expect(status.code).to.equal(202); - expect(status.code).to.equal(202); + // Confirm that the length of the data read as text matches the original input data. + const dataStream = await record!.data.stream(); + const dataStreamBytes = await DataStream.toBytes(dataStream); + expect(dataStreamBytes.length).to.equal(dataText500Bytes.length); - // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. - const readDataBlob = await record!.data.blob(); - expect(readDataBlob.size).to.equal(inputDataBytes.length); + // Ensure the text returned matches the input data, byte for byte. + expect(dataStreamBytes).to.deep.equal(inputDataBytes); + }); - // Convert the Blob into an array and ensure it matches the input data byte for byte. - const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); - expect(readDataBytes).to.deep.equal(inputDataBytes); + it('returns small data payloads after dwn.records.read()', async () => { + // Use a data payload that is less than the encoded data limit to ensure that the data will + // not have to be fetched with a RecordsRead when record.data.text() is executed. 
+ const inputDataBytes = new TextEncoder().encode(dataText500Bytes); + + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText500Bytes }); + expect(status.code).to.equal(202); + + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const dataStream = await readRecord!.data.stream(); + const dataStreamBytes = await DataStream.toBytes(dataStream); + expect(dataStreamBytes.length).to.equal(dataText500Bytes.length); + + // Ensure the text returned matches the input data, byte for byte. + expect(dataStreamBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after dwn.records.write()', async () => { + // Use a data payload that exceeds the DWN encoded data limit to ensure that the data will + // have to be fetched with a RecordsRead when record.data.text() is executed. + const inputDataBytes = new TextEncoder().encode(dataTextExceedingMaxSize); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataTextExceedingMaxSize }); + expect(status.code).to.equal(202); + + // Confirm that the length of the data read as text matches the original input data. + const dataStream = await record!.data.stream(); + const dataStreamBytes = await DataStream.toBytes(dataStream); + expect(dataStreamBytes.length).to.equal(dataTextExceedingMaxSize.length); + + // Ensure the text returned matches the input data, byte for byte. 
+ expect(dataStreamBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.query()', async () => { + // Use a data payload that exceeds the DWN encoded data limit to ensure that the data will + // have to be fetched with a RecordsRead when record.data.text() is executed. + const inputDataBytes = new TextEncoder().encode(dataTextExceedingMaxSize); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataTextExceedingMaxSize }); + expect(status.code).to.equal(202); + + // Query for the record that was just created. + const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ + message: { filter: { recordId: record!.id }} + }); + expect(queryRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const [ queryRecord ] = queryRecords; + const dataStream = await queryRecord!.data.stream(); + const dataStreamBytes = await DataStream.toBytes(dataStream); + expect(dataStreamBytes.length).to.equal(dataTextExceedingMaxSize.length); + + // Ensure the text returned matches the input data, byte for byte. + expect(dataStreamBytes).to.deep.equal(inputDataBytes); + }); + + it('returns large data payloads after local dwn.records.read()', async () => { + // Use a data payload that exceeds the DWN encoded data limit to ensure that the data will + // have to be fetched with a RecordsRead when record.data.text() is executed. + const inputDataBytes = new TextEncoder().encode(dataTextExceedingMaxSize); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataTextExceedingMaxSize }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. 
+ const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + message: { filter: { recordId: record!.id }} + }); + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const dataStream = await readRecord!.data.stream(); + const dataStreamBytes = await DataStream.toBytes(dataStream); + expect(dataStreamBytes.length).to.equal(dataTextExceedingMaxSize.length); + + // Ensure the text returned matches the input data, byte for byte. + expect(dataStreamBytes).to.deep.equal(inputDataBytes); + }); + }); + + describe('data.text()', () => { + it('returns small data payloads after dwn.records.write()', async () => { + // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched + // with a RecordsRead when record.data.text() is executed. + const dataText = TestDataGenerator.randomString(500); + + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText }); + + expect(status.code).to.equal(202); + + // Confirm that the length of the data read as text matches the original input data. + const readDataText = await record!.data.text(); + expect(readDataText.length).to.equal(dataText.length); + + // Ensure the text returned matches the input data, char for char. + expect(readDataText).to.deep.equal(dataText); + }); + + it('returns small data payloads after dwn.records.read()', async () => { + // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched + // with a RecordsRead when record.data.text() is executed. + const dataText = TestDataGenerator.randomString(500); + + // Write the 500B record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. 
+ const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const readDataText = await readRecord!.data.text(); + expect(readDataText.length).to.equal(dataText.length); + + // Ensure the text returned matches the input data, char for char. + expect(readDataText).to.deep.equal(dataText); + }); + + it('returns large data payloads after dwn.records.write()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.text() is executed. + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText }); + + expect(status.code).to.equal(202); + + // Confirm that the length of the data read as text matches the original input data. + const readDataText = await record!.data.text(); + expect(readDataText.length).to.equal(dataText.length); + + // Ensure the text returned matches the input data, char for char. + expect(readDataText).to.deep.equal(dataText); + }); + + it('returns large data payloads after local dwn.records.query()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText }); + expect(status.code).to.equal(202); + + // Query for the record that was just created. 
+ const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ + message: { filter: { recordId: record!.id }} + }); + expect(queryRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const [ queryRecord ] = queryRecords; + const queriedDataText = await queryRecord!.data.text(); + expect(queriedDataText.length).to.equal(dataText.length); + + // Ensure the text returned matches the input data, char for char. + expect(queriedDataText).to.deep.equal(dataText); + }); + + it('returns large data payloads after local dwn.records.read()', async () => { + // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched + // with a RecordsRead when record.data.text() is executed. + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Write the large record to agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataText }); + + expect(status.code).to.equal(202); + + // Read the record that was just created. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + message: { filter: { recordId: record!.id }} + }); + + expect(readRecordStatus.code).to.equal(200); + + // Confirm that the length of the data read as text matches the original input data. + const readDataText = await readRecord!.data.text(); + expect(readDataText.length).to.equal(dataText.length); + + // Ensure the text returned matches the input data, char for char. + expect(readDataText).to.deep.equal(dataText); + }); }); - it('returns large data payloads after dwn.records.query()', async () => { + it('returns large data payloads after remote dwn.records.query()', async () => { /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to - * be fetched with a RecordsRead when record.data.blob() is executed. 
*/ + * be fetched with a RecordsRead when record.data.* is executed. */ const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataJson }); + // Create a large record but do NOT store it on the local, agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - // Query for the record that was just created. + // Write the large record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); + + // Query for the record that was just created on the remote DWN. const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ - message: { filter: { recordId: record!.id }} + from : alice.did, + message : { filter: { recordId: record!.id }} }); expect(queryRecordStatus.code).to.equal(200); @@ -292,26 +702,27 @@ describe('Record', () => { const [ queryRecord ] = queryRecords; const queriedDataBlob = await queryRecord.data.blob(); expect(queriedDataBlob.size).to.equal(inputDataBytes.length); - - // Convert the Blob into an array and ensure it matches the input data, byte for byte. - const queriedDataBytes = new Uint8Array(await queriedDataBlob.arrayBuffer()); - expect(queriedDataBytes).to.deep.equal(inputDataBytes); }); - it('returns large data payloads after dwn.records.read()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.blob() is executed. 
+ it('returns large data payloads after remote dwn.records.read()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.* is executed. */ const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataJson }); - + // Create a large record but do NOT store it on the local, agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + // Write the large record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); + // Read the record that was just created on the remote DWN. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + from : alice.did, + message : { filter: { recordId: record!.id }} + }); expect(readRecordStatus.code).to.equal(200); // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. @@ -322,278 +733,259 @@ describe('Record', () => { const readDataBytes = new Uint8Array(await readDataBlob.arrayBuffer()); expect(readDataBytes).to.deep.equal(inputDataBytes); }); - }); - describe('data.json()', () => { - it('returns small data payloads after dwn.records.write()', async () => { - // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.json() is executed. 
- const dataJson = TestDataGenerator.randomJson(500); + it('returns small data payloads repeatedly after dwn.records.write()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.* is executed. */ + const dataJson = TestDataGenerator.randomJson(100_000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); // Write the 500B record to agent-connected DWN. const { record, status } = await dwn.records.write({ data: dataJson }); - expect(status.code).to.equal(202); - // Confirm that the size, in bytes, of the data read as JSON matches the original input data. - const readDataJson = await record!.data.json(); - const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); - expect(readDataBytes.length).to.equal(inputDataBytes.length); + // Read the data payload as bytes. + let readDataBytes = await record!.data.bytes(); + // Ensure the JSON returned matches the input data, byte for byte. + expect(inputDataBytes).to.deep.equal(readDataBytes); + // Read the data payload a second time. + readDataBytes = await record!.data.bytes(); // Ensure the JSON returned matches the input data, byte for byte. - expect(readDataBytes).to.deep.equal(inputDataBytes); + expect(inputDataBytes).to.deep.equal(readDataBytes); + + // Read the data payload a third time. + readDataBytes = await record!.data.bytes(); + // Ensure the JSON returned matches the input data, byte for byte. + expect(inputDataBytes).to.deep.equal(readDataBytes); }); - it('returns small data payloads after dwn.records.read()', async () => { - // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.json() is executed. 
- const dataJson = TestDataGenerator.randomJson(500); + it('returns large data payloads repeatedly after dwn.records.write()', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.* is executed. */ + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 25000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Write the 500B record to agent-connected DWN. + // Write the large record to agent-connected DWN. const { record, status } = await dwn.records.write({ data: dataJson }); - expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); - - expect(readRecordStatus.code).to.equal(200); - // Confirm that the size, in bytes, of the data read as JSON matches the original input data. - const readDataJson = await readRecord!.data.json(); - const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + let readDataJson = await record!.data.json(); + let readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); expect(readDataBytes.length).to.equal(inputDataBytes.length); // Ensure the JSON returned matches the input data, byte for byte. expect(readDataBytes).to.deep.equal(inputDataBytes); - }); - it('returns large data payloads after dwn.records.write()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.json() is executed. - const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); - const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - - // Write the large record to agent-connected DWN. 
- const { record, status } = await dwn.records.write({ data: dataJson }); + // Attempt to read the record again. + readDataJson = await record!.data.json(); + readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - expect(status.code).to.equal(202); + // Ensure the JSON returned matches the input data, byte for byte. + expect(readDataBytes).to.deep.equal(inputDataBytes); - // Confirm that the size, in bytes, of the data read as JSON matches the original input data. - const readDataJson = await record!.data.json(); - const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + // Attempt to read the record again. + readDataJson = await record!.data.json(); + readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); expect(readDataBytes.length).to.equal(inputDataBytes.length); // Ensure the JSON returned matches the input data, byte for byte. expect(readDataBytes).to.deep.equal(inputDataBytes); }); - it('returns large data payloads after dwn.records.query()', async () => { - /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to - * be fetched with a RecordsRead when record.data.json() is executed. */ - const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + it('allows small data payloads written locally to be consumed as a stream repeatedly', async () => { + /** Generate data that is less than the encoded data limit to ensure that the data will not + * have to be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataJson = TestDataGenerator.randomJson(1000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); // Write the large record to agent-connected DWN. const { record, status } = await dwn.records.write({ data: dataJson }); expect(status.code).to.equal(202); - // Query for the record that was just created. 
- const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ - message: { filter: { recordId: record!.id }} - }); - expect(queryRecordStatus.code).to.equal(200); - - // Confirm that the size, in bytes, of the data read as JSON matches the original input data. - const [ queryRecord ] = queryRecords; - const queriedDataBlob = await queryRecord!.data.json(); + // Consume the data stream as bytes. + let readDataStream = await record!.data.stream(); + let readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Convert the JSON to bytes and ensure it matches the input data, byte for byte. - const queriedDataBytes = new TextEncoder().encode(JSON.stringify(queriedDataBlob)); - expect(queriedDataBytes.length).to.equal(inputDataBytes.length); - expect(queriedDataBytes).to.deep.equal(inputDataBytes); + // Consume the data stream as bytes a second time. + readDataStream = await record!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); }); - it('returns large data payloads after dwn.records.read()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.json() is executed. + it('allows large data payloads written locally to be consumed as a stream repeatedly', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.* is executed. */ const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); // Write the large record to agent-connected DWN. 
const { record, status } = await dwn.records.write({ data: dataJson }); - expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); - - expect(readRecordStatus.code).to.equal(200); - - // Confirm that the size, in bytes, of the data read as JSON matches the original input data. - const readDataJson = await readRecord!.data.json(); - const readDataBytes = new TextEncoder().encode(JSON.stringify(readDataJson)); + // Consume the data stream as bytes. + let readDataStream = await record!.data.stream(); + let readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Ensure the JSON returned matches the input data, byte for byte. - expect(readDataBytes).to.deep.equal(inputDataBytes); - }); - }); - - describe('data.text()', () => { - it('returns small data payloads after dwn.records.write()', async () => { - // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.text() is executed. - const dataText = TestDataGenerator.randomString(500); - - // Write the 500B record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataText }); - - expect(status.code).to.equal(202); - - // Confirm that the length of the data read as text matches the original input data. - const readDataText = await record!.data.text(); - expect(readDataText.length).to.equal(dataText.length); - - // Ensure the text returned matches the input data, char for char. - expect(readDataText).to.deep.equal(dataText); + // Consume the data stream as bytes a second time. 
+ readDataStream = await record!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); }); - it('returns small data payloads after dwn.records.read()', async () => { - // Generate data that is less than the encoded data limit to ensure that the data will not have to be fetched - // with a RecordsRead when record.data.text() is executed. - const dataText = TestDataGenerator.randomString(500); - - // Write the 500B record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataText }); + it('allows small data payloads read from a remote to be consumed as a stream repeatedly', async () => { + /** Generate data that is less than the encoded data limit to ensure that the data will not + * have to be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataJson = TestDataGenerator.randomJson(1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + // Create a small record but do NOT store it on the local, agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + // Write the small record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); + // Read the record that was just created on the remote DWN. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + from : alice.did, + message : { filter: { recordId: record!.id }} + }); expect(readRecordStatus.code).to.equal(200); - // Confirm that the length of the data read as text matches the original input data. 
- const readDataText = await readRecord!.data.text(); - expect(readDataText.length).to.equal(dataText.length); - - // Ensure the text returned matches the input data, char for char. - expect(readDataText).to.deep.equal(dataText); - }); - - it('returns large data payloads after dwn.records.write()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.text() is executed. - const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); - - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataText }); - - expect(status.code).to.equal(202); + // Confirm that the size, in bytes, of the data read as a Blob matches the original input data. + const readDataBlob = await readRecord.data.blob(); + expect(readDataBlob.size).to.equal(inputDataBytes.length); - // Confirm that the length of the data read as text matches the original input data. - const readDataText = await record!.data.text(); - expect(readDataText.length).to.equal(dataText.length); + // Confirm that the size, in bytes, of the data consumed as a stream matches the original input data. + let readDataStream = await readRecord!.data.stream(); + let readDataBytes = await DataStream.toBytes(readDataStream); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Ensure the text returned matches the input data, char for char. - expect(readDataText).to.deep.equal(dataText); + // Consume the data stream as bytes a second time. 
+ readDataStream = await readRecord!.data.stream(); + readDataBytes = await DataStream.toBytes(readDataStream); + expect(readDataBytes.length).to.equal(inputDataBytes.length); }); - it('returns large data payloads after dwn.records.query()', async () => { + it('allows large data payloads read from a remote to be consumed as a stream repeatedly', async () => { /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to - * be fetched with a RecordsRead when record.data.blob() is executed. */ - const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + * be fetched with a RecordsRead when record.data.* is executed. */ + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataText }); + // Create a large record but do NOT store it on the local, agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - // Query for the record that was just created. - const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ - message: { filter: { recordId: record!.id }} + // Write the large record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); + + // Read the record that was just created on the remote DWN. + const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ + from : alice.did, + message : { filter: { recordId: record!.id }} }); - expect(queryRecordStatus.code).to.equal(200); + expect(readRecordStatus.code).to.equal(200); - // Confirm that the length of the data read as text matches the original input data. 
- const [ queryRecord ] = queryRecords; - const queriedDataText = await queryRecord!.data.text(); - expect(queriedDataText.length).to.equal(dataText.length); + // Consume the data stream as bytes. + let readDataStream = await readRecord!.data.stream(); + let readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Ensure the text returned matches the input data, char for char. - expect(queriedDataText).to.deep.equal(dataText); - }); + // Consume the data stream as bytes a second time. + readDataStream = await record!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - it('returns large data payloads after dwn.records.read()', async () => { - // Generate data that exceeds the DWN encoded data limit to ensure that the data will have to be fetched - // with a RecordsRead when record.data.text() is executed. - const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + // Consume the data stream as bytes a third time. + readDataStream = await record!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); + }); - // Write the large record to agent-connected DWN. - const { record, status } = await dwn.records.write({ data: dataText }); + it('allows small data payloads queried from a remote to be consumed as a stream repeatedly', async () => { + /** Generate data that is less than the encoded data limit to ensure that the data will not + * have to be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataJson = TestDataGenerator.randomJson(1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + // Create a large record but do NOT store it on the local, agent-connected DWN. 
+ const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - // Read the record that was just created. - const { record: readRecord, status: readRecordStatus } = await dwn.records.read({ message: { filter: { recordId: record!.id }}}); + // Write the small record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); - expect(readRecordStatus.code).to.equal(200); + // Query for the record that was just created on the remote DWN. + const { records: queriedRecords, status: queriedRecordStatus } = await dwn.records.query({ + from : alice.did, + message : { filter: { recordId: record!.id }} + }); + expect(queriedRecordStatus.code).to.equal(200); + + const [ queriedRecord ] = queriedRecords; - // Confirm that the length of the data read as text matches the original input data. - const readDataText = await readRecord!.data.text(); - expect(readDataText.length).to.equal(dataText.length); + // Consume the data stream as bytes. + let readDataStream = await queriedRecord!.data.stream(); + let readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); + + // Consume the data stream as bytes a second time. + readDataStream = await queriedRecord!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - // Ensure the text returned matches the input data, char for char. - expect(readDataText).to.deep.equal(dataText); + // Consume the data stream as bytes a third time. 
+ readDataStream = await queriedRecord!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); }); - }); - describe('delete()', () => { - it('deletes the record', async () => { - const { status, record } = await dwn.records.write({ - data : 'Hello, world!', - message : { - schema : 'foo/bar', - dataFormat : 'text/plain' - } - }); + it('allows large data payloads queried from a remote to be consumed as a stream repeatedly', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.* is executed. */ + const dataJson = TestDataGenerator.randomJson(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + const inputDataBytes = new TextEncoder().encode(JSON.stringify(dataJson)); + // Create a large record but do NOT store it on the local, agent-connected DWN. + const { record, status } = await dwn.records.write({ data: dataJson, store: false }); expect(status.code).to.equal(202); - expect(record).to.not.be.undefined; - const deleteResult = await record!.delete(); - expect(deleteResult.status.code).to.equal(202); + // Write the large record to a remote DWN. + const { status: sendStatus } = await record!.send(alice.did); + expect(sendStatus.code).to.equal(202); - const queryResult = await dwn.records.query({ - message: { - filter: { - recordId: record!.id - } - } + // Query for the record that was just created on the remote DWN. 
+ const { records: queriedRecords, status: queriedRecordStatus } = await dwn.records.query({ + from : alice.did, + message : { filter: { recordId: record!.id }} }); + expect(queriedRecordStatus.code).to.equal(200); - expect(queryResult.status.code).to.equal(200); - expect(queryResult.records!.length).to.equal(0); - }); - - it('throws an exception when delete is called twice', async () => { - const { status, record } = await dwn.records.write({ - data : 'Hello, world!', - message : { - schema : 'foo/bar', - dataFormat : 'text/plain' - } - }); + const [ queriedRecord ] = queriedRecords; - expect(status.code).to.equal(202); - expect(record).to.not.be.undefined; + // Consume the data stream as bytes. + let readDataStream = await queriedRecord!.data.stream(); + let readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - let deleteResult = await record!.delete(); - expect(deleteResult.status.code).to.equal(202); + // Consume the data stream as bytes a second time. + readDataStream = await queriedRecord!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); - await expect(record!.delete()).to.eventually.be.rejectedWith('Operation failed'); + // Consume the data stream as bytes a third time. + readDataStream = await queriedRecord!.data.stream(); + readDataBytes = await NodeStream.consumeToBytes({ readable: readDataStream }); + expect(readDataBytes.length).to.equal(inputDataBytes.length); }); }); @@ -610,7 +1002,7 @@ describe('Record', () => { }); }); - it('writes records to remote DWNs for your own DID', async () => { + it('writes small records to remote DWNs for your own DID', async () => { const dataString = 'Hello, world!'; // Alice writes a message to her agent connected DWN. 
@@ -659,6 +1051,69 @@ describe('Record', () => { expect(await aliceRemoteEmailRecord.data.text()).to.equal(dataString); }); + it('writes large records to remote DWNs that were initially queried from a local DWN', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.send() is executed. */ + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Alice writes a message to her agent connected DWN. + const { status: aliceEmailStatus } = await dwn.records.write({ + data : dataText, + message : { + schema: 'email', + } + }); + expect(aliceEmailStatus.code).to.equal(202); + + // Query Alice's local, agent connected DWN for `email` schema records. + const aliceAgentQueryResult = await dwn.records.query({ + message: { + filter: { + schema: 'email' + } + } + }); + + expect(aliceAgentQueryResult.status.code).to.equal(200); + expect(aliceAgentQueryResult!.records).to.have.length(1); + const [ aliceAgentEmailRecord ] = aliceAgentQueryResult!.records!; + + // Attempt to write the record to Alice's remote DWN. + const { status } = await aliceAgentEmailRecord!.send(alice.did); + expect(status.code).to.equal(202); + }); + + it('writes large records to remote DWNs that were initially read from a local DWN', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.send() is executed. */ + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Alice writes a message to her agent connected DWN. + const { status: aliceEmailStatus, record: aliceEmailRecord } = await dwn.records.write({ + data : dataText, + message : { + schema: 'email', + } + }); + expect(aliceEmailStatus.code).to.equal(202); + + // Read from Alice's local, agent connected DWN for the record that was just created. 
+ const aliceAgentReadResult = await dwn.records.read({ + message: { + filter: { + recordId: aliceEmailRecord.id + } + } + }); + + expect(aliceAgentReadResult.status.code).to.equal(200); + expect(aliceAgentReadResult.record).to.exist; + + // Attempt to write the record to Alice's remote DWN. + const { status } = await aliceAgentReadResult.record.send(alice.did); + expect(status.code).to.equal(202); + }); + it('writes updated records to a remote DWN', async () => { /** * NOTE: The issue that this test was added to cover was intermittently failing the first @@ -699,6 +1154,180 @@ describe('Record', () => { expect(sendResult.status.code).to.equal(202); }); + // TODO: Fix after changes are made to dwn-sdk-js to include the initial write in every query/read response. + it('fails to write updated records to a remote DWN that is missing the initial write', async () => { + // Alice writes a message to her agent connected DWN. + const { status, record } = await dwn.records.write({ + data : 'Hello, world!', + message : { + schema : 'foo/bar', + dataFormat : 'text/plain' + } + }); + expect(status.code).to.equal(202); + + // Update the record by mutating the data property. + const updateResult = await record!.update({ data: 'hi' }); + expect(updateResult.status.code).to.equal(202); + + // Write the updated record to Alice's remote DWN a second time. + const sendResult = await record!.send(alice.did); + expect(sendResult.status.code).to.equal(400); + expect(sendResult.status.detail).to.equal('RecordsWriteGetInitialWriteNotFound: initial write is not found'); + + // TODO: Uncomment the following line after changes are made to dwn-sdk-js to include the initial write in every query/read response. 
+ // expect(sendResult.status.code).to.equal(202); + }); + + it('writes large records to remote DWNs that were initially queried from a remote DWN', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Install the email protocol for Alice's local DWN. + let { protocol: aliceProtocol, status: aliceStatus } = await dwn.protocols.configure({ + message: { definition: emailProtocolDefinition } + }); + expect(aliceStatus.code).to.equal(202); + expect(aliceProtocol).to.exist; + + // Install the email protocol for Alice's remote DWN. + const { status: alicePushStatus } = await aliceProtocol!.send(alice.did); + expect(alicePushStatus.code).to.equal(202); + + // Instantiate DwnApi instance for Bob. + const bobDwn = new DwnApi({ agent: testAgent.agent, connectedDid: bob.did }); + + // Install the email protocol for Bob's local DWN. + const { protocol: bobProtocol, status: bobStatus } = await bobDwn.protocols.configure({ + message: { + definition: emailProtocolDefinition + } + }); + + expect(bobStatus.code).to.equal(202); + expect(bobProtocol).to.exist; + + // Install the email protocol for Bob's remote DWN. + const { status: bobPushStatus } = await bobProtocol!.send(bob.did); + expect(bobPushStatus.code).to.equal(202); + + // Alice creates a new large record but does not store it in her local DWN. + const { status: aliceEmailStatus, record: aliceEmailRecord } = await dwn.records.write({ + store : false, + data : dataText, + message : { + protocol : emailProtocolDefinition.protocol, + protocolPath : 'email', + schema : 'http://email-protocol.xyz/schema/email', + } + }); + expect(aliceEmailStatus.code).to.equal(202); + + // Alice writes the large record to her own remote DWN. 
+ const { status: sendStatus } = await aliceEmailRecord!.send(alice.did); + expect(sendStatus.code).to.equal(202); + + // Alice queries for the record that was just created on her remote DWN. + const { records: queryRecords, status: queryRecordStatus } = await dwn.records.query({ + from : alice.did, + message : { filter: { recordId: aliceEmailRecord!.id }} + }); + expect(queryRecordStatus.code).to.equal(200); + + // Attempt to write the record to Bob's DWN. + const [ queryRecord ] = queryRecords; + const { status } = await queryRecord!.send(bob.did); + expect(status.code).to.equal(202); + + // Confirm Bob can query his own remote DWN for the created record. + const bobQueryResult = await bobDwn.records.query({ + from : bob.did, + message : { + filter: { + schema: 'http://email-protocol.xyz/schema/email' + } + } + }); + expect(bobQueryResult.status.code).to.equal(200); + expect(bobQueryResult.records).to.exist; + expect(bobQueryResult.records!.length).to.equal(1); + }); + + it('writes large records to remote DWNs that were initially read from a remote DWN', async () => { + /** Generate data that exceeds the DWN encoded data limit to ensure that the data will have to + * be fetched with a RecordsRead when record.data.blob() is executed. */ + const dataText = TestDataGenerator.randomString(DwnConstant.maxDataSizeAllowedToBeEncoded + 1000); + + // Install the email protocol for Alice's local DWN. + let { protocol: aliceProtocol, status: aliceStatus } = await dwn.protocols.configure({ + message: { definition: emailProtocolDefinition } + }); + expect(aliceStatus.code).to.equal(202); + expect(aliceProtocol).to.exist; + + // Install the email protocol for Alice's remote DWN. + const { status: alicePushStatus } = await aliceProtocol!.send(alice.did); + expect(alicePushStatus.code).to.equal(202); + + // Instantiate DwnApi instance for Bob. + const bobDwn = new DwnApi({ agent: testAgent.agent, connectedDid: bob.did }); + + // Install the email protocol for Bob's local DWN. 
+ const { protocol: bobProtocol, status: bobStatus } = await bobDwn.protocols.configure({
+ message: {
+ definition: emailProtocolDefinition
+ }
+ });
+
+ expect(bobStatus.code).to.equal(202);
+ expect(bobProtocol).to.exist;
+
+ // Install the email protocol for Bob's remote DWN.
+ const { status: bobPushStatus } = await bobProtocol!.send(bob.did);
+ expect(bobPushStatus.code).to.equal(202);
+
+ // Alice creates a new large record but does not store it in her local DWN.
+ const { status: aliceEmailStatus, record: aliceEmailRecord } = await dwn.records.write({
+ store : false,
+ data : dataText,
+ message : {
+ protocol : emailProtocolDefinition.protocol,
+ protocolPath : 'email',
+ schema : 'http://email-protocol.xyz/schema/email',
+ }
+ });
+ expect(aliceEmailStatus.code).to.equal(202);
+
+ // Alice writes the large record to her own remote DWN.
+ const { status: sendStatus } = await aliceEmailRecord!.send(alice.did);
+ expect(sendStatus.code).to.equal(202);
+
+ // Alice reads the record that was just created on her remote DWN.
+ const { record: queryRecord, status: queryRecordStatus } = await dwn.records.read({
+ from : alice.did,
+ message : { filter: { recordId: aliceEmailRecord!.id }}
+ });
+ expect(queryRecordStatus.code).to.equal(200);
+
+ // Attempt to write the record to Bob's DWN.
+ const { status } = await queryRecord!.send(bob.did);
+ expect(status.code).to.equal(202);
+
+ // Confirm Bob can query his own remote DWN for the created record.
+ const bobQueryResult = await bobDwn.records.query({ + from : bob.did, + message : { + filter: { + schema: 'http://email-protocol.xyz/schema/email' + } + } + }); + expect(bobQueryResult.status.code).to.equal(200); + expect(bobQueryResult.records).to.exist; + expect(bobQueryResult.records!.length).to.equal(1); + }); + it(`writes records to remote DWNs for someone else's DID`, async () => { const dataString = 'Hello, world!'; @@ -767,8 +1396,8 @@ describe('Record', () => { }); describe('with store: false', () => { - it('writes records to your own remote DWN but not your agent DWN', async () => { - // Alice writes a message to her agent DWN with `store: false`. + it('writes records to your own remote DWN but not your local DWN', async () => { + // Alice creates a record but does not store it on her local DWN with `store: false`. const dataString = 'Hello, world!'; const writeResult = await dwn.records.write({ store : false, @@ -1091,7 +1720,7 @@ describe('Record', () => { }); describe('update()', () => { - it('updates a record', async () => { + it('updates a local record on the local DWN', async () => { const { status, record } = await dwn.records.write({ data : 'Hello, world!', message : { @@ -1126,6 +1755,180 @@ describe('Record', () => { expect(updatedData).to.equal('bye'); }); + // TODO: Fix after changes are made to dwn-sdk-js to include the initial write in every query/read response. + it('fails to update a record locally that only written to a remote DWN', async () => { + // Create a record but do not store it on the local DWN. + const { status, record } = await dwn.records.write({ + store : false, + data : 'Hello, world!', + message : { + schema : 'foo/bar', + dataFormat : 'text/plain' + } + }); + expect(status.code).to.equal(202); + expect(record).to.not.be.undefined; + + // Store the data CID of the record before it is updated. + // const dataCidBeforeDataUpdate = record!.dataCid; + + // Write the record to a remote DWN. 
+ const { status: sendStatus } = await record!.send(alice.did);
+ expect(sendStatus.code).to.equal(202);
+
+ /** Attempt to update the record, which should write the updated record to the local DWN but
+ * instead fails due to a missing initial write. */
+ const updateResult = await record!.update({ data: 'bye' });
+ expect(updateResult.status.code).to.equal(400);
+ expect(updateResult.status.detail).to.equal('RecordsWriteGetInitialWriteNotFound: initial write is not found');
+
+ // TODO: Uncomment these lines after the issue mentioned above is fixed.
+ // expect(updateResult.status.code).to.equal(202);
+
+ // Confirm that the record was written to the local DWN.
+ // const readResult = await dwn.records.read({
+ // message: {
+ // filter: {
+ // recordId: record!.id
+ // }
+ // }
+ // });
+ // expect(readResult.status.code).to.equal(200);
+ // expect(readResult.record).to.not.be.undefined;
+
+ // Confirm that the data CID of the record was updated.
+ // expect(readResult.record.dataCid).to.not.equal(dataCidBeforeDataUpdate);
+ // expect(readResult.record.dataCid).to.equal(record!.dataCid);
+
+ // Confirm that the data payload of the record was modified.
+ // const updatedData = await record!.data.text();
+ // expect(updatedData).to.equal('bye');
+ });
+
+ // TODO: Fix after changes are made to dwn-sdk-js to include the initial write in every query/read response.
+ it('fails to update a record locally that was initially read from a remote DWN', async () => {
+ // Create a record but do not store it on the local DWN.
+ const { status, record } = await dwn.records.write({
+ store : false,
+ data : 'Hello, world!',
+ message : {
+ schema : 'foo/bar',
+ dataFormat : 'text/plain'
+ }
+ });
+ expect(status.code).to.equal(202);
+ expect(record).to.not.be.undefined;
+
+ // Store the data CID of the record before it is updated.
+ // const dataCidBeforeDataUpdate = record!.dataCid;
+
+ // Write the record to a remote DWN.
+ const { status: sendStatus } = await record!.send(alice.did);
+ expect(sendStatus.code).to.equal(202);
+
+ // Read the record from the remote DWN.
+ const readResult = await dwn.records.read({
+ from : alice.did,
+ message : {
+ filter: {
+ recordId: record!.id
+ }
+ }
+ });
+ expect(readResult.status.code).to.equal(200);
+ expect(readResult.record).to.not.be.undefined;
+
+ // Attempt to update the record, which should write the updated record to the local DWN.
+ const updateResult = await readResult.record!.update({ data: 'bye' });
+ expect(updateResult.status.code).to.equal(400);
+ expect(updateResult.status.detail).to.equal('RecordsWriteGetInitialWriteNotFound: initial write is not found');
+
+ // TODO: Uncomment these lines after the issue mentioned above is fixed.
+ // expect(updateResult.status.code).to.equal(202);
+
+ // Confirm that the record was written to the local DWN.
+ // const readResult = await dwn.records.read({
+ // message: {
+ // filter: {
+ // recordId: record!.id
+ // }
+ // }
+ // });
+ // expect(readResult.status.code).to.equal(200);
+ // expect(readResult.record).to.not.be.undefined;
+
+ // Confirm that the data CID of the record was updated.
+ // expect(readResult.record.dataCid).to.not.equal(dataCidBeforeDataUpdate);
+ // expect(readResult.record.dataCid).to.equal(record!.dataCid);
+
+ // Confirm that the data payload of the record was modified.
+ // const updatedData = await record!.data.text();
+ // expect(updatedData).to.equal('bye');
+ });
+
+ // TODO: Fix after changes are made to dwn-sdk-js to include the initial write in every query/read response.
+ it('fails to update a record locally that was initially queried from a remote DWN', async () => {
+ // Create a record but do not store it on the local DWN.
+ const { status, record } = await dwn.records.write({
+ store : false,
+ data : 'Hello, world!',
+ message : {
+ schema : 'foo/bar',
+ dataFormat : 'text/plain'
+ }
+ });
+ expect(status.code).to.equal(202);
+ expect(record).to.not.be.undefined;
+
+ // Store the data CID of the record before it is updated.
+ // const dataCidBeforeDataUpdate = record!.dataCid;
+
+ // Write the record to a remote DWN.
+ const { status: sendStatus } = await record!.send(alice.did);
+ expect(sendStatus.code).to.equal(202);
+
+ // Query the record from the remote DWN.
+ const queryResult = await dwn.records.query({
+ from : alice.did,
+ message : {
+ filter: {
+ recordId: record!.id
+ }
+ }
+ });
+ expect(queryResult.status.code).to.equal(200);
+ expect(queryResult.records).to.not.be.undefined;
+ expect(queryResult.records.length).to.equal(1);
+
+ // Attempt to update the queried record, which should write the updated record to the local DWN.
+ const [ queriedRecord ] = queryResult.records;
+ const updateResult = await queriedRecord!.update({ data: 'bye' });
+ expect(updateResult.status.code).to.equal(400);
+ expect(updateResult.status.detail).to.equal('RecordsWriteGetInitialWriteNotFound: initial write is not found');
+
+ // TODO: Uncomment these lines after the issue mentioned above is fixed.
+ // expect(updateResult.status.code).to.equal(202);
+
+ // Confirm that the record was written to the local DWN.
+ // const readResult = await dwn.records.read({
+ // message: {
+ // filter: {
+ // recordId: record!.id
+ // }
+ // }
+ // });
+ // expect(readResult.status.code).to.equal(200);
+ // expect(readResult.record).to.not.be.undefined;
+
+ // Confirm that the data CID of the record was updated.
+ // expect(readResult.record.dataCid).to.not.equal(dataCidBeforeDataUpdate);
+ // expect(readResult.record.dataCid).to.equal(record!.dataCid);
+
+ // Confirm that the data payload of the record was modified.
+ // const updatedData = await record!.data.text(); + // expect(updatedData).to.equal('bye'); + }); + it('returns new dateModified after each update', async () => { // Initial write of the record. const { status, record } = await dwn.records.write({ @@ -1173,4 +1976,4 @@ describe('Record', () => { ).to.eventually.be.rejectedWith('is an immutable property. Its value cannot be changed.'); }); }); -}); +}); \ No newline at end of file diff --git a/packages/common/package.json b/packages/common/package.json index 8f104b669..4da717894 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -69,7 +69,8 @@ }, "dependencies": { "level": "8.0.0", - "multiformats": "11.0.2" + "multiformats": "11.0.2", + "readable-stream": "4.4.2" }, "devDependencies": { "@playwright/test": "1.40.1", @@ -77,6 +78,7 @@ "@types/chai-as-promised": "7.1.5", "@types/eslint": "8.44.2", "@types/mocha": "10.0.1", + "@types/readable-stream": "4.0.9", "@typescript-eslint/eslint-plugin": "6.4.0", "@typescript-eslint/parser": "6.4.0", "@web/test-runner": "0.18.0", diff --git a/packages/common/src/convert.ts b/packages/common/src/convert.ts index 294648582..3ae75ef7b 100644 --- a/packages/common/src/convert.ts +++ b/packages/common/src/convert.ts @@ -3,7 +3,7 @@ import type { Multibase } from 'multiformats'; import { base58btc } from 'multiformats/bases/base58'; import { base64url } from 'multiformats/bases/base64'; -import { isArrayBufferSlice, universalTypeOf } from './type-utils.js'; +import { isAsyncIterable, isArrayBufferSlice, universalTypeOf } from './type-utils.js'; const textEncoder = new TextEncoder(); const textDecoder = new TextDecoder(); @@ -21,6 +21,13 @@ export class Convert { return new Convert(data, 'ArrayBuffer'); } + static asyncIterable(data: AsyncIterable): Convert { + if (!isAsyncIterable(data)) { + throw new TypeError('Input must be of type AsyncIterable.'); + } + return new Convert(data, 'AsyncIterable'); + } + static base58Btc(data: string): Convert { 
return new Convert(data, 'Base58Btc'); } @@ -112,6 +119,18 @@ export class Convert { } } + async toArrayBufferAsync(): Promise { + switch (this.format) { + case 'AsyncIterable': { + const blob = await this.toBlobAsync(); + return await blob.arrayBuffer(); + } + + default: + throw new TypeError(`Asynchronous conversion from ${this.format} to ArrayBuffer is not supported.`); + } + } + toBase58Btc(): string { switch (this.format) { @@ -166,6 +185,30 @@ export class Convert { } } + async toBlobAsync(): Promise { + switch (this.format) { + case 'AsyncIterable': { + // Initialize an array to hold the chunks from the AsyncIterable. + const chunks = []; + + // Asynchronously iterate over each chunk in the AsyncIterable. + for await (const chunk of (this.data as AsyncIterable)) { + // Append each chunk to the chunks array. These chunks can be of any type, typically binary data or text. + chunks.push(chunk); + } + + // Create a new Blob from the aggregated chunks. + // The Blob constructor combines these chunks into a single Blob object. + const blob = new Blob(chunks); + + return blob; + } + + default: + throw new TypeError(`Asynchronous conversion from ${this.format} to Blob is not supported.`); + } + } + toHex(): string { // pre-calculating Hex values improves runtime by 6-10x. 
const hexes = Array.from({ length: 256 }, (v, i) => i.toString(16).padStart(2, '0')); @@ -211,8 +254,8 @@ export class Convert { case 'Base64Url': { const u8a = base64url.baseDecode(this.data); - const string = textDecoder.decode(u8a); - return JSON.parse(string); + const text = textDecoder.decode(u8a); + return JSON.parse(text); } case 'String': { @@ -220,8 +263,8 @@ export class Convert { } case 'Uint8Array': { - const string = textDecoder.decode(this.data); - return JSON.parse(string); + const text = textDecoder.decode(this.data); + return JSON.parse(text); } default: @@ -229,6 +272,26 @@ export class Convert { } } + async toObjectAsync(): Promise { + switch (this.format) { + case 'AsyncIterable': { + // Convert the AsyncIterable to a String. + const text = await this.toStringAsync(); + + // Parse the string as JSON. This step assumes that the string represents a valid JSON structure. + // JSON.parse() will convert the string into a corresponding JavaScript object. + const json = JSON.parse(text); + + // Return the parsed JavaScript object. The type of this object will depend on the structure + // of the JSON in the stream. It could be an object, array, string, number, etc. + return json; + } + + default: + throw new TypeError(`Asynchronous conversion from ${this.format} to Object is not supported.`); + } + } + toString(): string { switch (this.format) { @@ -254,6 +317,37 @@ export class Convert { } } + async toStringAsync(): Promise { + switch (this.format) { + case 'AsyncIterable': { + // Initialize an empty string to accumulate the decoded text. + let str = ''; + + // Iterate over the chunks from the AsyncIterable. + for await (const chunk of (this.data as AsyncIterable)) { + // If the chunk is already a string, concatenate it directly. + if (typeof chunk === 'string') + str += chunk; + else + // If the chunk is a Uint8Array or similar, use the decoder to convert it to a string. 
+ // The `stream: true` option lets the decoder handle multi-byte characters spanning + // multiple chunks. + str += textDecoder.decode(chunk, { stream: true }); + } + + // Finalize the decoding process to handle any remaining bytes and signal the end of the stream. + // The `stream: false` option flushes the decoder's internal state. + str += textDecoder.decode(undefined, { stream: false }); + + // Return the accumulated string. + return str; + } + + default: + throw new TypeError(`Asynchronous conversion from ${this.format} to String is not supported.`); + } + } + toUint8Array(): Uint8Array { switch (this.format) { @@ -314,4 +408,16 @@ export class Convert { throw new TypeError(`Conversion from ${this.format} to Uint8Array is not supported.`); } } + + async toUint8ArrayAsync(): Promise { + switch (this.format) { + case 'AsyncIterable': { + const arrayBuffer = await this.toArrayBufferAsync(); + return new Uint8Array(arrayBuffer); + } + + default: + throw new TypeError(`Asynchronous conversion from ${this.format} to Uint8Array is not supported.`); + } + } } \ No newline at end of file diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index 204ddeb1c..9766464cf 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -4,4 +4,6 @@ export * from './convert.js'; export * from './multicodec.js'; export * from './object.js'; export * from './stores.js'; +export * from './stream.js'; +export * from './stream-node.js'; export * from './type-utils.js'; \ No newline at end of file diff --git a/packages/common/src/stream-node.ts b/packages/common/src/stream-node.ts new file mode 100644 index 000000000..261e4b74a --- /dev/null +++ b/packages/common/src/stream-node.ts @@ -0,0 +1,381 @@ +import type { Duplex, ReadableStateOptions, Transform, Writable } from 'readable-stream'; + +import { Readable } from 'readable-stream'; +import { Stream } from './stream.js'; +import { Convert } from './convert.js'; + +export { Readable } from 
'readable-stream'; + +export class NodeStream { + /** + * Consumes a `Readable` stream and returns its contents as an `ArrayBuffer`. + * + * This method reads all data from a Node.js `Readable` stream, collects it, and converts it into + * an `ArrayBuffer`. + * + * @example + * ```ts + * const nodeReadable = getReadableStreamSomehow(); + * const arrayBuffer = await NodeStream.consumeToArrayBuffer({ readable: nodeReadable }); + * ``` + * + * @param readable - The Node.js Readable stream whose data will be consumed. + * @returns A Promise that resolves to an `ArrayBuffer` containing all the data from the stream. + */ + public static async consumeToArrayBuffer({ readable }: { readable: Readable}): Promise { + const arrayBuffer = await Convert.asyncIterable(readable).toArrayBufferAsync(); + + return arrayBuffer; + } + + /** + * Consumes a `Readable` stream and returns its contents as a `Blob`. + * + * This method reads all data from a Node.js `Readable` stream, collects it, and converts it into + * a `Blob`. + * + * @example + * ```ts + * const nodeReadable = getReadableStreamSomehow(); + * const blob = await NodeStream.consumeToBlob({ readable: nodeReadable }); + * ``` + * + * @param readableStream - The Node.js `Readable` stream whose data will be consumed. + * @returns A Promise that resolves to a `Blob` containing all the data from the stream. + */ + public static async consumeToBlob({ readable }: { readable: Readable }): Promise { + const blob = await Convert.asyncIterable(readable).toBlobAsync(); + + return blob; + } + + /** + * Consumes a `Readable` stream and returns its contents as a `Uint8Array`. + * + * This method reads all data from a Node.js `Readable`, collects it, and converts it into a + * `Uint8Array`. 
+ * + * @example + * ```ts + * const nodeReadable = getReadableStreamSomehow(); + * const bytes = await NodeStream.consumeToBytes({ readable: nodeReadable }); + * ``` + * + * @param readableStream - The Node.js `Readable` stream whose data will be consumed. + * @returns A Promise that resolves to a `Uint8Array` containing all the data from the stream. + */ + public static async consumeToBytes({ readable }: { readable: Readable }): Promise { + const bytes = await Convert.asyncIterable(readable).toUint8ArrayAsync(); + + return bytes; + } + + /** + * Consumes a `Readable` stream and parses its contents as JSON. + * + * This method reads all the data from the stream, converts it to a text string, and then parses + * it as JSON, returning the resulting object. + * + * @example + * ```ts + * const nodeReadable = getReadableStreamSomehow(); + * const jsonData = await NodeStream.consumeToJson({ readable: nodeReadable }); + * ``` + * + * @param readableStream - The Node.js `Readable` stream whose JSON content will be consumed. + * @returns A Promise that resolves to the parsed JSON object from the stream's data. + */ + public static async consumeToJson({ readable }: { readable: Readable }): Promise { + const object = await Convert.asyncIterable(readable).toObjectAsync(); + + return object; + } + + /** + * Consumes a `Readable` stream and returns its contents as a text string. + * + * This method reads all the data from the stream, converting it into a single string. + * + * @example + * ```ts + * const nodeReadable = getReadableStreamSomehow(); + * const text = await NodeStream.consumeToText({ readable: nodeReadable }); + * ``` + * + * @param readableStream - The Node.js `Readable` stream whose text content will be consumed. + * @returns A Promise that resolves to a string containing all the data from the stream. 
+ */ + public static async consumeToText({ readable }: { readable: Readable}): Promise { + const text = await Convert.asyncIterable(readable).toStringAsync(); + + return text; + } + + /** + * Converts a Web `ReadableStream` to a Node.js `Readable` stream. + * + * This method takes a Web `ReadableStream` and converts it to a Node.js `Readable` stream. + * The conversion is done by reading chunks from the Web `ReadableStream` and pushing them + * into the Node.js `Readable` stream. + * + * @example + * ```ts + * const webReadableStream = getWebReadableStreamSomehow(); + * const nodeReadableStream = NodeStream.fromWebReadable({ readableStream: webReadableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` to be converted. + * @param readableOptions - Optional `Readable` stream options for the Node.js stream. + * @returns The Node.js `Readable` stream. + */ + public static fromWebReadable({ readableStream, readableOptions }: { + readableStream: ReadableStream, + readableOptions?: ReadableStateOptions + }): Readable { + if (!Stream.isReadableStream(readableStream)) { + throw new TypeError(`NodeStream.fromWebReadable: 'readableStream' is not a Web ReadableStream.`); + } + + const reader = readableStream.getReader(); + let closed = false; + + const nodeReadable = new Readable({ + ...readableOptions, + + read: function () { + reader.read().then(({ done, value }) => { + if (done) { + this.push(null); // Push null to signify end of stream. + } else { + if (!this.push(value)) { + // When push returns false, we should stop reading until _read is called again. + return; + } + } + }).catch((error) => { + // If an error occurs while reading, destroy the stream. 
+ this.destroy(error); + }); + }, + + destroy: function (error, callback) { + function done() { + callback(error); + } + + if (!closed) { + reader.cancel(error) + .then(done) + .catch(done); + return; + } + done(); + } + }); + + reader.closed + .then(() => { + closed = true; // Prevents reader.cancel() from being called in destroy() + }) + .catch((error) => { + closed = true; // Prevents reader.cancel() from being called in destroy() + nodeReadable.destroy(error); + }); + + return nodeReadable; + } + + /** + * Checks if a Node.js stream (`Readable`, `Writable`, `Duplex`, or `Transform`) has been destroyed. + * + * This method determines whether the provided Node.js stream has been destroyed. A stream + * is considered destroyed if its 'destroyed' property is set to true or if its internal state + * indicates it has been destroyed. + * + * @example + * ```ts + * const stream = getStreamSomehow(); + * stream.destroy(); // Destroy the stream. + * const isDestroyed = NodeStream.isDestroyed({ stream }); + * console.log(isDestroyed); // Output: true + * ``` + * + * @param stream - The Node.js stream to check. + * @returns `true` if the stream has been destroyed; otherwise, `false`. + */ + public static isDestroyed({ stream }: { stream: Readable | Writable | Duplex | Transform }): boolean { + if (!NodeStream.isStream(stream)) { + throw new TypeError(`NodeStream.isDestroyed: 'stream' is not a Node stream.`); + } + + const writableState = '_writableState' in stream ? stream._writableState : undefined; + const readableState = stream._readableState; + const state = writableState || readableState; + + return !!(stream.destroyed || state.destroyed); + } + + /** + * Checks if a Node.js `Readable` stream is still readable. + * + * This method checks if a Node.js `Readable` stream is still in a state that allows reading from + * it. A stream is considered readable if it has not ended, has not been destroyed, and is not + * currently paused. 
+ * + * @example + * ```ts + * const readableStream = new Readable(); + * const isReadable = NodeStream.isReadable({ readable: readableStream }); + * console.log(isReadable); // Output: true or false + * ``` + * + * @param readable - The Node.js `Readable` stream to be checked. + * @returns `true` if the stream is still readable; otherwise, `false`. + */ + public static isReadable({ readable }: { readable: Readable }): boolean { + // Check if the object is a Node Readable stream. + if (!NodeStream.isReadableStream(readable)) { + return false; + } + + // Check if the stream is still readable. + return ( + readable.readable && // Is the stream readable? + (typeof readable._readableState.ended === 'boolean' && !readable._readableState.ended) && // Has the 'end' method been called? + (typeof readable._readableState.endEmitted === 'boolean' && !readable._readableState.endEmitted) && // Has the 'end' event been emitted? + !readable.destroyed && // Has the 'destroy' method been called? + !readable.isPaused() // Is the stream paused? + ); + } + + /** + * Checks if an object is a Node.js `Readable` stream. + * + * This method verifies if the provided object is a Node.js `Readable` stream by checking for + * specific properties and methods typical of a `Readable` stream in Node.js. + * + * @example + * ```ts + * const obj = getSomeObject(); + * if (NodeStream.isReadableStream(obj)) { + * // obj is a Node.js Readable stream + * } + * ``` + * + * @param obj - The object to be checked. + * @returns `true` if `obj` is a Node.js `Readable` stream; otherwise, `false`. + */ + static isReadableStream(obj: unknown): obj is Readable { + return ( + typeof obj === 'object' && + obj !== null && + ('pipe' in obj && typeof obj.pipe === 'function') && + ('on' in obj && typeof obj.on === 'function') && + (!('_writableState' in obj) && '_readableState' in obj) + ); + } + + /** + * Checks if the provided object is a Node.js stream (`Duplex`, `Readable`, `Writable`, or `Transform`). 
+ * + * This method checks for the presence of internal properties specific to Node.js streams: + * `_readableState` and `_writableState`. These properties are present in Node.js stream + * instances, allowing identification of the stream type. + * + * The `_readableState` property is found in `Readable` and `Duplex` streams (including + * `Transform` streams, which are a type of `Duplex` stream), indicating that the stream can be + * read from. The `_writableState` property is found in `Writable` and `Duplex` streams, + * indicating that the stream can be written to. + * + * @example + * ```ts + * const { Readable, Writable, Duplex, Transform } = require('stream'); + * + * const readableStream = new Readable(); + * console.log(NodeStream.isStream(readableStream)); // Output: true + * + * const writableStream = new Writable(); + * console.log(NodeStream.isStream(writableStream)); // Output: true + * + * const duplexStream = new Duplex(); + * console.log(NodeStream.isStream(duplexStream)); // Output: true + * + * const transformStream = new Transform(); + * console.log(NodeStream.isStream(transformStream)); // Output: true + * + * const nonStreamObject = {}; + * console.log(NodeStream.isStream(nonStreamObject)); // Output: false + * ``` + * + * @remarks + * - This method does not differentiate between the different types of streams (Readable, + * Writable, Duplex, Transform). It simply checks if the object is any kind of Node.js stream. + * - While this method can identify standard Node.js streams, it may not recognize custom or + * third-party stream-like objects that do not inherit directly from Node.js's stream classes + * or do not have these internal state properties. This is intentional as many of the methods + * in this library are designed to work with standard Node.js streams. + * + * @param obj - The object to be checked for being a Node.js stream. 
+ * @returns `true` if the object is a Node.js stream (`Duplex`, `Readable`, `Writable`, or `Transform`); otherwise, `false`. + */ + public static isStream(obj: unknown): obj is Duplex | Readable | Writable | Transform { + return ( + typeof obj === 'object' && obj !== null && + ('_readableState' in obj || '_writableState' in obj) + ); + } + + /** + * Converts a Node.js `Readable` stream to a Web `ReadableStream`. + * + * This method provides a bridge between Node.js streams and the Web Streams API by converting a + * Node.js `Readable` stream into a Web `ReadableStream`. It listens for 'data', 'end', and 'error' + * events on the Node.js stream and appropriately enqueues data, closes, or errors the Web + * `ReadableStream`. + * + * If the Node.js stream is already destroyed, the method returns an immediately cancelled + * Web `ReadableStream`. + * + * @example + * ```ts + * const nodeReadable = getNodeReadableStreamSomehow(); + * const webReadableStream = NodeStream.toWebReadable({ readable: nodeReadable }); + * ``` + * + * @param readable - The Node.js `Readable` stream to be converted. + * @returns A Web `ReadableStream` corresponding to the provided Node.js `Readable` stream. + * @throws TypeError if `readable` is not a Node.js `Readable` stream. + * @remarks If the Node.js `Readable` stream is already destroyed, an immediately cancelled Web `ReadableStream` is returned rather than an error being thrown.
+ */ + static toWebReadable({ readable }: { readable: Readable }): ReadableStream { + if (!NodeStream.isReadableStream(readable)) { + throw new TypeError(`NodeStream.toWebReadable: 'readable' is not a Node Readable stream.`); + } + + if (NodeStream.isDestroyed({ stream: readable })) { + const readable = new ReadableStream(); + readable.cancel(); + return readable; + } + + return new ReadableStream({ + start(controller) { + readable.on('data', (chunk) => { + controller.enqueue(chunk); + }); + + readable.on('end', () => { + controller.close(); + }); + + readable.on('error', (err) => { + controller.error(err); + }); + }, + + cancel() { + readable.destroy(); + } + }); + } +} \ No newline at end of file diff --git a/packages/common/src/stream.ts b/packages/common/src/stream.ts new file mode 100644 index 000000000..5551ce92f --- /dev/null +++ b/packages/common/src/stream.ts @@ -0,0 +1,406 @@ +import { Convert } from './convert.js'; + +export class Stream { + /** + * Transforms a `ReadableStream` into an `AsyncIterable`. This allows for the asynchronous + * iteration over the stream's data chunks. + * + * This method creates an async iterator from a `ReadableStream`, enabling the use of + * `for await...of` loops to process stream data. It reads from the stream until it's closed or + * errored, yielding each chunk as it becomes available. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * for await (const chunk of Stream.asAsyncIterator(readableStream)) { + * // process each chunk + * } + * ``` + * + * @remarks + * - The method ensures proper cleanup by releasing the reader lock when iteration is completed or + * if an error occurs. + * + * @param readableStream - The Web `ReadableStream` to be transformed into an `AsyncIterable`. + * @returns An `AsyncIterable` that yields data chunks from the `ReadableStream`. 
+ */ + public static async * asAsyncIterator(readableStream: ReadableStream): AsyncIterable { + const reader = readableStream.getReader(); + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + yield value; + } + } finally { + reader.releaseLock(); + } + } + + /** + * Consumes a `ReadableStream` and returns its contents as an `ArrayBuffer`. + * + * This method reads all data from a `ReadableStream`, collects it, and converts it into an + * `ArrayBuffer`. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const arrayBuffer = await Stream.consumeToArrayBuffer({ readableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` whose data will be consumed. + * @returns A Promise that resolves to an `ArrayBuffer` containing all the data from the stream. + */ + public static async consumeToArrayBuffer({ readableStream }: { readableStream: ReadableStream}): Promise { + const iterableStream = Stream.asAsyncIterator(readableStream); + const arrayBuffer = await Convert.asyncIterable(iterableStream).toArrayBufferAsync(); + + return arrayBuffer; + } + + /** + * Consumes a `ReadableStream` and returns its contents as a `Blob`. + * + * This method reads all data from a `ReadableStream`, collects it, and converts it into a `Blob`. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const blob = await Stream.consumeToBlob({ readableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` whose data will be consumed. + * @returns A Promise that resolves to a `Blob` containing all the data from the stream. 
+ */ + public static async consumeToBlob({ readableStream }: { readableStream: ReadableStream}): Promise { + const iterableStream = Stream.asAsyncIterator(readableStream); + const blob = await Convert.asyncIterable(iterableStream).toBlobAsync(); + + return blob; + } + + /** + * Consumes a `ReadableStream` and returns its contents as a `Uint8Array`. + * + * This method reads all data from a `ReadableStream`, collects it, and converts it into a + * `Uint8Array`. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const bytes = await Stream.consumeToBytes({ readableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` whose data will be consumed. + * @returns A Promise that resolves to a `Uint8Array` containing all the data from the stream. + */ + public static async consumeToBytes({ readableStream }: { readableStream: ReadableStream }): Promise { + const iterableStream = Stream.asAsyncIterator(readableStream); + const bytes = await Convert.asyncIterable(iterableStream).toUint8ArrayAsync(); + + return bytes; + } + + /** + * Consumes a `ReadableStream` and parses its contents as JSON. + * + * This method reads all the data from the stream, converts it to a text string, and then parses + * it as JSON, returning the resulting object. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const jsonData = await Stream.consumeToJson({ readableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` whose JSON content will be consumed. + * @returns A Promise that resolves to the parsed JSON object from the stream's data. 
+ */ + public static async consumeToJson({ readableStream }: { readableStream: ReadableStream}): Promise { + const iterableStream = Stream.asAsyncIterator(readableStream); + const object = await Convert.asyncIterable(iterableStream).toObjectAsync(); + + return object; + } + + /** + * Consumes a `ReadableStream` and returns its contents as a text string. + * + * This method reads all the data from the stream, converting it into a single string. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const text = await Stream.consumeToText({ readableStream }); + * ``` + * + * @param readableStream - The Web `ReadableStream` whose text content will be consumed. + * @returns A Promise that resolves to a string containing all the data from the stream. + */ + public static async consumeToText({ readableStream }: { readableStream: ReadableStream}): Promise { + const iterableStream = Stream.asAsyncIterator(readableStream); + const text = await Convert.asyncIterable(iterableStream).toStringAsync(); + + return text; + } + + /** + * Generates a `ReadableStream` of `Uint8Array` chunks with customizable length and fill value. + * + * This method creates a `ReadableStream` that emits `Uint8Array` chunks. You can specify the + * total length of the stream, the length of individual chunks, and a fill value or range for the + * chunks. It's useful for testing or when specific binary data streams are required. + * + * @example + * ```ts + * // Create a stream of 1000 bytes with 100-byte chunks filled with 0xAA. + * const byteStream = Stream.generateByteStream({ + * streamLength: 1000, + * chunkLength: 100, + * fillValue: 0xAA + * }); + * + * // Create an unending stream of 100KB chunks filled with values that range from 1 to 99. + * const byteStream = Stream.generateByteStream({ + * chunkLength: 100 * 1024, + * fillValue: [1, 99] + * }); + * ``` + * + * @param streamLength - The total length of the stream in bytes. If omitted, the stream is infinite. 
+ * @param chunkLength - The length of each chunk. If omitted, each chunk is the size of `streamLength`. + * @param fillValue - A value or range to fill the chunks with. Can be a single number or a tuple [min, max]. + * @returns A `ReadableStream` that emits `Uint8Array` chunks. + */ + public static generateByteStream({ streamLength, chunkLength, fillValue }: { + streamLength?: number, + chunkLength?: number, + fillValue?: number | [number, number] + }): ReadableStream { + let bytesRemaining = streamLength ?? Infinity; + let controller: ReadableStreamDefaultController; + + function enqueueChunk() { + const currentChunkLength = Math.min(bytesRemaining, chunkLength ?? Infinity); + bytesRemaining -= currentChunkLength; + + let chunk: Uint8Array; + + if (typeof fillValue === 'number') { + chunk = new Uint8Array(currentChunkLength).fill(fillValue); + + } else if (Array.isArray(fillValue)) { + chunk = new Uint8Array(currentChunkLength); + const [min, max] = fillValue; + const range = max - min + 1; + for (let i = 0; i < currentChunkLength; i++) { + chunk[i] = Math.floor(Math.random() * range) + min; + } + + } else { + chunk = new Uint8Array(currentChunkLength); + } + + controller.enqueue(chunk); + + // If there are no more bytes to send, close the stream + if (bytesRemaining <= 0) { + controller.close(); + } + } + + return new ReadableStream({ + start(c) { + controller = c; + enqueueChunk(); + }, + pull() { + enqueueChunk(); + }, + }); + } + + /** + * Checks if the provided Web `ReadableStream` is in a readable state. + * + * After verifying that the stream is a Web {@link https://streams.spec.whatwg.org/#rs-model | ReadableStream}, + * this method checks the {@link https://streams.spec.whatwg.org/#readablestream-locked | locked} + * property of the ReadableStream. The `locked` property is `true` if a reader is currently + * active, meaning the stream is either being read or has already been read (and hence is not in a + * readable state). 
If `locked` is `false`, it means the stream is still in a state where it can + * be read. + * + * In the case where a `ReadableStream` has been unlocked but is no longer readable (for example, + * if it has been fully read or cancelled), additional checks are needed beyond just examining the + * locked property. The ReadableStream API does not provide a direct way to check if the stream + * has data left or if it's in a readable state once it's been unlocked. + * + * Per {@link https://streams.spec.whatwg.org/#other-specs-rs-introspect | WHATWG Streams, Section 9.1.3. Introspection}: + * + * > ...note that apart from checking whether or not the stream is locked, this direct + * > introspection is not possible via the public JavaScript API, and so specifications should + * > instead use the algorithms in §9.1.2 Reading. (For example, instead of testing if the stream + * > is readable, attempt to get a reader and handle any exception.) + * + * This implementation employs the technique suggested by the WHATWG Streams standard by + * attempting to acquire a reader and checking the state of the reader. If acquiring a reader + * succeeds, it immediately releases the lock and returns `true`, indicating the stream is + * readable. If an error occurs while trying to get a reader (which can happen if the stream is + * already closed or errored), it catches the error and returns `false`, indicating the stream is + * not readable. + * + * @example + * ```ts + * const readableStream = new ReadableStream({ ... }); + * const isStreamReadable = Stream.isReadable({ readableStream }); + * console.log(isStreamReadable); // Output: true or false + * ``` + * + * @remarks + * - This method does not check whether the stream has data left to read; it only checks if the + * stream is in a state that allows reading. It is possible for a stream to be unlocked but + * still have no data left if it has never been locked to a reader. 
+ * + * @param readableStream - The Web `ReadableStream` to be checked for readability. + * + * @returns `true` if the stream is a `ReadableStream` and is in a readable state (not locked and + * no error on getting a reader); otherwise, `false`. + */ + public static isReadable({ readableStream }: { readableStream: ReadableStream }): boolean { + // Check if the stream is a WHATWG `ReadableStream`. + if (!Stream.isReadableStream(readableStream)) { + return false; + } + + // Check if the stream is locked. + if (readableStream.locked) { + return false; + } + + try { + // Try to get a reader to check if the stream is readable. + const reader = readableStream.getReader(); + // If successful, immediately release the lock. + reader.releaseLock(); + return true; + } catch (error) { + // If an error occurs (e.g., the stream is not readable), return false. + return false; + } + } + + /** + * Checks if an object is a Web `ReadableStream`. + * + * This method verifies whether the given object is a `ReadableStream` by checking its type and + * the presence of the `getReader` function. + * + * @example + * ```ts + * const obj = getSomeObject(); + * if (Stream.isReadableStream(obj)) { + * // obj is a ReadableStream + * } + * ``` + * + * @param obj - The object to be checked. + * @returns `true` if `obj` is a `ReadableStream`; otherwise, `false`. + */ + public static isReadableStream(obj: unknown): obj is ReadableStream { + return ( + typeof obj === 'object' && obj !== null && + 'getReader' in obj && typeof obj.getReader === 'function' + ); + } + + /** + * Checks if an object is a Web `ReadableStream`, `WritableStream`, or `TransformStream`. + * + * This method verifies the type of a given object to determine if it is one of the standard + * stream types in the Web Streams API: `ReadableStream`, `WritableStream`, or `TransformStream`. + * It employs type-checking strategies that are specific to each stream type. 
+ * + * The method checks for the specific functions and properties associated with each stream type: + * - `ReadableStream`: Identified by the presence of a `getReader` method. + * - `WritableStream`: Identified by the presence of a `getWriter` and `abort` methods. + * - `TransformStream`: Identified by having both `readable` and `writable` properties. + * + * @example + * ```ts + * const readableStream = new ReadableStream(); + * console.log(Stream.isStream(readableStream)); // Output: true + * + * const writableStream = new WritableStream(); + * console.log(Stream.isStream(writableStream)); // Output: true + * + * const transformStream = new TransformStream(); + * console.log(Stream.isStream(transformStream)); // Output: true + * + * const nonStreamObject = {}; + * console.log(Stream.isStream(nonStreamObject)); // Output: false + * ``` + * + * @remarks + * - This method does not differentiate between `ReadableStream`, `WritableStream`, and + * `TransformStream`. It checks if the object conforms to any of these types. + * - This method is specific to the Web Streams API and may not recognize non-standard or custom + * stream-like objects that do not adhere to the Web Streams API specifications. + * + * @param obj - The object to be checked for being a Web `ReadableStream`, `WritableStream`, or `TransformStream`. + * @returns `true` if the object is a `ReadableStream`, `WritableStream`, or `TransformStream`; otherwise, `false`. + */ + public static isStream(obj: unknown): obj is ReadableStream | WritableStream | TransformStream { + return Stream.isReadableStream(obj) || Stream.isWritableStream(obj) || Stream.isTransformStream(obj); + } + + /** + * Checks if an object is a `TransformStream`. + * + * This method verifies whether the given object is a `TransformStream` by checking its type and + * the presence of `readable` and `writable` properties. 
+ * + * @example + * ```ts + * const obj = getSomeObject(); + * if (Stream.isTransformStream(obj)) { + * // obj is a TransformStream + * } + * ``` + * + * @param obj - The object to be checked. + * @returns `true` if `obj` is a `TransformStream`; otherwise, `false`. + */ + public static isTransformStream(obj: unknown): obj is TransformStream { + return ( + typeof obj === 'object' && obj !== null && + 'readable' in obj && typeof obj.readable === 'object' && + 'writable' in obj && typeof obj.writable === 'object' + ); + } + + /** + * Checks if an object is a `WritableStream`. + * + * This method determines whether the given object is a `WritableStream` by verifying its type and + * the presence of the `getWriter` and `abort` functions. + * + * @example + * ```ts + * const obj = getSomeObject(); + * if (Stream.isWritableStream(obj)) { + * // obj is a WritableStream + * } + * ``` + * + * @param obj - The object to be checked. + * @returns `true` if `obj` is a `WritableStream`; otherwise, `false`. + */ + public static isWritableStream(obj: unknown): obj is WritableStream { + return ( + typeof obj === 'object' && obj !== null && + 'getWriter' in obj && typeof obj.getWriter === 'function' && + 'abort' in obj && typeof obj.abort === 'function' + ); + } +} \ No newline at end of file diff --git a/packages/common/src/type-utils.ts b/packages/common/src/type-utils.ts index 7ec28ab0d..dbf14d310 100644 --- a/packages/common/src/type-utils.ts +++ b/packages/common/src/type-utils.ts @@ -17,6 +17,43 @@ export function isArrayBufferSlice(arrayBufferView: ArrayBufferView): boolean { return arrayBufferView.byteOffset !== 0 || arrayBufferView.byteLength !== arrayBufferView.buffer.byteLength; } +/** + * Checks if the given object is an AsyncIterable. + * + * An AsyncIterable is an object that implements the AsyncIterable protocol, + * which means it has a [Symbol.asyncIterator] method.
This function checks + * if the provided object conforms to this protocol by verifying the presence + * and type of the [Symbol.asyncIterator] method. + * + * @param obj - The object to be checked for AsyncIterable conformity. + * @returns True if the object is an AsyncIterable, false otherwise. + * + * @example + * ```ts + * // Returns true for a valid AsyncIterable + * const asyncIterable = { + * async *[Symbol.asyncIterator]() { + * yield 1; + * yield 2; + * } + * }; + * console.log(isAsyncIterable(asyncIterable)); // true + * ``` + * + * @example + * ```ts + * // Returns false for a regular object + * console.log(isAsyncIterable({ a: 1, b: 2 })); // false + * ``` + */ +export function isAsyncIterable(obj: any): obj is AsyncIterable { + if (typeof obj !== 'object' || obj === null) { + return false; + } + + return typeof obj[Symbol.asyncIterator] === 'function'; +} + /** * isDefined * diff --git a/packages/common/tests/convert.spec.ts b/packages/common/tests/convert.spec.ts index 8e6b3d21f..a4ae8740f 100644 --- a/packages/common/tests/convert.spec.ts +++ b/packages/common/tests/convert.spec.ts @@ -2,6 +2,8 @@ import { expect } from 'chai'; import { Convert } from '../src/convert.js'; +const textEncoder = new TextEncoder(); + describe('Convert', () =>{ describe('from: ArrayBuffer', () => { it('to: Base58Btc', () => { @@ -53,6 +55,105 @@ describe('Convert', () =>{ }); }); + describe('from: AsyncIterable', () => { + let asyncIterableBytes: AsyncIterable; + let asyncIterableJson: AsyncIterable; + let asyncIterableString: AsyncIterable; + + // Create a generator function that yields two Uint8Array chunks. + async function* generateBytesData() { + yield new Uint8Array([1, 2, 3]); + yield new Uint8Array([4, 5, 6]); + } + + // Create a generator function that yields parts of a JSON string. + async function* generateJsonData() { + yield '{"foo":'; + yield '"bar"'; + yield '}'; + } + + // Create a generator function that yields Uint8Array chunks of encoded string data. 
+ async function* generateStringData() { + yield textEncoder.encode('Hello, '); + yield textEncoder.encode('world!'); + } + + beforeEach(() => { + asyncIterableBytes = generateBytesData(); + asyncIterableJson = generateJsonData(); + asyncIterableString = generateStringData(); + }); + + it('to: ArrayBuffer', async () => { + const output = await Convert.asyncIterable(asyncIterableBytes).toArrayBufferAsync(); + + // The expected ArrayBuffer is a concatenation of the yielded Uint8Arrays + const expected = new Uint8Array([1, 2, 3, 4, 5, 6]).buffer; + + // Compare the result with the expected ArrayBuffer + expect(new Uint8Array(output)).to.deep.equal(new Uint8Array(expected)); + }); + + it('to: Blob', async () => { + const output = await Convert.asyncIterable(asyncIterableBytes).toBlobAsync(); + + // Check if the returned object is a Blob + expect(output).to.be.an.instanceOf(Blob); + + // Convert Blob to ArrayBuffer to verify contents + const arrayBuffer = await output.arrayBuffer(); + const result = new Uint8Array(arrayBuffer); + + // The expected result is a concatenation of the yielded Uint8Arrays + const expected = new Uint8Array([1, 2, 3, 4, 5, 6]); + + // Compare the result with the expected Uint8Array + expect(result).to.deep.equal(expected); + }); + + it('to: Object', async () => { + const output = await Convert.asyncIterable(asyncIterableJson).toObjectAsync(); + + // The expected result is the object formed by the concatenated JSON string + const expected = { foo: 'bar' }; + + // Compare the result with the expected object + expect(output).to.deep.equal(expected); + }); + + it('to: String', async () => { + const output = await Convert.asyncIterable(asyncIterableString).toStringAsync(); + + // The expected result is the concatenated string + const expected = 'Hello, world!'; + + // Compare the result with the expected string + expect(output).to.equal(expected); + }); + + it('to: Uint8Array', async () => { + const output = await 
Convert.asyncIterable(asyncIterableBytes).toUint8ArrayAsync(); + + // The expected result is a Uint8Array that concatenates all chunks + const expected = new Uint8Array([1, 2, 3, 4, 5, 6]); + + // Compare the result with the expected Uint8Array + expect(output).to.deep.equal(expected); + }); + + it('throws an error if input is not AsyncIterable', async () => { + try { + // @ts-expect-error because incorrect input data type is intentionally being used to trigger error. + Convert.asyncIterable('unsupported'); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('must be of type AsyncIterable'); + } + }); + }); + describe('from: Base58Btc', () => { it('to: ArrayBuffer', () => { // Test Vector 1. @@ -456,6 +557,16 @@ describe('Convert', () =>{ expect(() => unsupported.toArrayBuffer()).to.throw(TypeError, 'not supported'); }); + it('toArrayBufferAsync() throw an error', async () => { + try { + await unsupported.toArrayBufferAsync(); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('not supported'); + } + }); + it('toBase58Btc() throw an error', () => { expect(() => unsupported.toBase58Btc()).to.throw(TypeError, 'not supported'); }); @@ -464,6 +575,16 @@ describe('Convert', () =>{ expect(() => unsupported.toBase64Url()).to.throw(TypeError, 'not supported'); }); + it('toBlobAsync() throw an error', async () => { + try { + await unsupported.toBlobAsync(); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('not supported'); + } + }); + it('toHex() throw an error', () => { expect(() => unsupported.toHex()).to.throw(TypeError, 'not supported'); }); @@ -476,12 +597,42 @@ describe('Convert', () =>{ expect(() => 
unsupported.toObject()).to.throw(TypeError, 'not supported'); }); + it('toObjectAsync() throw an error', async () => { + try { + await unsupported.toObjectAsync(); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('not supported'); + } + }); + it('toString() throw an error', () => { expect(() => unsupported.toString()).to.throw(TypeError, 'not supported'); }); + it('toStringAsync() throw an error', async () => { + try { + await unsupported.toStringAsync(); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('not supported'); + } + }); + it('toUint8Array() throw an error', () => { expect(() => unsupported.toUint8Array()).to.throw(TypeError, 'not supported'); }); + + it('toUint8ArrayAsync() throw an error', async () => { + try { + await unsupported.toUint8ArrayAsync(); + expect.fail('Should have thrown an error for incorrect type'); + } catch (error: any) { + expect(error).to.be.instanceOf(TypeError); + expect(error.message).to.include('not supported'); + } + }); }); }); \ No newline at end of file diff --git a/packages/common/tests/object.spec.ts b/packages/common/tests/object.spec.ts index 58470795f..988f64ae5 100644 --- a/packages/common/tests/object.spec.ts +++ b/packages/common/tests/object.spec.ts @@ -2,87 +2,90 @@ import { expect } from 'chai'; import { isEmptyObject, removeEmptyObjects, removeUndefinedProperties } from '../src/object.js'; - -describe('isEmptyObject()', () => { - it('should return true for an empty object', () => { - expect(isEmptyObject({})).to.be.true; - }); - - it('should return false for a non-empty object', () => { - expect(isEmptyObject({ key: 'value' })).to.be.false; - }); - - it('should return false for null', () => { - expect(isEmptyObject(null)).to.be.false; +describe('Object', () => { + + 
describe('isEmptyObject()', () => { + it('should return true for an empty object', () => { + expect(isEmptyObject({})).to.be.true; + }); + + it('should return false for a non-empty object', () => { + expect(isEmptyObject({ key: 'value' })).to.be.false; + }); + + it('should return false for null', () => { + expect(isEmptyObject(null)).to.be.false; + }); + + it('should return true for an object with no prototype', () => { + expect(isEmptyObject(Object.create(null))).to.be.true; + }); + + it('should return false for an object with no prototype but containing properties', () => { + const obj = Object.create(null); + obj.key = 'value'; + expect(isEmptyObject(obj)).to.be.false; + }); + + it('should return false for an object with symbol properties', () => { + const symbol = Symbol('key'); + const obj = { [symbol]: 'value' }; + expect(isEmptyObject(obj)).to.be.false; + }); + + it('should return false for a non-object (number)', () => { + expect(isEmptyObject(42)).to.be.false; + }); + + it('should return false for a non-object (string)', () => { + expect(isEmptyObject('text')).to.be.false; + }); + + it('should return true for an object that inherits properties but has none of its own', () => { + const parent = { parentKey: 'value' }; + const child = Object.create(parent); + expect(isEmptyObject(child)).to.be.true; + }); }); - it('should return true for an object with no prototype', () => { - expect(isEmptyObject(Object.create(null))).to.be.true; - }); - - it('should return false for an object with no prototype but containing properties', () => { - const obj = Object.create(null); - obj.key = 'value'; - expect(isEmptyObject(obj)).to.be.false; - }); - - it('should return false for an object with symbol properties', () => { - const symbol = Symbol('key'); - const obj = { [symbol]: 'value' }; - expect(isEmptyObject(obj)).to.be.false; - }); + describe('removeEmptyObjects()', () => { + it('should remove all empty objects', () => { + const mockObject = { + foo : {}, + bar : { 
baz: {} }, + buzz : 'hello' + }; - it('should return false for a non-object (number)', () => { - expect(isEmptyObject(42)).to.be.false; - }); + const expectedResult = { buzz: 'hello' }; - it('should return false for a non-object (string)', () => { - expect(isEmptyObject('text')).to.be.false; - }); + removeEmptyObjects(mockObject); - it('should return true for an object that inherits properties but has none of its own', () => { - const parent = { parentKey: 'value' }; - const child = Object.create(parent); - expect(isEmptyObject(child)).to.be.true; + expect(mockObject).to.deep.equal(expectedResult); + }); }); -}); - -describe('removeEmptyObjects()', () => { - it('should remove all empty objects', () => { - const mockObject = { - foo : {}, - bar : { baz: {} }, - buzz : 'hello' - }; - - const expectedResult = { buzz: 'hello' }; - - removeEmptyObjects(mockObject); - expect(mockObject).to.deep.equal(expectedResult); - }); -}); - -describe('removeUndefinedProperties()', () => { - it('should remove all `undefined` properties of a nested object', () => { - const mockObject = { - a : true, - b : undefined, - c : { - a : 0, + describe('removeUndefinedProperties()', () => { + it('should remove all `undefined` properties of a nested object', () => { + const mockObject = { + a : true, b : undefined, - } - }; - - const expectedResult = { - a : true, - c : { - a: 0 - } - }; - - removeUndefinedProperties(mockObject); - - expect(mockObject).to.deep.equal(expectedResult); + c : { + a : 0, + b : undefined, + } + }; + + const expectedResult = { + a : true, + c : { + a: 0 + } + }; + + removeUndefinedProperties(mockObject); + + expect(mockObject).to.deep.equal(expectedResult); + }); }); + }); \ No newline at end of file diff --git a/packages/common/tests/stream-node.spec.ts b/packages/common/tests/stream-node.spec.ts new file mode 100644 index 000000000..ab34f46df --- /dev/null +++ b/packages/common/tests/stream-node.spec.ts @@ -0,0 +1,791 @@ + +import { expect } from 'chai'; +import { 
Duplex, Readable, Transform, Writable } from 'readable-stream'; + +import { Stream } from '../src/stream.js'; +import { NodeStream } from '../src/stream-node.js'; + +// Helper function to simulate a slow consumer. +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); +} + +describe('NodeStream', () => { + describe('consumeToArrayBuffer()', () => { + it('consumes a Readable stream and returns an ArrayBuffer', async () => { + const inputText = 'Hello, World!'; + const inputBytes = new TextEncoder().encode(inputText); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); // Signifies the end of the stream + + const arrayBuffer = await NodeStream.consumeToArrayBuffer({ readable: nodeReadable }); + expect(arrayBuffer).to.be.an.instanceof(ArrayBuffer); + expect(new Uint8Array(arrayBuffer)).to.deep.equal(inputBytes); + }); + + it('handles an empty Readable stream', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // Empty stream + + const arrayBuffer = await NodeStream.consumeToArrayBuffer({ readable: nodeReadable }); + expect(arrayBuffer).to.be.an.instanceof(ArrayBuffer); + expect(arrayBuffer.byteLength).to.equal(0); + }); + + it('consumes a large Readable stream and returns the expected ArrayBuffer', async () => { + const largeData = new Uint8Array(1024 * 1024).fill('a'.charCodeAt(0)); // 1MB data + const nodeReadable = new Readable(); + nodeReadable.push(largeData); + nodeReadable.push(null); + + const arrayBuffer = await NodeStream.consumeToArrayBuffer({ readable: nodeReadable }); + expect(arrayBuffer).to.be.an.instanceof(ArrayBuffer); + expect(arrayBuffer.byteLength).to.equal(largeData.byteLength); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + try { + await NodeStream.consumeToArrayBuffer({ 
readable: nodeReadable }); + expect.fail('consumeToArrayBuffer() should have thrown an error'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + }); + + describe('consumeToBlob()', () => { + it('consumes a Readable stream and returns a Blob', async () => { + const inputText = 'Hello, World!'; + const inputBytes = new TextEncoder().encode(inputText); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); // Signifies the end of the stream + + const blob = await NodeStream.consumeToBlob({ readable: nodeReadable }); + expect(blob).to.be.an.instanceof(Blob); + expect(blob.size).to.equal(inputBytes.byteLength); + + const text = await blob.text(); + expect(text).to.equal(inputText); + }); + + it('handles an empty Readable stream', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // Empty stream + + const blob = await NodeStream.consumeToBlob({ readable: nodeReadable }); + expect(blob).to.be.an.instanceof(Blob); + expect(blob.size).to.equal(0); + }); + + it('consumes a large Readable stream and returns the expected blob size', async () => { + const largeData = new Uint8Array(1024 * 1024).fill('a'.charCodeAt(0)); // 1MB data + const nodeReadable = new Readable(); + nodeReadable.push(largeData); + nodeReadable.push(null); + + const blob = await NodeStream.consumeToBlob({ readable: nodeReadable }); + expect(blob).to.be.an.instanceof(Blob); + expect(blob.size).to.equal(largeData.byteLength); + }); + + it('consumes a Readable stream containing a string and returns the correct Blob', async () => { + const inputString = 'Hello, World!'; + const textEncoder = new TextEncoder(); + const inputBytes = textEncoder.encode(inputString); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); + + const blob = await NodeStream.consumeToBlob({ readable: nodeReadable }); + expect(blob.size).to.equal(inputBytes.length); + + // Read the blob and 
verify its content + const blobText = await blob.text(); + expect(blobText).to.equal(inputString); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + try { + await NodeStream.consumeToBlob({ readable: nodeReadable }); + expect.fail('consumeToBlob() should have thrown an error'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + }); + + describe('consumeToBytes()', () => { + it('consumes a Readable stream and returns a Uint8Array', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); + + const result = await NodeStream.consumeToBytes({ readable: nodeReadable }); + expect(result).to.be.an.instanceof(Uint8Array); + expect(result).to.deep.equal(inputBytes); + }); + + it('consumes a 5-byte ReadableStream and returns the expected bytes', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); + + const result = await NodeStream.consumeToBytes({ readable: nodeReadable }); + expect(result).to.deep.equal(inputBytes); + }); + + it('consumes a large ReadableStream and returns the expected bytes', async () => { + // Create a 1MB byte stream that is filled with monotonically increasing values from 0 to 255, repeatedly. 
+ const oneMegabyte = new Uint8Array(1024 * 1024).map((_, i) => i % 256); + const nodeReadable = new Readable(); + nodeReadable.push(oneMegabyte); + nodeReadable.push(null); + + const result = await NodeStream.consumeToBytes({ readable: nodeReadable }); + expect(result).to.deep.equal(oneMegabyte); + }); + + it('handles an empty ReadableStream', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // Empty stream + + const result = await NodeStream.consumeToBytes({ readable: nodeReadable }); + expect(result).to.be.an.instanceof(Uint8Array); + expect(result.length).to.equal(0); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + try { + await NodeStream.consumeToBytes({ readable: nodeReadable }); + expect.fail('consumeToBytes() should have thrown an error'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + }); + + describe('consumeToJson()', () => { + it('consumes a Readable stream containing JSON and returns a JavaScript object', async () => { + const inputObject = { message: 'Hello, World!' 
}; + const inputString = JSON.stringify(inputObject); + const textEncoder = new TextEncoder(); + const inputBytes = textEncoder.encode(inputString); + const nodeReadable = new Readable(); + nodeReadable.push(inputBytes); + nodeReadable.push(null); + + const result = await NodeStream.consumeToJson({ readable: nodeReadable }); + expect(result).to.deep.equal(inputObject); + }); + + it('throws an error for a stream containing invalid JSON', async () => { + const invalidJson = 'Invalid JSON'; + const nodeReadable = new Readable(); + nodeReadable.push(new TextEncoder().encode(invalidJson)); + nodeReadable.push(null); + + try { + await NodeStream.consumeToJson({ readable: nodeReadable }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.be.instanceOf(SyntaxError); + } + }); + + it('throws an error for an empty Readable stream', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // Empty stream + + try { + await NodeStream.consumeToJson({ readable: nodeReadable }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.be.instanceOf(SyntaxError); // Empty string is not valid JSON + } + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + try { + await NodeStream.consumeToJson({ readable: nodeReadable }); + expect.fail('consumeToJson() should have thrown an error'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + }); + + describe('consumeToText()', () => { + it('consumes a Readable stream containing text and returns a string', async () => { + const inputText = 'Hello, World!'; + const nodeReadable = new Readable(); + nodeReadable.push(new TextEncoder().encode(inputText)); + nodeReadable.push(null); + + const result = await NodeStream.consumeToText({ readable: nodeReadable}); + expect(result).to.be.a('string'); + 
expect(result).to.equal(inputText); + }); + + it('handles an empty Readable stream', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // Empty stream + + const result = await NodeStream.consumeToText({ readable: nodeReadable}); + expect(result).to.be.a('string'); + expect(result).to.equal(''); + }); + + it('consumes a large text stream and returns the expected text', async () => { + const largeText = 'a'.repeat(1024 * 1024); // 1MB of 'a' + const nodeReadable = new Readable(); + nodeReadable.push(new TextEncoder().encode(largeText)); + nodeReadable.push(null); + + const result = await NodeStream.consumeToText({ readable: nodeReadable}); + expect(result).to.equal(largeText); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + try { + await NodeStream.consumeToText({ readable: nodeReadable }); + expect.fail('consumeToText() should have thrown an error'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + }); + + describe('fromWebReadable()', () => { + it('converts a Web ReadableStream to a Node Readable and reads the data correctly', (done) => { + // Step 1: Create a Web ReadableStream + const inputData = ['chunk1', 'chunk2', 'chunk3']; + const webStream = new ReadableStream({ + start(controller) { + inputData.forEach(chunk => controller.enqueue(chunk)); + controller.close(); + } + }); + + // Step 2: Convert to Node Readable + const nodeReadable = NodeStream.fromWebReadable({ readableStream: webStream }); + + // Step 3: Read from the Node Readable + let concatenatedData = ''; + nodeReadable.on('data', (chunk) => { + concatenatedData += chunk; + }); + + nodeReadable.on('end', () => { + // Step 4: Compare the concatenated data with the original input + const originalDataString = inputData.join(''); + expect(concatenatedData).to.equal(originalDataString); + 
done(); + }); + + nodeReadable.on('error', (error) => { + done(error); + }); + }); + + it('handles backpressure properly', async () => { + // Create a Web ReadableStream with 1MB of data in 100KB chunks. + const streamLength = 1*1024*1024; // 1MB + const chunkLength = 100*1024; // 100KB + const webStream = Stream.generateByteStream({ streamLength, chunkLength }); + + // Convert to Node Readable with a small highWaterMark to induce backpressure + const nodeReadable = NodeStream.fromWebReadable({ + readableStream : webStream, + readableOptions : { highWaterMark: 1, readableHighWaterMark: 1 } + }); + + // 'end' will be triggered once when there is no more data available. + let endReached = false; + nodeReadable.on('end', () => { + // Reached end of stream. + endReached = true; + }); + + let receivedBytes = 0; + + // Read chunks one at a time with delay to simulate slow consumer. + for await (const chunk of nodeReadable) { + receivedBytes += chunk.length; // Keep track of total bytes received. + await sleep(2); // Introduce delay between reads + } + + expect(receivedBytes).to.equal(streamLength); + expect(endReached).to.be.true; + }); + + it('throws an error when passed a Node Readable stream', () => { + const nodeReadable = new Readable(); + + try { + // @ts-expect-error because a Node Readable is specified instead of a Web ReadableStream. + NodeStream.fromWebReadable({ readableStream: nodeReadable }); + expect.fail('Expected method to throw an error with Node Readable input'); + } catch (error: any) { + expect(error).to.be.an.instanceOf(TypeError); + expect(error.message).to.include('not a Web ReadableStream'); + } + }); + + it('throws an error when passed a non-stream object', () => { + const notAStream = {}; + + try { + // @ts-expect-error because notAStream is not a stream. 
+ NodeStream.fromWebReadable({ readableStream: notAStream }); + expect.fail('Expected method to throw an error with non-stream object'); + } catch (error: any) { + expect(error).to.be.an.instanceOf(TypeError); + expect(error.message).to.include('not a Web ReadableStream'); + } + }); + + it('throws an error if an error occurs during stream processing', async () => { + // Create a Web ReadableStream that throws an error. + let controller: ReadableStreamDefaultController; + const webStream = new ReadableStream({ + start(c) { + controller = c; + // Simulate an error after a delay. + setTimeout(() => controller.error(new Error('Test error1')), 10); + } + }); + + // Convert to Node Readable + const nodeReadable = NodeStream.fromWebReadable({ readableStream: webStream }); + + nodeReadable.on('error', (error) => { + // Expect the 'error' event to be emitted with the error 'Test error'. + expect(error).to.be.an.instanceof(Error); + expect(error.message).to.equal('Test error1'); + }); + + try { + // Start reading from the stream to trigger the error. + for await (const _chunk of nodeReadable) { /* Do nothing */ } + expect.fail('Should have thrown an error'); + } catch (error: any) { + // Expect the error to be thrown. + expect(error).to.be.an.instanceof(Error); + expect(error.message).to.equal('Test error1'); + } + }); + + it('calls reader.cancel() if the stream is destroyed before closing', function (done) { + // Create a Web ReadableStream. + const webStream = new ReadableStream({ + start(controller) { + // Enqueue some data and then delay closing the stream. + controller.enqueue('test data'); + setTimeout(() => { + try { + controller.close(); + } catch (error: any) { + // Expect an error indicating that an already closed stream can't be closed again. + expect(error).to.be.an.instanceof(Error); + expect(error.message).to.include('close'); + } + }, 25); + } + }); + + // Convert to Node Readable. 
+ const nodeReadable = NodeStream.fromWebReadable({ readableStream: webStream }); + + // Destroy the Node stream before the Web stream has closed. + setTimeout(() => nodeReadable.destroy(new Error('Test error')), 5); + + nodeReadable.on('close', () => { + // The test passes if this callback is invoked, indicating that destroy was called. + done(); + }); + + nodeReadable.on('error', (error) => { + expect(error).to.be.an.instanceof(Error); + expect(error.message).to.equal('Test error'); + }); + }); + }); + + describe('isDestroyed', () => { + it('returns true for a destroyed Duplex stream', () => { + const duplex = new Duplex({ + read() {}, + write(chunk, encoding, callback) { callback(); } + }); + duplex.destroy(); + expect(NodeStream.isDestroyed({ stream: duplex })).to.be.true; + }); + + it('returns true for a destroyed Readable stream', () => { + const readable = new Readable({ read() {} }); + readable.destroy(); + expect(NodeStream.isDestroyed({ stream: readable })).to.be.true; + }); + + it('returns true for a destroyed Transform stream', () => { + const transform = new Transform({ + transform(chunk, encoding, callback) { callback(); } + }); + transform.destroy(); + expect(NodeStream.isDestroyed({ stream: transform })).to.be.true; + }); + + it('returns true for a destroyed Writable stream', () => { + const writable = new Writable({ + write(chunk, encoding, callback) { callback(); } + }); + writable.destroy(); + expect(NodeStream.isDestroyed({ stream: writable })).to.be.true; + }); + + it('returns false for a non-destroyed Duplex stream', () => { + const duplex = new Duplex({ read() {}, write(chunk, encoding, callback) { callback(); } }); + expect(NodeStream.isDestroyed({ stream: duplex })).to.be.false; + }); + + it('returns false for a non-destroyed Readable stream', () => { + const readable = new Readable({ read() {} }); + expect(NodeStream.isDestroyed({ stream: readable })).to.be.false; + }); + + it('returns false for a non-destroyed Transform stream', () => { + 
const transform = new Transform({ transform(chunk, encoding, callback) { callback(); } }); + expect(NodeStream.isDestroyed({ stream: transform })).to.be.false; + }); + + it('returns false for a non-destroyed Writable stream', () => { + const writable = new Writable({ write(chunk, encoding, callback) { callback(); } }); + expect(NodeStream.isDestroyed({ stream: writable })).to.be.false; + }); + + it('throws an error when input is not a Node stream', () => { + const notAStream = {}; + + try { + // @ts-expect-error because notAStream is not a Node stream. + NodeStream.isDestroyed({ stream: notAStream }); + expect.fail('Method did not throw'); + } catch (error: any) { + expect(error).to.be.an.instanceOf(TypeError); + expect(error.message).to.equal('NodeStream.isDestroyed: \'stream\' is not a Node stream.'); + } + }); + }); + + describe('isReadable()', () => { + it('returns true for a readable stream', () => { + const nodeReadable = new Readable({ read() {} }); + + expect(NodeStream.isReadable({ readable: nodeReadable })).to.be.true; + }); + + it('returns false for a paused stream', () => { + const nodeReadable = new Readable({ read() {} }); + nodeReadable.pause(); + expect(NodeStream.isReadable({ readable: nodeReadable })).to.be.false; + }); + + it('returns false for a stream that has ended', async () => { + const nodeReadable = new Readable(); + nodeReadable.push(null); // End the stream + + expect(NodeStream.isReadable({ readable: nodeReadable })).to.be.false; + }); + + it(`returns false for a stream that has ended and the 'end' event has been emitted`, async () => { + const nodeReadable = new Readable(); + nodeReadable.push(new Uint8Array([1])); + nodeReadable.push(null); // End the stream + + nodeReadable.on('end', () => { + expect(NodeStream.isReadable({ readable: nodeReadable })).to.be.false; + }); + + for await (const _chunk of nodeReadable) { + // Only reading the chunks to trigger emitting the 'end' event. 
+ } + }); + + it('returns false for a destroyed stream', () => { + const nodeReadable = new Readable({ read() {} }); + nodeReadable.destroy(); + expect(NodeStream.isReadable({ readable: nodeReadable })).to.be.false; + }); + + it('returns false for a non-stream object', () => { + const nonStreamObject = {}; + // @ts-expect-error because nonStreamObject is not a stream. + expect(NodeStream.isReadable({ readable: nonStreamObject })).to.be.false; + }); + + it('returns false for null', () => { + // @ts-expect-error because null is not a stream. + expect(NodeStream.isReadable({ readable: null })).to.be.false; + }); + + it('returns false for undefined', () => { + // @ts-expect-error because undefined is not a stream. + expect(NodeStream.isReadable({ readable: undefined })).to.be.false; + }); + }); + + describe('isReadableStream()', () => { + it('returns true for a Node Readable stream', () => { + const nodeReadable = new Readable(); + const result = NodeStream.isReadableStream(nodeReadable); + expect(result).to.be.true; + }); + + it('returns false for a web ReadableStream', () => { + const readableStream = new ReadableStream(); + expect(NodeStream.isReadableStream(readableStream)).to.be.false; + }); + + it('returns false for a non-stream object', () => { + const nonStreamObject = { pipe: () => {}, on: () => {} }; + const result = NodeStream.isReadableStream(nonStreamObject); + expect(result).to.be.false; + }); + + it('returns false for null', () => { + const result = NodeStream.isReadableStream(null); + expect(result).to.be.false; + }); + + it('returns false for undefined', () => { + const result = NodeStream.isReadableStream(undefined); + expect(result).to.be.false; + }); + + it('returns false for a string', () => { + const result = NodeStream.isReadableStream('not a stream'); + expect(result).to.be.false; + }); + + it('returns false for a number', () => { + const result = NodeStream.isReadableStream(42); + expect(result).to.be.false; + }); + + it('returns false for an 
array', () => { + const result = NodeStream.isReadableStream([]); + expect(result).to.be.false; + }); + + it('returns false for a function', () => { + const result = NodeStream.isReadableStream(() => {}); + expect(result).to.be.false; + }); + + it('returns false for an object without stream methods', () => { + const nonStreamObject = { someProperty: 'some value' }; + const result = NodeStream.isReadableStream(nonStreamObject); + expect(result).to.be.false; + }); + }); + + describe('isStream', () => { + it('returns true for a Readable stream', () => { + const readableStream = new Readable({ read() {} }); + expect(NodeStream.isStream(readableStream)).to.be.true; + }); + + it('returns true for a Writable stream', () => { + const writableStream = new Writable(); + expect(NodeStream.isStream(writableStream)).to.be.true; + }); + + it('returns true for a Duplex stream', () => { + const duplexStream = new Duplex(); + expect(NodeStream.isStream(duplexStream)).to.be.true; + }); + + it('returns true for a Transform stream', () => { + const transformStream = new Transform(); + expect(NodeStream.isStream(transformStream)).to.be.true; + }); + + it('returns false for a non-stream object', () => { + const nonStreamObject = { someProperty: 'value' }; + expect(NodeStream.isStream(nonStreamObject)).to.be.false; + }); + + it('returns false for null', () => { + expect(NodeStream.isStream(null)).to.be.false; + }); + + it('returns false for undefined', () => { + expect(NodeStream.isStream(undefined)).to.be.false; + }); + + it('returns false for a string', () => { + expect(NodeStream.isStream('not a stream')).to.be.false; + }); + + it('returns false for a number', () => { + expect(NodeStream.isStream(42)).to.be.false; + }); + + it('returns false for a function', () => { + expect(NodeStream.isStream(() => {})).to.be.false; + }); + + it('returns false for an array', () => { + expect(NodeStream.isStream([])).to.be.false; + }); + }); + + describe('toWebReadable()', () => { + it('converts a 
Node Readable stream to a Web ReadableStream', async () => { + const inputData = ['chunk1', 'chunk2', 'chunk3']; + const nodeReadable = new Readable(); + inputData.forEach(chunk => nodeReadable.push(chunk)); + nodeReadable.push(null); // Signifies the end of the stream + + const webReadable = NodeStream.toWebReadable({ readable: nodeReadable }); + + // Read data from the Web ReadableStream + const reader = webReadable.getReader(); + let concatenatedData = ''; + let result; + do { + result = await reader.read(); + if (!result.done) { + concatenatedData += result.value; + } + } while (!result.done); + + // Compare the concatenated data with the original input + expect(concatenatedData).to.equal(inputData.join('')); + }); + + it('closes the Web ReadableStream when the Node stream ends', async () => { + const nodeReadable = new Readable({ + read() { + this.push('data'); + this.push(null); // End the stream + } + }); + + const webReadable = NodeStream.toWebReadable({ readable: nodeReadable }); + const reader = webReadable.getReader(); + + const { done } = await reader.read(); + expect(done).to.be.false; + + const result = await reader.read(); + expect(result.done).to.be.true; + }); + + it('handles errors in the Node stream', async () => { + const error = new Error('Test error'); + const nodeReadable = new Readable({ + read() { + this.emit('error', error); + } + }); + + const webReadable = NodeStream.toWebReadable({ readable: nodeReadable }); + const reader = webReadable.getReader(); + + try { + await reader.read(); + expect.fail('Error was not thrown'); + } catch (caughtError) { + expect(caughtError).to.equal(error); + } + }); + + it('cancels the Node stream when the Web ReadableStream is canceled', async () => { + let canceled = false; + const nodeReadable = new Readable({ + read() {}, + destroy() { + canceled = true; + } + }); + + const webReadable = NodeStream.toWebReadable({ readable: nodeReadable }); + const reader = webReadable.getReader(); + + await 
reader.cancel(); + expect(canceled).to.be.true; + }); + + it('throws an error when input is not a Node Readable stream', () => { + const notAStream = {}; + + try { + // @ts-expect-error because notAStream is not a Node stream. + NodeStream.toWebReadable({ readable: notAStream }); + expect.fail('Method did not throw'); + } catch (error: any) { + expect(error).to.be.an.instanceOf(TypeError); + expect(error.message).to.include('is not a Node Readable stream'); + } + }); + + it('returns a cancelled ReadableStream for a destroyed Node stream', async () => { + const destroyedStream = new Readable({ + read() { this.destroy(); } + }); + destroyedStream.destroy(); + + const webReadable = NodeStream.toWebReadable({ readable: destroyedStream }); + + try { + const reader = webReadable.getReader(); + await reader.read(); + expect.fail('Stream was not cancelled'); + } catch (error) { + // Check if the error is due to cancellation + expect(error).to.be.an.instanceOf(Error); // Adjust according to the expected error type + } + }); + }); + +}); \ No newline at end of file diff --git a/packages/common/tests/stream.spec.ts b/packages/common/tests/stream.spec.ts new file mode 100644 index 000000000..86c6cd4ce --- /dev/null +++ b/packages/common/tests/stream.spec.ts @@ -0,0 +1,649 @@ +import { expect } from 'chai'; +import { Readable } from 'readable-stream'; + +import { Stream } from '../src/stream.js'; + +describe('Stream', () => { + + describe('consumeToArrayBuffer()', () => { + it('consumes a ReadableStream and returns an ArrayBuffer', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const result = await Stream.consumeToArrayBuffer({ readableStream }); + expect(result).to.be.an.instanceof(ArrayBuffer); + expect(new Uint8Array(result)).to.deep.equal(inputBytes); + }); + + it('consumes a large ReadableStream and returns the 
expected bytes', async () => { + const oneMegabyte = new Uint8Array(1024 * 1024).map((_, i) => i % 256); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(oneMegabyte); + controller.close(); + } + }); + + const result = await Stream.consumeToArrayBuffer({ readableStream }); + expect(new Uint8Array(result)).to.deep.equal(oneMegabyte); + }); + + it('handles an empty ReadableStream', async () => { + const readableStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + const result = await Stream.consumeToArrayBuffer({ readableStream }); + expect(result).to.be.an.instanceof(ArrayBuffer); + expect(result.byteLength).to.equal(0); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const readableStream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + + try { + await Stream.consumeToArrayBuffer({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.equal(error); + } + }); + }); + + describe('consumeToBlob()', () => { + it('consumes a ReadableStream and returns a Blob', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const result = await Stream.consumeToBlob({ readableStream }); + expect(result).to.be.an.instanceof(Blob); + expect(result.size).to.equal(inputBytes.length); + + // Read the blob to verify its content + const arrayBuffer = await result.arrayBuffer(); + expect(new Uint8Array(arrayBuffer)).to.deep.equal(inputBytes); + }); + + it('handles an empty ReadableStream', async () => { + const readableStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + const result = await Stream.consumeToBlob({ readableStream }); + expect(result).to.be.an.instanceof(Blob); + 
expect(result.size).to.equal(0); + }); + + it('consumes a large ReadableStream and returns the expected blob size', async () => { + const oneMegabyte = new Uint8Array(1024 * 1024).map((_, i) => i % 256); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(oneMegabyte); + controller.close(); + } + }); + + const result = await Stream.consumeToBlob({ readableStream }); + expect(result.size).to.equal(oneMegabyte.length); + }); + + it('consumes a ReadableStream containing a string and returns the correct Blob', async () => { + const inputString = 'Hello, World!'; + const textEncoder = new TextEncoder(); + const inputBytes = textEncoder.encode(inputString); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const blob = await Stream.consumeToBlob({ readableStream }); + expect(blob).to.be.an.instanceof(Blob); + expect(blob.size).to.equal(inputBytes.length); + + // Read the blob and verify its content + const blobText = await blob.text(); + expect(blobText).to.equal(inputString); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const readableStream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + + try { + await Stream.consumeToBlob({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.equal(error); + } + }); + }); + + describe('consumeToBytes()', () => { + it('consumes a ReadableStream and returns a Uint8Array', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const result = await Stream.consumeToBytes({ readableStream }); + expect(result).to.be.an.instanceof(Uint8Array); + expect(result).to.deep.equal(inputBytes); + }); + + it('consumes a 5-byte 
ReadableStream and returns the expected bytes', async () => { + const inputBytes = new Uint8Array([1, 2, 3, 4, 5]); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const result = await Stream.consumeToBytes({ readableStream }); + expect(result).to.deep.equal(inputBytes); + }); + + it('consumes a large ReadableStream and returns the expected bytes', async () => { + // Create a 1MB byte stream that is filled with monotonically increasing values from 0 to 255, repeatedly. + const oneMegabyte = new Uint8Array(1024 * 1024).map((_, i) => i % 256); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(oneMegabyte); + controller.close(); + } + }); + + const result = await Stream.consumeToBytes({ readableStream }); + expect(result).to.deep.equal(oneMegabyte); + }); + + it('handles an empty ReadableStream', async () => { + const readableStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + const result = await Stream.consumeToBytes({ readableStream }); + expect(result).to.be.an.instanceof(Uint8Array); + expect(result.length).to.equal(0); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const readableStream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + + try { + await Stream.consumeToBytes({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.equal(error); + } + }); + }); + + describe('consumeToJson()', () => { + it('consumes a ReadableStream containing JSON and returns a JavaScript object', async () => { + const inputObject = { message: 'Hello, World!' 
}; + const inputString = JSON.stringify(inputObject); + const textEncoder = new TextEncoder(); + const inputBytes = textEncoder.encode(inputString); + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(inputBytes); + controller.close(); + } + }); + + const result = await Stream.consumeToJson({ readableStream }); + expect(result).to.deep.equal(inputObject); + }); + + it('throws an error for a stream containing invalid JSON', async () => { + const invalidJson = 'Invalid JSON'; + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(invalidJson)); + controller.close(); + } + }); + + try { + await Stream.consumeToJson({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.be.instanceOf(SyntaxError); + } + }); + + it('handles an empty ReadableStream', async () => { + const readableStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + try { + await Stream.consumeToJson({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.be.instanceOf(SyntaxError); // Empty string is not valid JSON + } + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const readableStream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + + try { + await Stream.consumeToJson({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.equal(error); + } + }); + }); + + describe('consumeToText', () => { + it('consumes a ReadableStream containing text and returns a string', async () => { + const inputText = 'Hello, World!'; + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(inputText)); + controller.close(); + } + }); + + const result = await Stream.consumeToText({ readableStream }); + 
expect(result).to.be.a('string'); + expect(result).to.equal(inputText); + }); + + it('handles an empty ReadableStream', async () => { + const readableStream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + const result = await Stream.consumeToText({ readableStream }); + expect(result).to.be.a('string'); + expect(result).to.equal(''); + }); + + it('consumes a large text stream and returns the expected text', async () => { + const largeText = 'a'.repeat(1024 * 1024); // 1MB of 'a' + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(largeText)); + controller.close(); + } + }); + + const result = await Stream.consumeToText({ readableStream }); + expect(result).to.equal(largeText); + }); + + it('throws an error for a stream that errors', async () => { + const error = new Error('Stream error'); + const readableStream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + + try { + await Stream.consumeToText({ readableStream }); + expect.fail('Should have thrown an error'); + } catch (err) { + expect(err).to.equal(error); + } + }); + }); + + describe('generateByteStream()', function () { + it('generates a stream with the specified length and fill value', async function () { + const streamByteLength = 100; + const fillValue = 43; + const stream = Stream.generateByteStream({ streamLength: streamByteLength, fillValue }); + + // Read data from the stream. 
+ const consumedBytes = await Stream.consumeToBytes({ readableStream: stream }); + + // Check the length of the received bytes + expect(consumedBytes.length).to.equal(streamByteLength); + + // Check if all bytes are set to 43 + consumedBytes.forEach(byte => { + expect(byte).to.equal(fillValue); + }); + }); + + it('generates a stream with the specified chunk length', async function () { + const streamByteLength = 100; + const chunkLength = 10; + const fillValue = 43; + const stream = Stream.generateByteStream({ streamLength: streamByteLength, chunkLength, fillValue }); + + // Collecting data from the stream. + const reader = stream.getReader(); + let receivedBytes = new Uint8Array(0); + let chunkCount = 0; + let firstChunkLength: number | undefined; + + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + if (done) break; + receivedBytes = new Uint8Array([...receivedBytes, ...value]); + firstChunkLength ??= value.length; + chunkCount++; + } + + // Check the length of the received bytes. + expect(receivedBytes.length).to.equal(streamByteLength); + + // Check the number of chunks received. + expect(chunkCount).to.equal(Math.ceil(streamByteLength / chunkLength)); + + // Check if the first chunk is of the expected length. + expect(firstChunkLength).to.equal(chunkLength); + }); + + it('handles stream lengths that are evenly divisible by chunk length', async function () { + const streamByteLength = 100; + const chunkLength = 10; + const stream = Stream.generateByteStream({ streamLength: streamByteLength, chunkLength }); + + // Read data from the stream. + const consumedBytes = await Stream.consumeToBytes({ readableStream: stream }); + + // Confirm that the stream contents are as expected. 
+ expect(consumedBytes.length).to.equal(streamByteLength); + }); + + it('handles stream lengths that are not evenly divisible by chunk length', async function () { + const streamByteLength = 100; + const chunkLength = 11; + const stream = Stream.generateByteStream({ streamLength: streamByteLength, chunkLength }); + + // Read data from the stream. + const consumedBytes = await Stream.consumeToBytes({ readableStream: stream }); + + // Confirm that the stream contents are as expected. + expect(consumedBytes.length).to.equal(streamByteLength); + }); + + it('generates a stream with chunks having random bytes within a specified range', async () => { + const streamLength = 100; + const chunkLength = 10; + const fillValueRange: [number, number] = [50, 60]; // Range for random values + + const readableStream = Stream.generateByteStream({ streamLength, chunkLength, fillValue: fillValueRange }); + const reader = readableStream.getReader(); + + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + if (done) break; + + expect(value).to.be.an.instanceof(Uint8Array); + expect(value.length).to.be.at.most(chunkLength); + + // Check each byte in the chunk is within the specified range + for (const byte of value) { + expect(byte).to.be.at.least(fillValueRange[0]); + expect(byte).to.be.at.most(fillValueRange[1]); + } + } + }); + + it('generates an indefinite stream when streamLength is not provided', async () => { + const chunkLength = 1; + const fillValue = 0; + const maxIterations = 10_000; // Limit iterations to avoid an infinite loop in the test. 
+ + const readableStream = Stream.generateByteStream({ chunkLength, fillValue }); + const reader = readableStream.getReader(); + + let iterations = 0; + let allChunksValid = true; + while (iterations < maxIterations) { + const { done, value } = await reader.read(); + if (done) break; + + allChunksValid = allChunksValid && value.length === chunkLength; + iterations++; + } + + expect(iterations).to.equal(maxIterations); + expect(allChunksValid).to.be.true; + }); + }); + + describe('isReadable()', () => { + it('returns true for a new ReadableStream', () => { + const stream = new ReadableStream(); + expect(Stream.isReadable({ readableStream: stream })).to.be.true; + }); + + it('returns true for an errored ReadableStream', () => { + /** + * Detecting an errored ReadableStream without actually reading from it is a bit tricky, + * as the stream's error state isn't directly exposed through its interface. The standard + * methods (getReader(), locked, etc.) do not provide information about the errored state + * unless you attempt to read from the stream. + * + * Since we don't want to actually read from (i.e., partly consume) the stream, the + * `isReadable()` method is incapable of detecting an errored stream. 
+ */ + const erroredStream = new ReadableStream({ + start(controller) { + controller.error(new Error('Stream intentionally errored')); + } + }); + expect(Stream.isReadable({ readableStream: erroredStream })).to.be.true; + }); + + it('returns false for a locked ReadableStream', () => { + const stream = new ReadableStream(); + const reader = stream.getReader(); + expect(Stream.isReadable({ readableStream: stream })).to.be.false; + reader.releaseLock(); + }); + + it('returns false for a consumed ReadableStream', async () => { + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('data'); + controller.close(); + }, + }); + const reader = stream.getReader(); + await reader.read(); + await reader.closed; + expect(Stream.isReadable({ readableStream: stream })).to.be.false; + }); + + it('returns false for a closed ReadableStream', async () => { + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + stream.getReader(); + + expect(Stream.isReadable({ readableStream: stream })).to.be.false; + }); + + it('returns false for non-stream objects', () => { + // @ts-expect-error because we're testing non-stream input. + expect(Stream.isReadable({ readableStream: {} })).to.be.false; + // @ts-expect-error because we're testing non-stream input. + expect(Stream.isReadable({ readableStream: null })).to.be.false; + // @ts-expect-error because we're testing non-stream input. 
+ expect(Stream.isReadable({ readableStream: undefined })).to.be.false; + }); + + it('returns false for a ReadableStream where getReader() throws an error', () => { + // Create a custom ReadableStream with an overridden getReader method that throws an error + const erroredStream = new ReadableStream(); + erroredStream.getReader = () => { throw new Error('getReader intentionally throws an error'); }; + + const result = Stream.isReadable({ readableStream: erroredStream }); + expect(result).to.be.false; + }); + }); + + describe('isReadableStream()', () => { + it('returns true for a ReadableStream', () => { + const readableStream = new ReadableStream(); + expect(Stream.isReadableStream(readableStream)).to.be.true; + }); + + it('returns false for a Node Readable stream', () => { + expect(Stream.isReadableStream(new Readable())).to.be.false; + }); + + + it('returns false for null', () => { + expect(Stream.isReadableStream(null)).to.be.false; + }); + + it('returns false for undefined', () => { + expect(Stream.isReadableStream(undefined)).to.be.false; + }); + + it('returns false for a number', () => { + expect(Stream.isReadableStream(123)).to.be.false; + }); + + it('returns false for a string', () => { + expect(Stream.isReadableStream('string')).to.be.false; + }); + + it('returns false for a boolean', () => { + expect(Stream.isReadableStream(true)).to.be.false; + }); + + it('returns false for an array', () => { + expect(Stream.isReadableStream([])).to.be.false; + }); + + it('returns false for an object without getReader method', () => { + expect(Stream.isReadableStream({})).to.be.false; + }); + + it('returns false for a function', () => { + expect(Stream.isReadableStream(() => {})).to.be.false; + }); + + it('returns false for an object with a non-function getReader property', () => { + const objWithNonFunctionGetReader = { getReader: 'not a function' }; + expect(Stream.isReadableStream(objWithNonFunctionGetReader)).to.be.false; + }); + }); + + describe('isStream', () => { 
+ it('returns true for a ReadableStream', () => { + const readableStream = new ReadableStream(); + expect(Stream.isStream(readableStream)).to.be.true; + }); + + it('returns true for a WritableStream', () => { + const writableStream = new WritableStream(); + expect(Stream.isStream(writableStream)).to.be.true; + }); + + it('returns true for a TransformStream', () => { + const transformStream = new TransformStream(); + expect(Stream.isStream(transformStream)).to.be.true; + }); + + it('returns false for non-stream objects', () => { + expect(Stream.isStream({})).to.be.false; + expect(Stream.isStream(null)).to.be.false; + expect(Stream.isStream(undefined)).to.be.false; + expect(Stream.isStream(123)).to.be.false; + }); + }); + + describe('isTransformStream', () => { + it('returns true for a TransformStream', () => { + const transformStream = new TransformStream(); + expect(Stream.isTransformStream(transformStream)).to.be.true; + }); + + it('returns false for ReadableStream and WritableStream', () => { + const readableStream = new ReadableStream(); + const writableStream = new WritableStream(); + expect(Stream.isTransformStream(readableStream)).to.be.false; + expect(Stream.isTransformStream(writableStream)).to.be.false; + }); + + it('returns false for non-stream objects', () => { + expect(Stream.isTransformStream({})).to.be.false; + expect(Stream.isTransformStream(null)).to.be.false; + expect(Stream.isTransformStream(undefined)).to.be.false; + expect(Stream.isTransformStream(123)).to.be.false; + }); + }); + + describe('isWritableStream', () => { + it('returns true for a WritableStream', () => { + const writableStream = new WritableStream(); + expect(Stream.isWritableStream(writableStream)).to.be.true; + }); + + it('returns false for ReadableStream and TransformStream', () => { + const readableStream = new ReadableStream(); + const transformStream = new TransformStream(); + expect(Stream.isWritableStream(readableStream)).to.be.false; + 
expect(Stream.isWritableStream(transformStream)).to.be.false; + }); + + it('returns false for non-stream objects', () => { + expect(Stream.isWritableStream({})).to.be.false; + expect(Stream.isWritableStream(null)).to.be.false; + expect(Stream.isWritableStream(undefined)).to.be.false; + expect(Stream.isWritableStream(123)).to.be.false; + }); + }); + +}); \ No newline at end of file diff --git a/packages/common/tests/type-utils.spec.ts b/packages/common/tests/type-utils.spec.ts index 009f587e7..d09d6137c 100644 --- a/packages/common/tests/type-utils.spec.ts +++ b/packages/common/tests/type-utils.spec.ts @@ -1,60 +1,96 @@ import { expect } from 'chai'; -import { isDefined, universalTypeOf } from '../src/type-utils.js'; - -describe('isDefined()', () => { - it('should return true for defined non-null values', () => { - expect(isDefined('string')).to.equal(true); - expect(isDefined(42)).to.equal(true); - expect(isDefined(false)).to.equal(true); - expect(isDefined({})).to.equal(true); - expect(isDefined([])).to.equal(true); - }); +import { isAsyncIterable, isDefined, universalTypeOf } from '../src/type-utils.js'; - it('should return false for undefined or null', () => { - expect(isDefined(undefined)).to.equal(false); - expect(isDefined(null)).to.equal(false); - }); -}); +describe('Type Utils', () => { + describe('isAsyncIterable()', () => { + it('should return true for a valid AsyncIterable', () => { + const asyncIterator = { + async *[Symbol.asyncIterator]() { + yield 1; + yield 2; + } + }; + expect(isAsyncIterable(asyncIterator)).to.be.true; + }); -describe('universalTypeOf()', () => { - it('should correctly identify Array', () => { - expect(universalTypeOf([1, 2, 3])).to.equal('Array'); - }); + it('should return false for non-object types', () => { + expect(isAsyncIterable(123)).to.be.false; + expect(isAsyncIterable('string')).to.be.false; + expect(isAsyncIterable(true)).to.be.false; + expect(isAsyncIterable(undefined)).to.be.false; + }); - it('should correctly 
identify ArrayBuffer', () => { - expect(universalTypeOf(new ArrayBuffer(2))).to.equal('ArrayBuffer'); - }); + it('should return false for null', () => { + expect(isAsyncIterable(null)).to.be.false; + }); - it('should correctly identify Blob', () => { - expect(universalTypeOf(new Blob(['foo']))).to.equal('Blob'); - }); + it('should return false for an object without [Symbol.asyncIterator]', () => { + const regularObject = { a: 1, b: 2 }; + expect(isAsyncIterable(regularObject)).to.be.false; + }); - it('should correctly identify Boolean', () => { - expect(universalTypeOf(true)).to.equal('Boolean'); + it('should return false for an object with a non-function [Symbol.asyncIterator]', () => { + const invalidAsyncIterator = { [Symbol.asyncIterator]: 123 }; + expect(isAsyncIterable(invalidAsyncIterator)).to.be.false; + }); }); - it('should correctly identify Number', () => { - expect(universalTypeOf(42)).to.equal('Number'); - }); + describe('isDefined()', () => { + it('should return true for defined non-null values', () => { + expect(isDefined('string')).to.equal(true); + expect(isDefined(42)).to.equal(true); + expect(isDefined(false)).to.equal(true); + expect(isDefined({})).to.equal(true); + expect(isDefined([])).to.equal(true); + }); - it('should correctly identify Null', () => { - expect(universalTypeOf(null)).to.equal('Null'); + it('should return false for undefined or null', () => { + expect(isDefined(undefined)).to.equal(false); + expect(isDefined(null)).to.equal(false); + }); }); - it('should correctly identify Object', () => { - expect(universalTypeOf({a: 1, b: 2})).to.equal('Object'); - }); + describe('universalTypeOf()', () => { + it('should correctly identify Array', () => { + expect(universalTypeOf([1, 2, 3])).to.equal('Array'); + }); - it('should correctly identify String', () => { - expect(universalTypeOf('some string')).to.equal('String'); - }); + it('should correctly identify ArrayBuffer', () => { + expect(universalTypeOf(new 
ArrayBuffer(2))).to.equal('ArrayBuffer'); + }); - it('should correctly identify Uint8Array', () => { - expect(universalTypeOf(new Uint8Array([1, 2, 3]))).to.equal('Uint8Array'); - }); + it('should correctly identify Blob', () => { + expect(universalTypeOf(new Blob(['foo']))).to.equal('Blob'); + }); + + it('should correctly identify Boolean', () => { + expect(universalTypeOf(true)).to.equal('Boolean'); + }); + + it('should correctly identify Number', () => { + expect(universalTypeOf(42)).to.equal('Number'); + }); + + it('should correctly identify Null', () => { + expect(universalTypeOf(null)).to.equal('Null'); + }); - it('should correctly identify Undefined', () => { - expect(universalTypeOf(undefined)).to.equal('Undefined'); + it('should correctly identify Object', () => { + expect(universalTypeOf({a: 1, b: 2})).to.equal('Object'); + }); + + it('should correctly identify String', () => { + expect(universalTypeOf('some string')).to.equal('String'); + }); + + it('should correctly identify Uint8Array', () => { + expect(universalTypeOf(new Uint8Array([1, 2, 3]))).to.equal('Uint8Array'); + }); + + it('should correctly identify Undefined', () => { + expect(universalTypeOf(undefined)).to.equal('Undefined'); + }); }); + }); \ No newline at end of file diff --git a/packages/dids/src/did-ion.ts b/packages/dids/src/did-ion.ts index 1eaed9daa..baca61304 100644 --- a/packages/dids/src/did-ion.ts +++ b/packages/dids/src/did-ion.ts @@ -84,11 +84,11 @@ export class DidIonMethod implements DidMethod { operationsEndpoint?: string }): Promise { const { - challengeEnabled = true, + challengeEnabled = false, challengeEndpoint = 'https://beta.ion.msidentity.com/api/v1.0/proof-of-work-challenge', keySet, services, - operationsEndpoint = 'https://beta.ion.msidentity.com/api/v1.0/operations' + operationsEndpoint = 'https://ion.tbd.engineering/operations' } = options; // Create ION Document. 
@@ -460,7 +460,7 @@ export class DidIonMethod implements DidMethod { }; } - const { resolutionEndpoint = 'https://discover.did.msidentity.com/1.0/identifiers/' } = resolutionOptions; + const { resolutionEndpoint = 'https://ion.tbd.engineering/identifiers/' } = resolutionOptions; const normalizeUrl = (url: string): string => url.endsWith('/') ? url : url + '/'; const resolutionUrl = `${normalizeUrl(resolutionEndpoint)}${parsedDid.did}`; diff --git a/packages/dids/tests/did-ion.spec.ts b/packages/dids/tests/did-ion.spec.ts index 0f3bcbe14..17b45de12 100644 --- a/packages/dids/tests/did-ion.spec.ts +++ b/packages/dids/tests/did-ion.spec.ts @@ -682,7 +682,7 @@ describe('DidIonMethod', () => { expect(resolutionResult.didDocumentMetadata.method).to.have.property('published', true); }); - it('returns notFound error with unpublished short form ION DIDs', async() => { + it('returns internalError error with unpublished short form ION DIDs', async() => { const did = 'did:ion:EiBCi7lnGtotBsFkbI_lQskQZLk_GPelU0C5-nRB4_nMfA'; const resolutionResult = await DidIonMethod.resolve({ didUrl: did }); @@ -690,7 +690,7 @@ describe('DidIonMethod', () => { expect(resolutionResult).to.have.property('didDocument'); expect(resolutionResult).to.have.property('didDocumentMetadata'); - expect(resolutionResult.didResolutionMetadata).to.have.property('error', 'notFound'); + expect(resolutionResult.didResolutionMetadata).to.have.property('error', 'internalError'); }); it('resolves published long form ION DIDs', async() => {