diff --git a/config.env b/config.env new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/.bin/ignored b/node_modules/.bin/ignored new file mode 120000 index 0000000..0e0ac00 --- /dev/null +++ b/node_modules/.bin/ignored @@ -0,0 +1 @@ +../dotignore/bin/ignored \ No newline at end of file diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime new file mode 120000 index 0000000..fbb7ee0 --- /dev/null +++ b/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 0000000..317eb29 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver \ No newline at end of file diff --git a/node_modules/.bin/tap-out b/node_modules/.bin/tap-out new file mode 120000 index 0000000..cb6a60e --- /dev/null +++ b/node_modules/.bin/tap-out @@ -0,0 +1 @@ +../tap-out/bin/cmd.js \ No newline at end of file diff --git a/node_modules/.bin/tap-spec b/node_modules/.bin/tap-spec new file mode 120000 index 0000000..a22b7e1 --- /dev/null +++ b/node_modules/.bin/tap-spec @@ -0,0 +1 @@ +../tap-spec/bin/cmd.js \ No newline at end of file diff --git a/node_modules/.bin/tape b/node_modules/.bin/tape new file mode 120000 index 0000000..dc4bc23 --- /dev/null +++ b/node_modules/.bin/tape @@ -0,0 +1 @@ +../tape/bin/tape \ No newline at end of file diff --git a/node_modules/.bin/tspec b/node_modules/.bin/tspec new file mode 120000 index 0000000..a22b7e1 --- /dev/null +++ b/node_modules/.bin/tspec @@ -0,0 +1 @@ +../tap-spec/bin/cmd.js \ No newline at end of file diff --git a/node_modules/accepts/HISTORY.md b/node_modules/accepts/HISTORY.md new file mode 100644 index 0000000..0bf0417 --- /dev/null +++ b/node_modules/accepts/HISTORY.md @@ -0,0 +1,236 @@ +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + +1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: 
mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/node_modules/accepts/LICENSE b/node_modules/accepts/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + 
+Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/accepts/README.md b/node_modules/accepts/README.md new file mode 100644 index 0000000..66a2f54 --- /dev/null +++ b/node_modules/accepts/README.md @@ -0,0 +1,142 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + + + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. 
Any value +that is not a full MIME types is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a different typed +respond body based on what the client wants to accept. The server lists it's +preferences in order and will get back the best match between the client and +server. + +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts +[travis-image]: https://badgen.net/travis/jshttp/accepts/master +[travis-url]: https://travis-ci.org/jshttp/accepts diff --git a/node_modules/accepts/index.js b/node_modules/accepts/index.js new file mode 100644 index 0000000..e9b2f63 --- /dev/null +++ b/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. 
+ * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... + * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... 
+ * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. + * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/node_modules/accepts/package.json b/node_modules/accepts/package.json new file mode 100644 index 0000000..fbf01ee --- /dev/null +++ b/node_modules/accepts/package.json @@ -0,0 +1,86 @@ +{ + "_from": "accepts@~1.3.7", + "_id": "accepts@1.3.7", + "_inBundle": false, + "_integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", + "_location": "/accepts", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "accepts@~1.3.7", + "name": "accepts", + "escapedName": "accepts", + "rawSpec": "~1.3.7", + "saveSpec": null, + "fetchSpec": "~1.3.7" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "_shasum": "531bc726517a3b2b41f850021c6cc15eaab507cd", + "_spec": "accepts@~1.3.7", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "bugs": { + "url": "https://github.com/jshttp/accepts/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + } + ], + "dependencies": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + }, + "deprecated": false, + "description": "Higher-level content negotiation", + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "homepage": "https://github.com/jshttp/accepts#readme", + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ], + "license": "MIT", + "name": "accepts", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/accepts.git" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + }, + "version": "1.3.7" +} diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js new file mode 100644 index 0000000..b9574ed --- /dev/null +++ b/node_modules/ansi-regex/index.js @@ -0,0 +1,4 @@ +'use strict'; 
+module.exports = function () { + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; +}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/ansi-regex/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json new file mode 100644 index 0000000..b0089d5 --- /dev/null +++ b/node_modules/ansi-regex/package.json @@ -0,0 +1,109 @@ +{ + "_from": "ansi-regex@^2.0.0", + "_id": "ansi-regex@2.1.1", + "_inBundle": false, + "_integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "_location": "/ansi-regex", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ansi-regex@^2.0.0", + "name": "ansi-regex", + "escapedName": "ansi-regex", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/has-ansi", + "/strip-ansi" + ], + "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "_shasum": "c3b33ab5ee360d86e0e628f0468ae7ef27d654df", + "_spec": "ansi-regex@^2.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/has-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/ansi-regex/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Regular expression for matching ANSI escape codes", + "devDependencies": { + "ava": "0.17.0", + "xo": "0.16.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/chalk/ansi-regex#readme", + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "license": "MIT", + "maintainers": [ + { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + { + "name": "Joshua Appelman", + "email": "jappelman@xebia.com", + "url": "jbnicolai.com" + }, + { + "name": "JD Ballard", + "email": "i.am.qix@gmail.com", + "url": "github.com/qix-" + } + ], + "name": "ansi-regex", + "repository": { + "type": "git", + "url": 
"git+https://github.com/chalk/ansi-regex.git" + }, + "scripts": { + "test": "xo && ava --verbose", + "view-supported": "node fixtures/view-codes.js" + }, + "version": "2.1.1", + "xo": { + "rules": { + "guard-for-in": 0, + "no-loop-func": 0 + } + } +} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md new file mode 100644 index 0000000..6a928ed --- /dev/null +++ b/node_modules/ansi-regex/readme.md @@ -0,0 +1,39 @@ +# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex) + +> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001b[4mcake\u001b[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001b[4mcake\u001b[0m'.match(ansiRegex()); +//=> ['\u001b[4m', '\u001b[0m'] +``` + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. 
+ + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..7894527 --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,65 @@ +'use strict'; + +function assembleStyles () { + var styles = { + modifiers: { + reset: [0, 0], + bold: [1, 22], // 21 isn't widely supported and 22 does the same thing + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + colors: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39] + }, + bgColors: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49] + } + }; + + // fix humans + styles.colors.grey = styles.colors.gray; + + Object.keys(styles).forEach(function (groupName) { + var group = styles[groupName]; + + Object.keys(group).forEach(function (styleName) { + var style = group[styleName]; + + styles[styleName] = group[styleName] = { + open: '\u001b[' + style[0] + 'm', + close: '\u001b[' + style[1] + 'm' + }; + }); + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + }); + + return styles; +} + +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
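Editor's note on the `ansi-styles` `index.js` added above: the `Object.defineProperty(..., { enumerable: false })` calls keep the grouped views (`modifiers`, `colors`, `bgColors`) reachable without letting them show up during enumeration, alongside the flattened per-style entries. A minimal sketch of that behaviour, inferred from the code in this hunk (and consistent with the package README later in this patch); it is illustrative only, not part of the patch itself:

```js
// Sketch: flattened style names are enumerable, the group keys are not,
// yet both paths resolve to the same { open, close } escape-code pairs.
var styles = require('ansi-styles');

Object.keys(styles).indexOf('green');   // >= 0  — flattened styles enumerate
Object.keys(styles).indexOf('colors');  // -1    — the group key is hidden

// styles.green and styles.colors.green are the same object.
console.log(styles.colors.green.open + 'ok' + styles.colors.green.close);
```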
diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..a96d212 --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,90 @@ +{ + "_from": "ansi-styles@^2.2.1", + "_id": "ansi-styles@2.2.1", + "_inBundle": false, + "_integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "_location": "/ansi-styles", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ansi-styles@^2.2.1", + "name": "ansi-styles", + "escapedName": "ansi-styles", + "rawSpec": "^2.2.1", + "saveSpec": null, + "fetchSpec": "^2.2.1" + }, + "_requiredBy": [ + "/chalk" + ], + "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "_shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe", + "_spec": "ansi-styles@^2.2.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/chalk", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/ansi-styles/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "ANSI escape codes for styling strings in the terminal", + "devDependencies": { + "mocha": "*" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/chalk/ansi-styles#readme", + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "license": "MIT", + "maintainers": [ + { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + { + "name": "Joshua Appelman", + "email": "jappelman@xebia.com", + "url": "jbnicolai.com" + } + ], + "name": "ansi-styles", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/ansi-styles.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "2.2.1" +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..3f933f6 --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,86 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + +![](screenshot.png) + + +## Install + +``` +$ npm install --save ansi-styles +``` + + +## Usage + +```js +var ansi = require('ansi-styles'); + +console.log(ansi.green.open + 'Hello world!' + ansi.green.close); +``` + + +## API + +Each style has an `open` and `close` property. + + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` + + +## Advanced usage + +By default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. 
This makes it easier to expose only a subset in a higher-level module. + +- `ansi.modifiers` +- `ansi.colors` +- `ansi.bgColors` + + +###### Example + +```js +console.log(ansi.colors.green.open); +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 0000000..983fbe8 --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 0000000..91fa5b6 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. + +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 0000000..089117b --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. 
+ * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. + * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 0000000..e300279 --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,64 @@ +{ + "_from": "array-flatten@1.1.1", + "_id": "array-flatten@1.1.1", + "_inBundle": false, + "_integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=", + "_location": "/array-flatten", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "array-flatten@1.1.1", + "name": "array-flatten", + "escapedName": "array-flatten", + "rawSpec": "1.1.1", + "saveSpec": null, + "fetchSpec": "1.1.1" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "_shasum": "9a5f699051b1e7073328f2a008968b64ea2955d2", + "_spec": "array-flatten@1.1.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Flatten an array of nested arrays into a single flat array", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + }, + "files": [ + "array-flatten.js", + "LICENSE" + ], + "homepage": "https://github.com/blakeembrey/array-flatten", + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "license": "MIT", + "main": "array-flatten.js", + "name": "array-flatten", + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "version": "1.1.1" +} diff --git a/node_modules/balanced-match/.npmignore b/node_modules/balanced-match/.npmignore new file mode 100644 index 0000000..ae5d8c3 --- /dev/null +++ b/node_modules/balanced-match/.npmignore @@ -0,0 +1,5 @@ +test +.gitignore +.travis.yml +Makefile +example.js diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files 
(the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..08e918c --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,91 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. 
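To make the `.range` behaviour described above concrete, a short sketch whose output values follow directly from the rules and examples given in this README:

```js
var balanced = require('balanced-match');

// First non-nested pair: returns [indexOfA, indexOfMatchingB].
balanced.range('{', '}', 'pre{in{nested}}post');
//=> [ 3, 14 ]

// Unmatched pairs: the first pair that was closed wins, as stated above.
balanced.range('{', '}', '{{a}');
//=> [ 1, 3 ]
balanced.range('{', '}', '{a}}');
//=> [ 0, 2 ]
```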
+ +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..1685a76 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,59 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..312053b --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,77 @@ +{ + "_from": "balanced-match@^1.0.0", + "_id": "balanced-match@1.0.0", + "_inBundle": false, + "_integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "_location": "/balanced-match", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "balanced-match@^1.0.0", + "name": "balanced-match", + "escapedName": "balanced-match", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/brace-expansion" + ], + "_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767", + "_spec": "balanced-match@^1.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/brace-expansion", + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "bugs": { + "url": "https://github.com/juliangruber/balanced-match/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "license": "MIT", + "main": "index.js", + "name": "balanced-match", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "scripts": { + "bench": "make bench", + "test": "make test" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "version": "1.0.0" +} diff --git a/node_modules/body-parser/HISTORY.md b/node_modules/body-parser/HISTORY.md new file mode 100644 index 0000000..a1d3fbf --- /dev/null +++ b/node_modules/body-parser/HISTORY.md @@ -0,0 +1,609 @@ +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove 
argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 / 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - 
deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 +=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if 
request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 +=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + - empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 
+================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 +================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 +================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/node_modules/body-parser/LICENSE b/node_modules/body-parser/LICENSE new file mode 100644 index 0000000..386b7b6 --- /dev/null +++ b/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a 
copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/body-parser/README.md b/node_modules/body-parser/README.md new file mode 100644 index 0000000..aba6297 --- /dev/null +++ b/node_modules/body-parser/README.md @@ -0,0 +1,443 @@ +# body-parser + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + + + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. 
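
As a concrete illustration of the warning above that `req.body` is built from user-controlled input, here is a minimal sketch of the kind of validation a handler might perform before trusting the parsed body. The route path and the `text` field are hypothetical and not part of body-parser's API; this is an assumption-laden example, not the module's own recommendation.

```js
var express = require('express')
var bodyParser = require('body-parser')

var app = express()

// parse application/json bodies for all routes
app.use(bodyParser.json())

// hypothetical route: never assume req.body has the shape you expect
app.post('/comments', function (req, res) {
  var text = req.body && typeof req.body.text === 'string'
    ? req.body.text.trim()
    : ''

  if (!text) {
    // covers an empty body ({}), a missing field, or a non-string value
    return res.status(400).send('text field is required')
  }

  res.status(201).send('saved: ' + text)
})
```

Because the middleware leaves `req.body` as an empty object when nothing was parsed, the guard above also covers requests with no body or a non-matching `Content-Type`.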
+ +The various errors returned by this module are described in the +[errors section](#errors). + +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). + +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. 
+If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). 
The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). + +Defaults to `true`, but using the default has been deprecated. Please +research into the difference between `qs` and `querystring` and choose the +appropriate setting. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +## Errors + +The middlewares provided by this module create errors depending on the error +condition during parsing. The errors will typically have a `status`/`statusCode` +property that contains the suggested HTTP response code, an `expose` property +to determine if the `message` property should be displayed to the client, a +`type` property to determine the type of error without matching against the +`message`, and a `body` property containing the read body, if available. + +The following are the common errors emitted, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`. The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `charset` property will be set to the +encoding that is unsupported. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and `type` property is set to `'request.aborted'`. + +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. 
The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. This typically occurs when the request is malformed, +typically when the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. + +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported. + +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. + +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/body-parser.svg +[npm-url]: https://npmjs.org/package/body-parser +[travis-image]: https://img.shields.io/travis/expressjs/body-parser/master.svg +[travis-url]: https://travis-ci.org/expressjs/body-parser +[coveralls-image]: https://img.shields.io/coveralls/expressjs/body-parser/master.svg +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[downloads-image]: https://img.shields.io/npm/dm/body-parser.svg +[downloads-url]: https://npmjs.org/package/body-parser diff --git a/node_modules/body-parser/index.js b/node_modules/body-parser/index.js new file mode 100644 index 0000000..93c3a1f --- /dev/null +++ b/node_modules/body-parser/index.js @@ -0,0 +1,157 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. + * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. + * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. 
+ * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + var opts = {} + + // exclude type option + if (options) { + for (var prop in options) { + if (prop !== 'type') { + opts[prop] = options[prop] + } + } + } + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. + * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js new file mode 100644 index 0000000..c102609 --- /dev/null +++ b/node_modules/body-parser/lib/read.js @@ -0,0 +1,181 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var createError = require('http-errors') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. + * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? 
null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // read off entire request + stream.resume() + onFinished(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. + * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} diff --git a/node_modules/body-parser/lib/types/json.js b/node_modules/body-parser/lib/types/json.js new file mode 100644 index 0000000..2971dc1 --- /dev/null +++ b/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,230 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. 
+ * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*(.)/ // eslint-disable-line no-control-regex + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.substr(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. + * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = str.substring(0, index) + '#' + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace('#', char), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + return FIRST_CHAR_REGEXP.exec(str)[1] +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. 
+ * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/raw.js b/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 0000000..f5d1b67 --- /dev/null +++ b/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/text.js b/node_modules/body-parser/lib/types/text.js new file mode 100644 index 0000000..083a009 --- /dev/null +++ b/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. 
+ * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/urlencoded.js b/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 0000000..b2ca8f1 --- /dev/null +++ b/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,284 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? 
extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: Infinity, + parameterLimit: parameterLimit + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. + * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? 
options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/package.json b/node_modules/body-parser/package.json new file mode 100644 index 0000000..2d2f9b7 --- /dev/null +++ b/node_modules/body-parser/package.json @@ -0,0 +1,91 @@ +{ + "_from": "body-parser@1.19.0", + "_id": "body-parser@1.19.0", + "_inBundle": false, + "_integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "_location": "/body-parser", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "body-parser@1.19.0", + "name": "body-parser", + "escapedName": "body-parser", + "rawSpec": "1.19.0", + "saveSpec": null, + "fetchSpec": "1.19.0" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "_shasum": "96b2709e57c9c4e09a6fd66a8fd979844f69f08a", + "_spec": "body-parser@1.19.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "bugs": { + "url": "https://github.com/expressjs/body-parser/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + } + ], + "dependencies": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "deprecated": false, + "description": "Node.js body parsing middleware", + "devDependencies": { + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "istanbul": "0.4.5", + "methods": "1.1.2", + "mocha": "6.1.4", + "safe-buffer": "5.1.2", + "supertest": "4.0.2" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "index.js" + ], + "homepage": "https://github.com/expressjs/body-parser#readme", + "license": "MIT", + "name": "body-parser", + "repository": { + "type": "git", + "url": "git+https://github.com/expressjs/body-parser.git" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --require test/support/env --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --require test/support/env 
--reporter spec --check-leaks test/" + }, + "version": "1.19.0" +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. 
+If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..4b1859b --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,75 @@ +{ + "_from": "brace-expansion@^1.1.7", + "_id": "brace-expansion@1.1.11", + "_inBundle": false, + "_integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "_location": "/brace-expansion", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "brace-expansion@^1.1.7", + "name": "brace-expansion", + "escapedName": "brace-expansion", + "rawSpec": "^1.1.7", + "saveSpec": null, + "fetchSpec": "^1.1.7" + }, + "_requiredBy": [ + "/minimatch" + ], + "_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "_shasum": "3c7fcbf529d87226f3d2f52b966ff5271eb441dd", + "_spec": "brace-expansion@^1.1.7", + "_where": "/home/kareml/Desktop/SMS/node_modules/minimatch", + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "bugs": { + "url": "https://github.com/juliangruber/brace-expansion/issues" + }, + "bundleDependencies": false, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "deprecated": false, + "description": "Brace expansion as known from sh/bash", + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "keywords": [], + "license": "MIT", + "main": "index.js", + "name": "brace-expansion", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "scripts": { + "bench": "matcha test/perf/bench.js", + "gentest": "bash test/generate.sh", + "test": "tape test/*.js" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "version": "1.1.11" +} diff --git a/node_modules/buffer-shims/index.js b/node_modules/buffer-shims/index.js new file mode 100644 index 0000000..1cab4c0 --- /dev/null +++ b/node_modules/buffer-shims/index.js @@ -0,0 +1,108 @@ +'use strict'; + +var buffer = require('buffer'); +var Buffer = buffer.Buffer; +var SlowBuffer = buffer.SlowBuffer; +var MAX_LEN = buffer.kMaxLength || 2147483647; +exports.alloc = function alloc(size, fill, encoding) { + if (typeof Buffer.alloc === 'function') { + return Buffer.alloc(size, fill, encoding); + } + if (typeof encoding === 
'number') { + throw new TypeError('encoding must not be number'); + } + if (typeof size !== 'number') { + throw new TypeError('size must be a number'); + } + if (size > MAX_LEN) { + throw new RangeError('size is too large'); + } + var enc = encoding; + var _fill = fill; + if (_fill === undefined) { + enc = undefined; + _fill = 0; + } + var buf = new Buffer(size); + if (typeof _fill === 'string') { + var fillBuf = new Buffer(_fill, enc); + var flen = fillBuf.length; + var i = -1; + while (++i < size) { + buf[i] = fillBuf[i % flen]; + } + } else { + buf.fill(_fill); + } + return buf; +} +exports.allocUnsafe = function allocUnsafe(size) { + if (typeof Buffer.allocUnsafe === 'function') { + return Buffer.allocUnsafe(size); + } + if (typeof size !== 'number') { + throw new TypeError('size must be a number'); + } + if (size > MAX_LEN) { + throw new RangeError('size is too large'); + } + return new Buffer(size); +} +exports.from = function from(value, encodingOrOffset, length) { + if (typeof Buffer.from === 'function' && (!global.Uint8Array || Uint8Array.from !== Buffer.from)) { + return Buffer.from(value, encodingOrOffset, length); + } + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number'); + } + if (typeof value === 'string') { + return new Buffer(value, encodingOrOffset); + } + if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { + var offset = encodingOrOffset; + if (arguments.length === 1) { + return new Buffer(value); + } + if (typeof offset === 'undefined') { + offset = 0; + } + var len = length; + if (typeof len === 'undefined') { + len = value.byteLength - offset; + } + if (offset >= value.byteLength) { + throw new RangeError('\'offset\' is out of bounds'); + } + if (len > value.byteLength - offset) { + throw new RangeError('\'length\' is out of bounds'); + } + return new Buffer(value.slice(offset, offset + len)); + } + if (Buffer.isBuffer(value)) { + var out = new Buffer(value.length); + value.copy(out, 0, 0, value.length); + return out; + } + if (value) { + if (Array.isArray(value) || (typeof ArrayBuffer !== 'undefined' && value.buffer instanceof ArrayBuffer) || 'length' in value) { + return new Buffer(value); + } + if (value.type === 'Buffer' && Array.isArray(value.data)) { + return new Buffer(value.data); + } + } + + throw new TypeError('First argument must be a string, Buffer, ' + 'ArrayBuffer, Array, or array-like object.'); +} +exports.allocUnsafeSlow = function allocUnsafeSlow(size) { + if (typeof Buffer.allocUnsafeSlow === 'function') { + return Buffer.allocUnsafeSlow(size); + } + if (typeof size !== 'number') { + throw new TypeError('size must be a number'); + } + if (size >= MAX_LEN) { + throw new RangeError('size is too large'); + } + return new SlowBuffer(size); +} diff --git a/node_modules/buffer-shims/license.md b/node_modules/buffer-shims/license.md new file mode 100644 index 0000000..01cfaef --- /dev/null +++ b/node_modules/buffer-shims/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2016 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/buffer-shims/package.json b/node_modules/buffer-shims/package.json new file mode 100644 index 0000000..eb55e5d --- /dev/null +++ b/node_modules/buffer-shims/package.json @@ -0,0 +1,49 @@ +{ + "_from": "buffer-shims@~1.0.0", + "_id": "buffer-shims@1.0.0", + "_inBundle": false, + "_integrity": "sha1-mXjOMXOIxkmth5MCjDR37wRKi1E=", + "_location": "/buffer-shims", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "buffer-shims@~1.0.0", + "name": "buffer-shims", + "escapedName": "buffer-shims", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", + "_shasum": "9978ce317388c649ad8793028c3477ef044a8b51", + "_spec": "buffer-shims@~1.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/calvinmetcalf/buffer-shims/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "some shims for node buffers", + "devDependencies": { + "tape": "^4.5.1" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/calvinmetcalf/buffer-shims#readme", + "license": "MIT", + "main": "index.js", + "name": "buffer-shims", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/calvinmetcalf/buffer-shims.git" + }, + "scripts": { + "test": "tape test/*.js" + }, + "version": "1.0.0" +} diff --git a/node_modules/buffer-shims/readme.md b/node_modules/buffer-shims/readme.md new file mode 100644 index 0000000..7ea6475 --- /dev/null +++ b/node_modules/buffer-shims/readme.md @@ -0,0 +1,21 @@ +buffer-shims +=== + +functions to make sure the new buffer methods work in older browsers. + +```js +var bufferShim = require('buffer-shims'); +bufferShim.from('foo'); +bufferShim.alloc(9, 'cafeface', 'hex'); +bufferShim.allocUnsafe(15); +bufferShim.allocUnsafeSlow(21); +``` + +should just use the original in newer nodes and on older nodes uses fallbacks. 
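+
+The detection is plain feature testing. The sketch below is a simplified illustration of what `index.js` does for `alloc` (the real code also validates `size`, supports string fills with an encoding, and rejects sizes above `buffer.kMaxLength`):
+
+```js
+var buffer = require('buffer');
+var NativeBuffer = buffer.Buffer;
+
+function alloc(size, fill) {
+  // use the built-in implementation when the running Node provides it
+  if (typeof NativeBuffer.alloc === 'function') {
+    return NativeBuffer.alloc(size, fill);
+  }
+  // fallback for older Nodes: allocate with the legacy constructor, then
+  // fill explicitly so no stale memory is exposed
+  var buf = new NativeBuffer(size);
+  buf.fill(fill === undefined ? 0 : fill);
+  return buf;
+}
+```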
+ +Known Issues +=== +- this does not patch the buffer object, only the constructor stuff +- it's actually a polyfill + +![](https://i.imgur.com/zxII3jJ.gif) diff --git a/node_modules/buffer-writer/.travis.yml b/node_modules/buffer-writer/.travis.yml new file mode 100644 index 0000000..8e59bb3 --- /dev/null +++ b/node_modules/buffer-writer/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - 4 + - 6 + - 8 + - 10 + - 11 diff --git a/node_modules/buffer-writer/LICENSE b/node_modules/buffer-writer/LICENSE new file mode 100644 index 0000000..72dc60d --- /dev/null +++ b/node_modules/buffer-writer/LICENSE @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/buffer-writer/README.md b/node_modules/buffer-writer/README.md new file mode 100644 index 0000000..81eccc0 --- /dev/null +++ b/node_modules/buffer-writer/README.md @@ -0,0 +1,48 @@ +# buffer-writer + +[![Build Status](https://secure.travis-ci.org/brianc/node-buffer-writer.png?branch=master)](http://travis-ci.org/brianc/node-buffer-writer) + +Fast & efficient buffer writer used to keep memory usage low by internally recycling a single large buffer. + +Used as the binary protocol writer in [node-postgres](https://github.com/brianc/node-postgres) + +Since postgres requires big endian encoding, this only writes big endian numbers for now, but can & probably will easily be extended to write little endian as well. + +I'll admit this has a few postgres specific things I might need to take out in the future, such as `addHeader` + +## api + +`var writer = new (require('buffer-writer')());` + +### writer.addInt32(num) + +Writes a 4-byte big endian binary encoded number to the end of the buffer. + +### writer.addInt16(num) + +Writes a 2-byte big endian binary encoded number to the end of the buffer. + +### writer.addCString(string) + +Writes a string to the buffer `utf8` encoded and adds a null character (`\0`) at the end. + +### var buffer = writer.addHeader(char) + +Writes the 5 byte PostgreSQL required header to the beginning of the buffer. (1 byte for character, 1 BE Int32 for length of the buffer) + +### var buffer = writer.join() + +Collects all data in the writer and joins it into a single, new buffer. + +### var buffer = writer.flush(char) + +Writes the 5 byte postgres required message header, collects all data in the writer and joins it into a single, new buffer, and then resets the writer. + +## thoughts + +This is kind of node-postgres specific. 
If you're interested in using this for a more general purpose thing, lemme know. +I would love to work with you on getting this more reusable for your needs. + +## license + +MIT diff --git a/node_modules/buffer-writer/index.js b/node_modules/buffer-writer/index.js new file mode 100644 index 0000000..f3c119e --- /dev/null +++ b/node_modules/buffer-writer/index.js @@ -0,0 +1,129 @@ +//binary data writer tuned for creating +//postgres message packets as effeciently as possible by reusing the +//same buffer to avoid memcpy and limit memory allocations +var Writer = module.exports = function (size) { + this.size = size || 1024; + this.buffer = Buffer.alloc(this.size + 5); + this.offset = 5; + this.headerPosition = 0; +}; + +//resizes internal buffer if not enough size left +Writer.prototype._ensure = function (size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.alloc(newSize); + oldBuffer.copy(this.buffer); + } +}; + +Writer.prototype.addInt32 = function (num) { + this._ensure(4); + this.buffer[this.offset++] = (num >>> 24 & 0xFF); + this.buffer[this.offset++] = (num >>> 16 & 0xFF); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +Writer.prototype.addInt16 = function (num) { + this._ensure(2); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +//for versions of node requiring 'length' as 3rd argument to buffer.write +var writeString = function (buffer, string, offset, len) { + buffer.write(string, offset, len); +}; + +//overwrite function for older versions of node +if (Buffer.prototype.write.length === 3) { + writeString = function (buffer, string, offset, len) { + buffer.write(string, offset); + }; +} + +Writer.prototype.addCString = function (string) { + //just write a 0 for empty or null strings + if (!string) { + this._ensure(1); + } else { + var len = Buffer.byteLength(string); + this._ensure(len + 1); //+1 for null terminator + writeString(this.buffer, string, this.offset, len); + this.offset += len; + } + + this.buffer[this.offset++] = 0; // null terminator + return this; +}; + +Writer.prototype.addChar = function (c) { + this._ensure(1); + writeString(this.buffer, c, this.offset, 1); + this.offset++; + return this; +}; + +Writer.prototype.addString = function (string) { + string = string || ""; + var len = Buffer.byteLength(string); + this._ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; +}; + +Writer.prototype.getByteLength = function () { + return this.offset - 5; +}; + +Writer.prototype.add = function (otherBuffer) { + this._ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; +}; + +Writer.prototype.clear = function () { + this.offset = 5; + this.headerPosition = 0; + this.lastEnd = 0; +}; + +//appends a header block to all the written data since the last +//subsequent header or to the beginning if there is only one data block +Writer.prototype.addHeader = function (code, last) { + var origOffset = this.offset; + this.offset = this.headerPosition; + this.buffer[this.offset++] = code; + //length is everything in this packet minus the code + 
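+  // i.e. the 4-byte length field counts itself plus the body bytes, but not
+  // the 1-byte code, matching the postgres wire format this writer targets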
this.addInt32(origOffset - (this.headerPosition + 1)); + //set next header position + this.headerPosition = origOffset; + //make space for next header + this.offset = origOffset; + if (!last) { + this._ensure(5); + this.offset += 5; + } +}; + +Writer.prototype.join = function (code) { + if (code) { + this.addHeader(code, true); + } + return this.buffer.slice(code ? 0 : 5, this.offset); +}; + +Writer.prototype.flush = function (code) { + var result = this.join(code); + this.clear(); + return result; +}; diff --git a/node_modules/buffer-writer/package.json b/node_modules/buffer-writer/package.json new file mode 100644 index 0000000..8840a1f --- /dev/null +++ b/node_modules/buffer-writer/package.json @@ -0,0 +1,57 @@ +{ + "_from": "buffer-writer@2.0.0", + "_id": "buffer-writer@2.0.0", + "_inBundle": false, + "_integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "_location": "/buffer-writer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "buffer-writer@2.0.0", + "name": "buffer-writer", + "escapedName": "buffer-writer", + "rawSpec": "2.0.0", + "saveSpec": null, + "fetchSpec": "2.0.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "_shasum": "ce7eb81a38f7829db09c873f2fbb792c0c98ec04", + "_spec": "buffer-writer@2.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/pg", + "author": { + "name": "Brian M. Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-buffer-writer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "a fast, efficient buffer writer", + "devDependencies": { + "mocha": "5.2.0" + }, + "engines": { + "node": ">=4" + }, + "homepage": "https://github.com/brianc/node-buffer-writer#readme", + "keywords": [ + "buffer", + "writer", + "builder" + ], + "license": "MIT", + "main": "index.js", + "name": "buffer-writer", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-buffer-writer.git" + }, + "scripts": { + "test": "mocha --throw-deprecation" + }, + "version": "2.0.0" +} diff --git a/node_modules/buffer-writer/test/mocha.opts b/node_modules/buffer-writer/test/mocha.opts new file mode 100644 index 0000000..5efaf24 --- /dev/null +++ b/node_modules/buffer-writer/test/mocha.opts @@ -0,0 +1 @@ +--ui tdd diff --git a/node_modules/buffer-writer/test/writer-tests.js b/node_modules/buffer-writer/test/writer-tests.js new file mode 100644 index 0000000..ded91c8 --- /dev/null +++ b/node_modules/buffer-writer/test/writer-tests.js @@ -0,0 +1,218 @@ +var Writer = require(__dirname + "/../"); + +var assert = require('assert'); +var util = require('util'); + +assert.equalBuffers = function (actual, expected) { + var spit = function (actual, expected) { + console.log(""); + console.log("actual " + util.inspect(actual)); + console.log("expect " + util.inspect(expected)); + console.log(""); + }; + if (actual.length != expected.length) { + spit(actual, expected); + assert.strictEqual(actual.length, expected.length); + } + for (var i = 0; i < actual.length; i++) { + if (actual[i] != expected[i]) { + spit(actual, expected); + } + assert.strictEqual(actual[i], expected[i]); + } +}; + +suite('adding int32', function () { + var testAddingInt32 = function (int, expectedBuffer) { + test('writes ' + int, function () { + var subject = new Writer(); + var result = subject.addInt32(int).join(); + assert.equalBuffers(result, expectedBuffer); + }); + }; + + 
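+  // addInt32 is big-endian (most significant byte first), and join() with no
+  // header code slices off the 5 reserved header bytes, so the expected arrays
+  // below hold only the payload; the largest int32, 2147483647, would
+  // therefore come out as [0x7f, 0xff, 0xff, 0xff]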
testAddingInt32(0, [0, 0, 0, 0]); + testAddingInt32(1, [0, 0, 0, 1]); + testAddingInt32(256, [0, 0, 1, 0]); + test('writes largest int32', function () { + //todo need to find largest int32 when I have internet access + return false; + }); + + test('writing multiple int32s', function () { + var subject = new Writer(); + var result = subject.addInt32(1).addInt32(10).addInt32(0).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 0, 0, 0x0a, 0, 0, 0, 0]); + }); + + suite('having to resize the buffer', function () { + test('after resize correct result returned', function () { + var subject = new Writer(10); + subject.addInt32(1).addInt32(1).addInt32(1); + assert.equalBuffers(subject.join(), [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]); + }); + }); +}); + +suite('int16', function () { + test('writes 0', function () { + var subject = new Writer(); + var result = subject.addInt16(0).join(); + assert.equalBuffers(result, [0, 0]); + }); + + test('writes 400', function () { + var subject = new Writer(); + var result = subject.addInt16(400).join(); + assert.equalBuffers(result, [1, 0x90]); + }); + + test('writes many', function () { + var subject = new Writer(); + var result = subject.addInt16(0).addInt16(1).addInt16(2).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 2]); + }); + + test('resizes if internal buffer fills up', function () { + var subject = new Writer(3); + var result = subject.addInt16(2).addInt16(3).join(); + assert.equalBuffers(result, [0, 2, 0, 3]); + }); + +}); + +suite('cString', function () { + test('writes empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString().join(); + assert.equalBuffers(result, [0]); + }); + + test('writes two empty cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("").addCString("").join(); + assert.equalBuffers(result, [0, 0]); + }); + + + test('writes non-empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('resizes if reached end', function () { + var subject = new Writer(3); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('writes multiple cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("!").addCString("!").join(); + assert.equalBuffers(result, [33, 0, 33, 0]); + }); + +}); + +test('writes char', function () { + var subject = new Writer(2); + var result = subject.addChar('a').addChar('b').addChar('c').join(); + assert.equalBuffers(result, [0x61, 0x62, 0x63]); +}); + +test('gets correct byte length', function () { + var subject = new Writer(5); + assert.strictEqual(subject.getByteLength(), 0); + subject.addInt32(0); + assert.strictEqual(subject.getByteLength(), 4); + subject.addCString("!"); + assert.strictEqual(subject.getByteLength(), 6); +}); + +test('can add arbitrary buffer to the end', function () { + var subject = new Writer(4); + subject.addCString("!!!") + var result = subject.add(Buffer.from("@@@")).join(); + assert.equalBuffers(result, [33, 33, 33, 0, 0x40, 0x40, 0x40]); +}); + +suite('can write normal string', function () { + var subject = new Writer(4); + var result = subject.addString("!").join(); + assert.equalBuffers(result, [33]); + test('can write cString too', function () { + var result = subject.addCString("!").join(); + assert.equalBuffers(result, [33, 33, 0]); + }); + test('can resize', function () { + var result = 
subject.addString("!!").join(); + assert.equalBuffers(result, [33, 33, 0, 33, 33]); + }); +}); + + +suite('clearing', function () { + var subject = new Writer(); + subject.addCString("@!!#!#"); + subject.addInt32(10401); + test('clears', function () { + subject.clear(); + assert.equalBuffers(subject.join(), []); + }); + test('writing more', function () { + var joinedResult = subject.addCString("!").addInt32(9).addInt16(2).join(); + assert.equalBuffers(joinedResult, [33, 0, 0, 0, 0, 9, 0, 2]); + }); + test('returns result', function () { + var flushedResult = subject.flush(); + assert.equalBuffers(flushedResult, [33, 0, 0, 0, 0, 9, 0, 2]) + }); + test('clears the writer', function () { + assert.equalBuffers(subject.join(), []) + assert.equalBuffers(subject.flush(), []) + }); +}); + +test("resizing to much larger", function () { + var subject = new Writer(2); + var string = "!!!!!!!!"; + var result = subject.addCString(string).flush(); + assert.equalBuffers(result, [33, 33, 33, 33, 33, 33, 33, 33, 0]); +}); + +suite("flush", function () { + test('added as a hex code to a full writer', function () { + var subject = new Writer(2); + var result = subject.addCString("!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); + + test('added as a hex code to a non-full writer', function () { + var subject = new Writer(10).addCString("!"); + var joinedResult = subject.join(0x50); + var result = subject.flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); + + test('added as a hex code to a buffer which requires resizing', function () { + var result = new Writer(2).addCString("!!!!!!!!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 0x0D, 33, 33, 33, 33, 33, 33, 33, 33, 0]); + }); +}); + +suite("header", function () { + test('adding two packets with headers', function () { + var subject = new Writer(10).addCString("!"); + subject.addHeader(0x50); + subject.addCString("!!"); + subject.addHeader(0x40); + subject.addCString("!"); + var result = subject.flush(0x10); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0, 0x40, 0, 0, 0, 7, 33, 33, 0, 0x10, 0, 0, 0, 6, 33, 0]); + }); +}); + + + + diff --git a/node_modules/bytes/History.md b/node_modules/bytes/History.md new file mode 100644 index 0000000..cf6a5bb --- /dev/null +++ b/node_modules/bytes/History.md @@ -0,0 +1,87 @@ +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: 
bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add negative support. fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/node_modules/bytes/LICENSE b/node_modules/bytes/LICENSE new file mode 100644 index 0000000..63e95a9 --- /dev/null +++ b/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md new file mode 100644 index 0000000..6ad1ec6 --- /dev/null +++ b/node_modules/bytes/Readme.md @@ -0,0 +1,126 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `.`... Default value to `''`. 
| +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes(1000); +// output: '1000B' + +bytes(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes(1024, {unitSeparator: ' '}); +// output: '1 KB' + +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. If no unit is given, or `value` +is a number, it is assumed the value is in bytes. + +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. + +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. | + +**Example** + +```js +bytes('1KB'); +// output: 1024 + +bytes('1024'); +// output: 1024 + +bytes(1024); +// output: 1KB +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/node/bytes +[npm-url]: https://npmjs.org/package/bytes +[travis-image]: https://badgen.net/travis/visionmedia/bytes.js/master +[travis-url]: https://travis-ci.org/visionmedia/bytes.js diff --git a/node_modules/bytes/index.js b/node_modules/bytes/index.js new file mode 100644 index 0000000..4975bfb --- /dev/null +++ b/node_modules/bytes/index.js @@ -0,0 +1,162 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. + * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. + * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. 
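+ *
+ * A string input is parsed to a number of bytes and a numeric input is
+ * formatted to a string, so the two directions round-trip, e.g.
+ * bytes('1KB') gives 1024 and bytes(1024) gives '1KB'.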
+ * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. + * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.replace(formatThousandsRegExp, thousandsSeparator); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. 
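+ *
+ * Units are powers of two, so for example parse('1.5kb') yields
+ * Math.floor(1.5 * 1024) = 1536, while parse('1024') yields 1024.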
+ * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json new file mode 100644 index 0000000..31c9f1d --- /dev/null +++ b/node_modules/bytes/package.json @@ -0,0 +1,84 @@ +{ + "_from": "bytes@3.1.0", + "_id": "bytes@3.1.0", + "_inBundle": false, + "_integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", + "_location": "/bytes", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "bytes@3.1.0", + "name": "bytes", + "escapedName": "bytes", + "rawSpec": "3.1.0", + "saveSpec": null, + "fetchSpec": "3.1.0" + }, + "_requiredBy": [ + "/body-parser", + "/raw-body" + ], + "_resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "_shasum": "f6cf7933a360e0588fa9fde85651cdc7f805d1f6", + "_spec": "bytes@3.1.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/body-parser", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bugs": { + "url": "https://github.com/visionmedia/bytes.js/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jed Watson", + "email": "jed.watson@me.com" + }, + { + "name": "Théo FIDRY", + "email": "theo.fidry@gmail.com" + } + ], + "deprecated": false, + "description": "Utility to parse a string bytes to bytes and vice-versa", + "devDependencies": { + "eslint": "5.12.1", + "mocha": "5.2.0", + "nyc": "13.1.0" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "homepage": "https://github.com/visionmedia/bytes.js#readme", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "license": "MIT", + "name": "bytes", + "repository": { + "type": "git", + "url": "git+https://github.com/visionmedia/bytes.js.git" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "version": "3.1.0" +} diff --git a/node_modules/chalk/index.js b/node_modules/chalk/index.js new file mode 100644 index 0000000..2d85a91 --- /dev/null +++ b/node_modules/chalk/index.js @@ -0,0 +1,116 @@ +'use strict'; +var escapeStringRegexp = require('escape-string-regexp'); +var ansiStyles = require('ansi-styles'); +var stripAnsi = require('strip-ansi'); +var hasAnsi = require('has-ansi'); +var supportsColor = require('supports-color'); +var defineProps = Object.defineProperties; +var isSimpleWindowsTerm = process.platform === 'win32' && !/^xterm/i.test(process.env.TERM); + +function Chalk(options) { + // detect mode if not set manually + this.enabled = !options || options.enabled === undefined ? 
supportsColor : options.enabled; +} + +// use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001b[94m'; +} + +var styles = (function () { + var ret = {}; + + Object.keys(ansiStyles).forEach(function (key) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + ret[key] = { + get: function () { + return build.call(this, this._styles.concat(key)); + } + }; + }); + + return ret; +})(); + +var proto = defineProps(function chalk() {}, styles); + +function build(_styles) { + var builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder.enabled = this.enabled; + // __proto__ is used because we must return a function, but there is + // no way to create a function with a different prototype. + /* eslint-disable no-proto */ + builder.__proto__ = proto; + + return builder; +} + +function applyStyle() { + // support varags, but simply cast to string in case there's only one arg + var args = arguments; + var argsLen = args.length; + var str = argsLen !== 0 && String(arguments[0]); + + if (argsLen > 1) { + // don't slice `arguments`, it prevents v8 optimizations + for (var a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || !str) { + return str; + } + + var nestedStyles = this._styles; + var i = nestedStyles.length; + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + var originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && (nestedStyles.indexOf('gray') !== -1 || nestedStyles.indexOf('grey') !== -1)) { + ansiStyles.dim.open = ''; + } + + while (i--) { + var code = ansiStyles[nestedStyles[i]]; + + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + } + + // Reset the original 'dim' if we changed it to work around the Windows dimmed gray issue. + ansiStyles.dim.open = originalDim; + + return str; +} + +function init() { + var ret = {}; + + Object.keys(styles).forEach(function (name) { + ret[name] = { + get: function () { + return build.call(this, [name]); + } + }; + }); + + return ret; +} + +defineProps(Chalk.prototype, init()); + +module.exports = new Chalk(); +module.exports.styles = ansiStyles; +module.exports.hasColor = hasAnsi; +module.exports.stripColor = stripAnsi; +module.exports.supportsColor = supportsColor; diff --git a/node_modules/chalk/license b/node_modules/chalk/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/chalk/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json new file mode 100644 index 0000000..dc9a10a --- /dev/null +++ b/node_modules/chalk/package.json @@ -0,0 +1,114 @@ +{ + "_from": "chalk@^1.0.0", + "_id": "chalk@1.1.3", + "_inBundle": false, + "_integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "_location": "/chalk", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "chalk@^1.0.0", + "name": "chalk", + "escapedName": "chalk", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/tap-spec" + ], + "_resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "_shasum": "a8115c55e4a702fe4d150abd3872822a7e09fc98", + "_spec": "chalk@^1.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/tap-spec", + "bugs": { + "url": "https://github.com/chalk/chalk/issues" + }, + "bundleDependencies": false, + "dependencies": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + }, + "deprecated": false, + "description": "Terminal string styling done right. Much color.", + "devDependencies": { + "coveralls": "^2.11.2", + "matcha": "^0.6.0", + "mocha": "*", + "nyc": "^3.0.0", + "require-uncached": "^1.0.2", + "resolve-from": "^1.0.0", + "semver": "^4.3.3", + "xo": "*" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/chalk/chalk#readme", + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "license": "MIT", + "maintainers": [ + { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + { + "name": "Joshua Appelman", + "email": "jappelman@xebia.com", + "url": "jbnicolai.com" + }, + { + "name": "JD Ballard", + "email": "i.am.qix@gmail.com", + "url": "github.com/qix-" + } + ], + "name": "chalk", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/chalk.git" + }, + "scripts": { + "bench": "matcha benchmark.js", + "coverage": "nyc npm test && nyc report", + "coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls", + "test": "xo && mocha" + }, + "version": "1.1.3", + "xo": { + "envs": [ + "node", + "mocha" + ] + } +} diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md new file mode 100644 index 0000000..5cf111e --- /dev/null +++ b/node_modules/chalk/readme.md @@ -0,0 +1,213 @@ +

+<h1 align="center">chalk</h1>
+ +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) +[![Coverage Status](https://coveralls.io/repos/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/r/chalk/chalk?branch=master) +[![](http://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) + + +[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough. + +**Chalk is a clean and focused alternative.** + +![](https://github.com/chalk/ansi-styles/raw/master/screenshot.png) + + +## Why + +- Highly performant +- Doesn't extend `String.prototype` +- Expressive API +- Ability to nest styles +- Clean and focused +- Auto-detects color support +- Actively maintained +- [Used by ~4500 modules](https://www.npmjs.com/browse/depended/chalk) as of July 15, 2015 + + +## Install + +``` +$ npm install --save chalk +``` + + +## Usage + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +var chalk = require('chalk'); + +// style a string +chalk.blue('Hello world!'); + +// combine styled and normal strings +chalk.blue('Hello') + 'World' + chalk.red('!'); + +// compose multiple styles using the chainable API +chalk.blue.bgRed.bold('Hello world!'); + +// pass in multiple arguments +chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz'); + +// nest styles +chalk.red('Hello', chalk.underline.bgBlue('world') + '!'); + +// nest styles of the same type even (color, underline, background) +chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +); +``` + +Easily define your own themes. + +```js +var chalk = require('chalk'); +var error = chalk.bold.red; +console.log(error('Error!')); +``` + +Take advantage of console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data). 
+ +```js +var name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> Hello Sindre +``` + + +## API + +### chalk.` + + + + + + diff --git a/node_modules/tape/example/static/server.js b/node_modules/tape/example/static/server.js new file mode 100644 index 0000000..80cea43 --- /dev/null +++ b/node_modules/tape/example/static/server.js @@ -0,0 +1,4 @@ +var http = require('http'); +var ecstatic = require('ecstatic')(__dirname); +var server = http.createServer(ecstatic); +server.listen(8000); diff --git a/node_modules/tape/example/stream/object.js b/node_modules/tape/example/stream/object.js new file mode 100644 index 0000000..20f0819 --- /dev/null +++ b/node_modules/tape/example/stream/object.js @@ -0,0 +1,10 @@ +var test = require('../../'); +var path = require('path'); + +test.createStream({ objectMode: true }).on('data', function (row) { + console.log(JSON.stringify(row)); +}); + +process.argv.slice(2).forEach(function (file) { + require(path.resolve(file)); +}); diff --git a/node_modules/tape/example/stream/tap.js b/node_modules/tape/example/stream/tap.js new file mode 100644 index 0000000..9ea9ff7 --- /dev/null +++ b/node_modules/tape/example/stream/tap.js @@ -0,0 +1,8 @@ +var test = require('../../'); +var path = require('path'); + +test.createStream().pipe(process.stdout); + +process.argv.slice(2).forEach(function (file) { + require(path.resolve(file)); +}); diff --git a/node_modules/tape/example/stream/test/x.js b/node_modules/tape/example/stream/test/x.js new file mode 100644 index 0000000..7dbb98a --- /dev/null +++ b/node_modules/tape/example/stream/test/x.js @@ -0,0 +1,5 @@ +var test = require('../../../'); +test(function (t) { + t.plan(1); + t.equal('beep', 'boop'); +}); diff --git a/node_modules/tape/example/stream/test/y.js b/node_modules/tape/example/stream/test/y.js new file mode 100644 index 0000000..28606d5 --- /dev/null +++ b/node_modules/tape/example/stream/test/y.js @@ -0,0 +1,11 @@ +var test = require('../../../'); +test(function (t) { + t.plan(2); + t.equal(1+1, 2); + t.ok(true); +}); + +test('wheee', function (t) { + t.ok(true); + t.end(); +}); diff --git a/node_modules/tape/example/throw.js b/node_modules/tape/example/throw.js new file mode 100644 index 0000000..5fa8b94 --- /dev/null +++ b/node_modules/tape/example/throw.js @@ -0,0 +1,10 @@ +var falafel = require('falafel'); +var test = require('../'); + +test('throw', function (t) { + t.plan(2); + + setTimeout(function () { + throw new Error('doom'); + }, 100); +}); diff --git a/node_modules/tape/example/timing.js b/node_modules/tape/example/timing.js new file mode 100644 index 0000000..614c144 --- /dev/null +++ b/node_modules/tape/example/timing.js @@ -0,0 +1,12 @@ +var test = require('../'); + +test('timing test', function (t) { + t.plan(2); + + t.equal(typeof Date.now, 'function'); + var start = new Date; + + setTimeout(function () { + t.equal(new Date - start, 100); + }, 100); +}); diff --git a/node_modules/tape/example/too_many.js b/node_modules/tape/example/too_many.js new file mode 100644 index 0000000..cdcb5ee --- /dev/null +++ b/node_modules/tape/example/too_many.js @@ -0,0 +1,35 @@ +var falafel = require('falafel'); +var test = require('../'); + +test('array', function (t) { + t.plan(3); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + 
[ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); +}); diff --git a/node_modules/tape/example/two.js b/node_modules/tape/example/two.js new file mode 100644 index 0000000..78e49c3 --- /dev/null +++ b/node_modules/tape/example/two.js @@ -0,0 +1,18 @@ +var test = require('../'); + +test('one', function (t) { + t.plan(2); + t.ok(true); + setTimeout(function () { + t.equal(1+3, 4); + }, 100); +}); + +test('two', function (t) { + t.plan(3); + t.equal(5, 2+3); + setTimeout(function () { + t.equal('a'.charCodeAt(0), 97); + t.ok(true); + }, 50); +}); diff --git a/node_modules/tape/index.js b/node_modules/tape/index.js new file mode 100644 index 0000000..e326a96 --- /dev/null +++ b/node_modules/tape/index.js @@ -0,0 +1,157 @@ +var defined = require('defined'); +var createDefaultStream = require('./lib/default_stream'); +var Test = require('./lib/test'); +var createResult = require('./lib/results'); +var through = require('through'); + +var canEmitExit = typeof process !== 'undefined' && process + && typeof process.on === 'function' && process.browser !== true +; +var canExit = typeof process !== 'undefined' && process + && typeof process.exit === 'function' +; + +var nextTick = typeof setImmediate !== 'undefined' + ? setImmediate + : process.nextTick +; + +exports = module.exports = (function () { + var harness; + var lazyLoad = function () { + return getHarness().apply(this, arguments); + }; + + lazyLoad.only = function () { + return getHarness().only.apply(this, arguments); + }; + + lazyLoad.createStream = function (opts) { + if (!opts) opts = {}; + if (!harness) { + var output = through(); + getHarness({ stream: output, objectMode: opts.objectMode }); + return output; + } + return harness.createStream(opts); + }; + + lazyLoad.onFinish = function () { + return getHarness().onFinish.apply(this, arguments); + }; + + lazyLoad.onFailure = function () { + return getHarness().onFailure.apply(this, arguments); + }; + + lazyLoad.getHarness = getHarness; + + return lazyLoad; + + function getHarness(opts) { + if (!opts) opts = {}; + opts.autoclose = !canEmitExit; + if (!harness) harness = createExitHarness(opts); + return harness; + } +})(); + +function createExitHarness(conf) { + if (!conf) conf = {}; + var harness = createHarness({ + autoclose: defined(conf.autoclose, false) + }); + + var stream = harness.createStream({ objectMode: conf.objectMode }); + var es = stream.pipe(conf.stream || createDefaultStream()); + if (canEmitExit) { + es.on('error', function (err) { harness._exitCode = 1; }); + } + + var ended = false; + stream.on('end', function () { ended = true; }); + + if (conf.exit === false) return harness; + if (!canEmitExit || !canExit) return harness; + + process.on('exit', function (code) { + // let the process exit cleanly. 
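+        // a non-zero code means something has already failed elsewhere, so
+        // leave it alone: skip the forced run of pending tests below and do
+        // not override the exit code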
+ if (code !== 0) { + return; + } + + if (!ended) { + var only = harness._results._only; + for (var i = 0; i < harness._tests.length; i++) { + var t = harness._tests[i]; + if (only && t !== only) continue; + t._exit(); + } + } + harness.close(); + process.exit(code || harness._exitCode); + }); + + return harness; +} + +exports.createHarness = createHarness; +exports.Test = Test; +exports.test = exports; // tap compat +exports.test.skip = Test.skip; + +var exitInterval; + +function createHarness(conf_) { + if (!conf_) conf_ = {}; + var results = createResult(); + if (conf_.autoclose !== false) { + results.once('done', function () { results.close(); }); + } + + var test = function (name, conf, cb) { + var t = new Test(name, conf, cb); + test._tests.push(t); + + (function inspectCode(st) { + st.on('test', function sub(st_) { + inspectCode(st_); + }); + st.on('result', function (r) { + if (!r.todo && !r.ok && typeof r !== 'string') test._exitCode = 1; + }); + })(t); + + results.push(t); + return t; + }; + test._results = results; + + test._tests = []; + + test.createStream = function (opts) { + return results.createStream(opts); + }; + + test.onFinish = function (cb) { + results.on('done', cb); + }; + + test.onFailure = function (cb) { + results.on('fail', cb); + }; + + var only = false; + test.only = function () { + if (only) throw new Error('there can only be one only test'); + only = true; + var t = test.apply(null, arguments); + results.only(t); + return t; + }; + test._exitCode = 0; + + test.close = function () { results.close(); }; + + return test; +} diff --git a/node_modules/tape/lib/default_stream.js b/node_modules/tape/lib/default_stream.js new file mode 100644 index 0000000..2744258 --- /dev/null +++ b/node_modules/tape/lib/default_stream.js @@ -0,0 +1,30 @@ +var through = require('through'); +var fs = require('fs'); + +module.exports = function () { + var line = ''; + var stream = through(write, flush); + return stream; + + function write(buf) { + for (var i = 0; i < buf.length; i++) { + var c = typeof buf === 'string' + ? buf.charAt(i) + : String.fromCharCode(buf[i]) + ; + if (c === '\n') flush(); + else line += c; + } + } + + function flush() { + if (fs.writeSync && /^win/.test(process.platform)) { + try { fs.writeSync(1, line + '\n'); } + catch (e) { stream.emit('error', e); } + } else { + try { console.log(line); } + catch (e) { stream.emit('error', e); } + } + line = ''; + } +}; diff --git a/node_modules/tape/lib/results.js b/node_modules/tape/lib/results.js new file mode 100644 index 0000000..40e8591 --- /dev/null +++ b/node_modules/tape/lib/results.js @@ -0,0 +1,212 @@ +var defined = require('defined'); +var EventEmitter = require('events').EventEmitter; +var inherits = require('inherits'); +var through = require('through'); +var resumer = require('resumer'); +var inspect = require('object-inspect'); +var bind = require('function-bind'); +var has = require('has'); +var regexpTest = bind.call(Function.call, RegExp.prototype.test); +var yamlIndicators = /\:|\-|\?/; +var nextTick = typeof setImmediate !== 'undefined' + ? 
setImmediate + : process.nextTick +; + +module.exports = Results; +inherits(Results, EventEmitter); + +function coalesceWhiteSpaces(str) { + return String(str).replace(/\s+/g, ' '); +} + +function Results() { + if (!(this instanceof Results)) return new Results; + this.count = 0; + this.fail = 0; + this.pass = 0; + this.todo = 0; + this._stream = through(); + this.tests = []; + this._only = null; + this._isRunning = false; +} + +Results.prototype.createStream = function (opts) { + if (!opts) opts = {}; + var self = this; + var output, testId = 0; + if (opts.objectMode) { + output = through(); + self.on('_push', function ontest(t, extra) { + if (!extra) extra = {}; + var id = testId++; + t.once('prerun', function () { + var row = { + type: 'test', + name: t.name, + id: id, + skip: t._skip, + todo: t._todo + }; + if (has(extra, 'parent')) { + row.parent = extra.parent; + } + output.queue(row); + }); + t.on('test', function (st) { + ontest(st, { parent: id }); + }); + t.on('result', function (res) { + res.test = id; + res.type = 'assert'; + output.queue(res); + }); + t.on('end', function () { + output.queue({ type: 'end', test: id }); + }); + }); + self.on('done', function () { output.queue(null); }); + } else { + output = resumer(); + output.queue('TAP version 13\n'); + self._stream.pipe(output); + } + + if (!this._isRunning) { + this._isRunning = true; + nextTick(function next() { + var t; + while (t = getNextTest(self)) { + t.run(); + if (!t.ended) return t.once('end', function () { nextTick(next); }); + } + self.emit('done'); + }); + } + + return output; +}; + +Results.prototype.push = function (t) { + var self = this; + self.tests.push(t); + self._watch(t); + self.emit('_push', t); +}; + +Results.prototype.only = function (t) { + this._only = t; +}; + +Results.prototype._watch = function (t) { + var self = this; + var write = function (s) { self._stream.queue(s); }; + t.once('prerun', function () { + var premsg = ''; + if (t._skip) premsg = 'SKIP '; + else if (t._todo) premsg = 'TODO '; + write('# ' + premsg + coalesceWhiteSpaces(t.name) + '\n'); + }); + + t.on('result', function (res) { + if (typeof res === 'string') { + write('# ' + res + '\n'); + return; + } + write(encodeResult(res, self.count + 1)); + self.count ++; + + if (res.ok || res.todo) self.pass ++; + else { + self.fail ++; + self.emit('fail'); + } + }); + + t.on('test', function (st) { self._watch(st); }); +}; + +Results.prototype.close = function () { + var self = this; + if (self.closed) self._stream.emit('error', new Error('ALREADY CLOSED')); + self.closed = true; + var write = function (s) { self._stream.queue(s); }; + + write('\n1..' + self.count + '\n'); + write('# tests ' + self.count + '\n'); + write('# pass ' + (self.pass + self.todo) + '\n'); + if (self.todo) write('# todo ' + self.todo + '\n'); + if (self.fail) write('# fail ' + self.fail + '\n'); + else write('\n# ok\n'); + + self._stream.queue(null); +}; + +function encodeResult(res, count) { + var output = ''; + output += (res.ok ? 'ok ' : 'not ok ') + count; + output += res.name ? ' ' + coalesceWhiteSpaces(res.name) : ''; + + if (res.skip) { + output += ' # SKIP' + ((typeof res.skip === 'string') ? ' ' + coalesceWhiteSpaces(res.skip) : ''); + } else if (res.todo) { + output += ' # TODO' + ((typeof res.todo === 'string') ? 
' ' + coalesceWhiteSpaces(res.todo) : ''); + }; + + output += '\n'; + if (res.ok) return output; + + var outer = ' '; + var inner = outer + ' '; + output += outer + '---\n'; + output += inner + 'operator: ' + res.operator + '\n'; + + if (has(res, 'expected') || has(res, 'actual')) { + var ex = inspect(res.expected, {depth: res.objectPrintDepth}); + var ac = inspect(res.actual, {depth: res.objectPrintDepth}); + + if (Math.max(ex.length, ac.length) > 65 || invalidYaml(ex) || invalidYaml(ac)) { + output += inner + 'expected: |-\n' + inner + ' ' + ex + '\n'; + output += inner + 'actual: |-\n' + inner + ' ' + ac + '\n'; + } else { + output += inner + 'expected: ' + ex + '\n'; + output += inner + 'actual: ' + ac + '\n'; + } + } + if (res.at) { + output += inner + 'at: ' + res.at + '\n'; + } + + var actualStack = res.actual && (typeof res.actual === 'object' || typeof res.actual === 'function') ? res.actual.stack : undefined; + var errorStack = res.error && res.error.stack; + var stack = defined(actualStack, errorStack); + if (stack) { + var lines = String(stack).split('\n'); + output += inner + 'stack: |-\n'; + for (var i = 0; i < lines.length; i++) { + output += inner + ' ' + lines[i] + '\n'; + } + } + + output += outer + '...\n'; + return output; +} + +function getNextTest(results) { + if (!results._only) { + return results.tests.shift(); + } + + do { + var t = results.tests.shift(); + if (!t) continue; + if (results._only === t) { + return t; + } + } while (results.tests.length !== 0); +} + +function invalidYaml(str) { + return regexpTest(yamlIndicators, str); +} diff --git a/node_modules/tape/lib/test.js b/node_modules/tape/lib/test.js new file mode 100644 index 0000000..652a4cd --- /dev/null +++ b/node_modules/tape/lib/test.js @@ -0,0 +1,591 @@ +var deepEqual = require('deep-equal'); +var defined = require('defined'); +var path = require('path'); +var inherits = require('inherits'); +var EventEmitter = require('events').EventEmitter; +var has = require('has'); +var isRegExp = require('is-regex'); +var trim = require('string.prototype.trim'); +var bind = require('function-bind'); +var forEach = require('for-each'); +var inspect = require('object-inspect'); +var isEnumerable = bind.call(Function.call, Object.prototype.propertyIsEnumerable); +var toLowerCase = bind.call(Function.call, String.prototype.toLowerCase); +var $test = bind.call(Function.call, RegExp.prototype.test); + +module.exports = Test; + +var nextTick = typeof setImmediate !== 'undefined' + ? setImmediate + : process.nextTick; +var safeSetTimeout = setTimeout; +var safeClearTimeout = clearTimeout; + +inherits(Test, EventEmitter); + +var getTestArgs = function (name_, opts_, cb_) { + var name = '(anonymous)'; + var opts = {}; + var cb; + + for (var i = 0; i < arguments.length; i++) { + var arg = arguments[i]; + var t = typeof arg; + if (t === 'string') { + name = arg; + } else if (t === 'object') { + opts = arg || opts; + } else if (t === 'function') { + cb = arg; + } + } + return { name: name, opts: opts, cb: cb }; +}; + +function Test(name_, opts_, cb_) { + if (! 
(this instanceof Test)) { + return new Test(name_, opts_, cb_); + } + + var args = getTestArgs(name_, opts_, cb_); + + this.readable = true; + this.name = args.name || '(anonymous)'; + this.assertCount = 0; + this.pendingCount = 0; + this._skip = args.opts.skip || false; + this._todo = args.opts.todo || false; + this._timeout = args.opts.timeout; + this._plan = undefined; + this._cb = args.cb; + this._progeny = []; + this._ok = true; + var depthEnvVar = process.env.NODE_TAPE_OBJECT_PRINT_DEPTH; + if (args.opts.objectPrintDepth) { + this._objectPrintDepth = args.opts.objectPrintDepth; + } else if (depthEnvVar) { + if (toLowerCase(depthEnvVar) === 'infinity') { + this._objectPrintDepth = Infinity; + } else { + this._objectPrintDepth = depthEnvVar; + } + } else { + this._objectPrintDepth = 5; + } + + for (var prop in this) { + this[prop] = (function bind(self, val) { + if (typeof val === 'function') { + return function bound() { + return val.apply(self, arguments); + }; + } + return val; + })(this, this[prop]); + } +} + +Test.prototype.run = function () { + this.emit('prerun'); + if (!this._cb || this._skip) { + return this._end(); + } + if (this._timeout != null) { + this.timeoutAfter(this._timeout); + } + this._cb(this); + this.emit('run'); +}; + +Test.prototype.test = function (name, opts, cb) { + var self = this; + var t = new Test(name, opts, cb); + this._progeny.push(t); + this.pendingCount++; + this.emit('test', t); + t.on('prerun', function () { + self.assertCount++; + }); + + if (!self._pendingAsserts()) { + nextTick(function () { + self._end(); + }); + } + + nextTick(function () { + if (!self._plan && self.pendingCount == self._progeny.length) { + self._end(); + } + }); +}; + +Test.prototype.comment = function (msg) { + var that = this; + forEach(trim(msg).split('\n'), function (aMsg) { + that.emit('result', trim(aMsg).replace(/^#\s*/, '')); + }); +}; + +Test.prototype.plan = function (n) { + this._plan = n; + this.emit('plan', n); +}; + +Test.prototype.timeoutAfter = function (ms) { + if (!ms) throw new Error('timeoutAfter requires a timespan'); + var self = this; + var timeout = safeSetTimeout(function () { + self.fail('test timed out after ' + ms + 'ms'); + self.end(); + }, ms); + this.once('end', function () { + safeClearTimeout(timeout); + }); +}; + +Test.prototype.end = function (err) { + var self = this; + if (arguments.length >= 1 && !!err) { + this.ifError(err); + } + + if (this.calledEnd) { + this.fail('.end() called twice'); + } + this.calledEnd = true; + this._end(); +}; + +Test.prototype._end = function (err) { + var self = this; + if (this._progeny.length) { + var t = this._progeny.shift(); + t.on('end', function () { self._end(); }); + t.run(); + return; + } + + if (!this.ended) this.emit('end'); + var pendingAsserts = this._pendingAsserts(); + if (!this._planError && this._plan !== undefined && pendingAsserts) { + this._planError = true; + this.fail('plan != count', { + expected: this._plan, + actual: this.assertCount + }); + } + this.ended = true; +}; + +Test.prototype._exit = function () { + if (this._plan !== undefined && + !this._planError && this.assertCount !== this._plan) { + this._planError = true; + this.fail('plan != count', { + expected: this._plan, + actual: this.assertCount, + exiting: true + }); + } else if (!this.ended) { + this.fail('test exited without ending', { + exiting: true + }); + } +}; + +Test.prototype._pendingAsserts = function () { + if (this._plan === undefined) { + return 1; + } + return this._plan - (this._progeny.length + 
this.assertCount); +}; + +Test.prototype._assert = function assert(ok, opts) { + var self = this; + var extra = opts.extra || {}; + + ok = !!ok || !!extra.skip; + + var res = { + id: self.assertCount++, + ok: ok, + skip: defined(extra.skip, opts.skip), + todo: defined(extra.todo, opts.todo, self._todo), + name: defined(extra.message, opts.message, '(unnamed assert)'), + operator: defined(extra.operator, opts.operator), + objectPrintDepth: self._objectPrintDepth + }; + if (has(opts, 'actual') || has(extra, 'actual')) { + res.actual = defined(extra.actual, opts.actual); + } + if (has(opts, 'expected') || has(extra, 'expected')) { + res.expected = defined(extra.expected, opts.expected); + } + this._ok = !!(this._ok && ok); + + if (!ok && !res.todo) { + res.error = defined(extra.error, opts.error, new Error(res.name)); + } + + if (!ok) { + var e = new Error('exception'); + var err = (e.stack || '').split('\n'); + var dir = __dirname + path.sep; + + for (var i = 0; i < err.length; i++) { + /* + Stack trace lines may resemble one of the following. We need + to correctly extract a function name (if any) and + path / line no. for each line. + + at myFunction (/path/to/file.js:123:45) + at myFunction (/path/to/file.other-ext:123:45) + at myFunction (/path to/file.js:123:45) + at myFunction (C:\path\to\file.js:123:45) + at myFunction (/path/to/file.js:123) + at Test.<anonymous> (/path/to/file.js:123:45) + at Test.bound [as run] (/path/to/file.js:123:45) + at /path/to/file.js:123:45 + + Regex has three parts. First is non-capturing group for 'at ' + (plus anything preceding it). + + /^(?:[^\s]*\s*\bat\s+)/ + + Second captures function call description (optional). This is + not necessarily a valid JS function name, but just what the + stack trace is using to represent a function call. It may look + like `<anonymous>` or 'Test.bound [as run]'. + + For our purposes, we assume that, if there is a function + name, it's everything leading up to the first open + parentheses (trimmed) before our pathname. + + /(?:(.*)\s+\()?/ + + Last part captures file path plus line no (and optional + column no). + + /((?:\/|[a-zA-Z]:\\)[^:\)]+:(\d+)(?::(\d+))?)/ + */ + var re = /^(?:[^\s]*\s*\bat\s+)(?:(.*)\s+\()?((?:\/|[a-zA-Z]:\\)[^:\)]+:(\d+)(?::(\d+))?)\)$/; + var lineWithTokens = err[i].replace(process.cwd(), '/\$CWD').replace(__dirname, '/\$TEST'); + var m = re.exec(lineWithTokens); + + if (!m) { + continue; + } + + var callDescription = m[1] || ''; + var filePath = m[2].replace('/$CWD', process.cwd()).replace('/$TEST', __dirname); + + if (filePath.slice(0, dir.length) === dir) { + continue; + } + + // Function call description may not (just) be a function name. + // Try to extract function name by looking at first "word" only. 
+ res.functionName = callDescription.split(/\s+/)[0]; + res.file = filePath; + res.line = Number(m[3]); + if (m[4]) res.column = Number(m[4]); + + res.at = callDescription + ' (' + filePath + ')'; + break; + } + } + + self.emit('result', res); + + var pendingAsserts = self._pendingAsserts(); + if (!pendingAsserts) { + if (extra.exiting) { + self._end(); + } else { + nextTick(function () { + self._end(); + }); + } + } + + if (!self._planError && pendingAsserts < 0) { + self._planError = true; + self.fail('plan != count', { + expected: self._plan, + actual: self._plan - pendingAsserts + }); + } +}; + +Test.prototype.fail = function (msg, extra) { + this._assert(false, { + message: msg, + operator: 'fail', + extra: extra + }); +}; + +Test.prototype.pass = function (msg, extra) { + this._assert(true, { + message: msg, + operator: 'pass', + extra: extra + }); +}; + +Test.prototype.skip = function (msg, extra) { + this._assert(true, { + message: msg, + operator: 'skip', + skip: true, + extra: extra + }); +}; + +function assert(value, msg, extra) { + this._assert(value, { + message: defined(msg, 'should be truthy'), + operator: 'ok', + expected: true, + actual: value, + extra: extra + }); +} +Test.prototype.ok += Test.prototype['true'] += Test.prototype.assert += assert; + +function notOK(value, msg, extra) { + this._assert(!value, { + message: defined(msg, 'should be falsy'), + operator: 'notOk', + expected: false, + actual: value, + extra: extra + }); +} +Test.prototype.notOk += Test.prototype['false'] += Test.prototype.notok += notOK; + +function error(err, msg, extra) { + this._assert(!err, { + message: defined(msg, String(err)), + operator: 'error', + actual: err, + extra: extra + }); +} +Test.prototype.error += Test.prototype.ifError += Test.prototype.ifErr += Test.prototype.iferror += error; + +function equal(a, b, msg, extra) { + this._assert(a === b, { + message: defined(msg, 'should be equal'), + operator: 'equal', + actual: a, + expected: b, + extra: extra + }); +} +Test.prototype.equal += Test.prototype.equals += Test.prototype.isEqual += Test.prototype.is += Test.prototype.strictEqual += Test.prototype.strictEquals += equal; + +function notEqual(a, b, msg, extra) { + this._assert(a !== b, { + message: defined(msg, 'should not be equal'), + operator: 'notEqual', + actual: a, + expected: b, + extra: extra + }); +} +Test.prototype.notEqual += Test.prototype.notEquals += Test.prototype.notStrictEqual += Test.prototype.notStrictEquals += Test.prototype.isNotEqual += Test.prototype.isNot += Test.prototype.not += Test.prototype.doesNotEqual += Test.prototype.isInequal += notEqual; + +function tapeDeepEqual(a, b, msg, extra) { + this._assert(deepEqual(a, b, { strict: true }), { + message: defined(msg, 'should be equivalent'), + operator: 'deepEqual', + actual: a, + expected: b, + extra: extra + }); +} +Test.prototype.deepEqual += Test.prototype.deepEquals += Test.prototype.isEquivalent += Test.prototype.same += tapeDeepEqual; + +function deepLooseEqual(a, b, msg, extra) { + this._assert(deepEqual(a, b), { + message: defined(msg, 'should be equivalent'), + operator: 'deepLooseEqual', + actual: a, + expected: b, + extra: extra + }); +} +Test.prototype.deepLooseEqual += Test.prototype.looseEqual += Test.prototype.looseEquals += deepLooseEqual; + +function notDeepEqual(a, b, msg, extra) { + this._assert(!deepEqual(a, b, { strict: true }), { + message: defined(msg, 'should not be equivalent'), + operator: 'notDeepEqual', + actual: a, + expected: b, + extra: extra + }); +} 
+Test.prototype.notDeepEqual += Test.prototype.notDeepEquals += Test.prototype.notEquivalent += Test.prototype.notDeeply += Test.prototype.notSame += Test.prototype.isNotDeepEqual += Test.prototype.isNotDeeply += Test.prototype.isNotEquivalent += Test.prototype.isInequivalent += notDeepEqual; + +function notDeepLooseEqual(a, b, msg, extra) { + this._assert(!deepEqual(a, b), { + message: defined(msg, 'should be equivalent'), + operator: 'notDeepLooseEqual', + actual: a, + expected: b, + extra: extra + }); +} +Test.prototype.notDeepLooseEqual += Test.prototype.notLooseEqual += Test.prototype.notLooseEquals += notDeepLooseEqual; + +Test.prototype['throws'] = function (fn, expected, msg, extra) { + if (typeof expected === 'string') { + msg = expected; + expected = undefined; + } + + var caught = undefined; + + try { + fn(); + } catch (err) { + caught = { error: err }; + if ((err != null) && (!isEnumerable(err, 'message') || !has(err, 'message'))) { + var message = err.message; + delete err.message; + err.message = message; + } + } + + var passed = caught; + + if (isRegExp(expected)) { + passed = expected.test(caught && caught.error); + expected = String(expected); + } + + if (typeof expected === 'function' && caught) { + passed = caught.error instanceof expected; + } + + this._assert(typeof fn === 'function' && passed, { + message: defined(msg, 'should throw'), + operator: 'throws', + actual: caught && caught.error, + expected: expected, + error: !passed && caught && caught.error, + extra: extra + }); +}; + +Test.prototype.doesNotThrow = function (fn, expected, msg, extra) { + if (typeof expected === 'string') { + msg = expected; + expected = undefined; + } + var caught = undefined; + try { + fn(); + } + catch (err) { + caught = { error: err }; + } + this._assert(!caught, { + message: defined(msg, 'should not throw'), + operator: 'throws', + actual: caught && caught.error, + expected: expected, + error: caught && caught.error, + extra: extra + }); +}; + +Test.prototype.match = function match(string, regexp, msg, extra) { + if (!isRegExp(regexp)) { + throw new TypeError('The "regexp" argument must be an instance of RegExp. Received type ' + typeof regexp + ' (' + inspect(regexp) + ')'); + } + if (typeof string !== 'string') { + throw new TypeError('The "string" argument must be of type string. Received type ' + typeof string + ' (' + inspect(string) + ')'); + } + + var matches = $test(regexp, string); + this._assert(matches, { + message: defined(msg, 'The input did not match the regular expression ' + inspect(regexp) + '. Input: ' + inspect(string)), + operator: 'match', + actual: string, + expected: regexp, + extra: extra + }); +}; + +Test.prototype.doesNotMatch = function doesNotMatch(string, regexp, msg, extra) { + if (!isRegExp(regexp)) { + throw new TypeError('The "regexp" argument must be an instance of RegExp. Received type ' + typeof regexp + ' (' + inspect(regexp) + ')'); + } + if (typeof string !== 'string') { + throw new TypeError('The "string" argument must be of type string. Received type ' + typeof string + ' (' + inspect(string) + ')'); + } + var matches = $test(regexp, string); + this._assert(!matches, { + message: defined(msg, 'The input was expected to not match the regular expression ' + inspect(regexp) + '. 
Input: ' + inspect(string)), + operator: 'doesNotMatch', + actual: string, + expected: regexp, + extra: extra + }); +}; + +Test.skip = function (name_, _opts, _cb) { + var args = getTestArgs.apply(null, arguments); + args.opts.skip = true; + return Test(args.name, args.opts, args.cb); +}; + +// vim: set softtabstop=4 shiftwidth=4: diff --git a/node_modules/tape/node_modules/inherits/LICENSE b/node_modules/tape/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/node_modules/tape/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/tape/node_modules/inherits/README.md b/node_modules/tape/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/node_modules/tape/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. 
+ +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/tape/node_modules/inherits/inherits.js b/node_modules/tape/node_modules/inherits/inherits.js new file mode 100644 index 0000000..f71f2d9 --- /dev/null +++ b/node_modules/tape/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/tape/node_modules/inherits/inherits_browser.js b/node_modules/tape/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..86bbb3d --- /dev/null +++ b/node_modules/tape/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/tape/node_modules/inherits/package.json b/node_modules/tape/node_modules/inherits/package.json new file mode 100644 index 0000000..e0d11d6 --- /dev/null +++ b/node_modules/tape/node_modules/inherits/package.json @@ -0,0 +1,61 @@ +{ + "_from": "inherits@~2.0.4", + "_id": "inherits@2.0.4", + "_inBundle": false, + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "_location": "/tape/inherits", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "inherits@~2.0.4", + "name": "inherits", + "escapedName": "inherits", + "rawSpec": "~2.0.4", + "saveSpec": null, + "fetchSpec": "~2.0.4" + }, + "_requiredBy": [ + "/tape" + ], + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@~2.0.4", + "_where": "/home/kareml/Desktop/SMS/node_modules/tape", + "browser": "./inherits_browser.js", + "bugs": { + "url": "https://github.com/isaacs/inherits/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ], + "homepage": "https://github.com/isaacs/inherits#readme", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "license": "ISC", + "main": "./inherits.js", + "name": "inherits", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/inherits.git" + }, + "scripts": { + "test": "tap" + }, + "version": "2.0.4" +} diff --git a/node_modules/tape/package.json b/node_modules/tape/package.json new file mode 100644 
index 0000000..f4bc167 --- /dev/null +++ b/node_modules/tape/package.json @@ -0,0 +1,105 @@ +{ + "_from": "tape", + "_id": "tape@4.13.0", + "_inBundle": false, + "_integrity": "sha512-J/hvA+GJnuWJ0Sj8Z0dmu3JgMNU+MmusvkCT7+SN4/2TklW18FNCp/UuHIEhPZwHfy4sXfKYgC7kypKg4umbOw==", + "_location": "/tape", + "_phantomChildren": {}, + "_requested": { + "type": "tag", + "registry": true, + "raw": "tape", + "name": "tape", + "escapedName": "tape", + "rawSpec": "", + "saveSpec": null, + "fetchSpec": "latest" + }, + "_requiredBy": [ + "#DEV:/", + "#USER" + ], + "_resolved": "https://registry.npmjs.org/tape/-/tape-4.13.0.tgz", + "_shasum": "e2f581ff5f12a7cbd787e9f83c76c2851782fce2", + "_spec": "tape", + "_where": "/home/kareml/Desktop/SMS", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "bin": { + "tape": "bin/tape" + }, + "bugs": { + "url": "https://github.com/substack/tape/issues" + }, + "bundleDependencies": false, + "dependencies": { + "deep-equal": "~1.1.1", + "defined": "~1.0.0", + "dotignore": "~0.1.2", + "for-each": "~0.3.3", + "function-bind": "~1.1.1", + "glob": "~7.1.6", + "has": "~1.0.3", + "inherits": "~2.0.4", + "is-regex": "~1.0.5", + "minimist": "~1.2.0", + "object-inspect": "~1.7.0", + "resolve": "~1.14.2", + "resumer": "~0.0.0", + "string.prototype.trim": "~1.2.1", + "through": "~2.3.8" + }, + "deprecated": false, + "description": "tap-producing test harness for node and browsers", + "devDependencies": { + "concat-stream": "^1.6.2", + "eclint": "^2.8.1", + "eslint": "^6.8.0", + "falafel": "^2.1.0", + "js-yaml": "^3.13.1", + "tap": "^8.0.1", + "tap-parser": "^3.0.5" + }, + "directories": { + "example": "example", + "test": "test" + }, + "homepage": "https://github.com/substack/tape", + "keywords": [ + "tap", + "test", + "harness", + "assert", + "browser" + ], + "license": "MIT", + "main": "index.js", + "name": "tape", + "repository": { + "type": "git", + "url": "git://github.com/substack/tape.git" + }, + "scripts": { + "lint": "eslint .", + "prelint": "eclint check", + "pretest": "npm run lint", + "test": "npm run tests-only", + "tests-only": "tap test/*.js" + }, + "testling": { + "files": "test/browser/*.js", + "browsers": [ + "ie/6..latest", + "chrome/20..latest", + "firefox/10..latest", + "safari/latest", + "opera/11.0..latest", + "iphone/6", + "ipad/6" + ] + }, + "version": "4.13.0" +} diff --git a/node_modules/tape/readme.markdown b/node_modules/tape/readme.markdown new file mode 100644 index 0000000..6854811 --- /dev/null +++ b/node_modules/tape/readme.markdown @@ -0,0 +1,403 @@ +# tape + +tap-producing test harness for node and browsers + +[![browser support](https://ci.testling.com/substack/tape.png)](http://ci.testling.com/substack/tape) + +[![build status](https://secure.travis-ci.org/substack/tape.svg?branch=master)](http://travis-ci.org/substack/tape) + +![tape](https://web.archive.org/web/20170612184731if_/http://substack.net/images/tape_drive.png) + +# example + +``` js +var test = require('tape'); + +test('timing test', function (t) { + t.plan(2); + + t.equal(typeof Date.now, 'function'); + var start = Date.now(); + + setTimeout(function () { + t.equal(Date.now() - start, 100); + }, 100); +}); +``` + +``` +$ node example/timing.js +TAP version 13 +# timing test +ok 1 should be equal +not ok 2 should be equal + --- + operator: equal + expected: 100 + actual: 107 + ... + +1..2 +# tests 2 +# pass 1 +# fail 1 +``` + +# usage + +You always need to `require('tape')` in test files. 
You can run the tests by +usual node means (`require('test-file.js')` or `node test-file.js`). You can +also run tests using the `tape` binary to utilize globbing, on Windows for +example: + +```sh +$ tape tests/**/*.js +``` + +`tape`'s arguments are passed to the +[`glob`](https://www.npmjs.com/package/glob) module. If you want `glob` to +perform the expansion on a system where the shell performs such expansion, quote +the arguments as necessary: + +```sh +$ tape 'tests/**/*.js' +$ tape "tests/**/*.js" +``` + +## Preloading modules + +Additionally, it is possible to make `tape` load one or more modules before running any tests, by using the `-r` or `--require` flag. Here's an example that loads [babel-register](http://babeljs.io/docs/usage/require/) before running any tests, to allow for JIT compilation: + +```sh +$ tape -r babel-register tests/**/*.js +``` + +Depending on the module you're loading, you may be able to parameterize it using environment variables or auxiliary files. Babel, for instance, will load options from [`.babelrc`](http://babeljs.io/docs/usage/babelrc/) at runtime. + +The `-r` flag behaves exactly like node's `require`, and uses the same module resolution algorithm. This means that if you need to load local modules, you have to prepend their path with `./` or `../` accordingly. + +For example: + +```sh +$ tape -r ./my/local/module tests/**/*.js +``` + +Please note that all modules loaded using the `-r` flag will run *before* any tests, regardless of when they are specified. For example, `tape -r a b -r c` will actually load `a` and `c` *before* loading `b`, since they are flagged as required modules. + +# things that go well with tape + +`tape` maintains a fairly minimal core. Additional features are usually added by using another module alongside `tape`. + +## pretty reporters + +The default TAP output is good for machines and humans that are robots. + +If you want a more colorful / pretty output there are lots of modules on npm +that will output something pretty if you pipe TAP into them: + +- [tap-spec](https://github.com/scottcorgan/tap-spec) +- [tap-dot](https://github.com/scottcorgan/tap-dot) +- [faucet](https://github.com/substack/faucet) +- [tap-bail](https://github.com/juliangruber/tap-bail) +- [tap-browser-color](https://github.com/kirbysayshi/tap-browser-color) +- [tap-json](https://github.com/gummesson/tap-json) +- [tap-min](https://github.com/derhuerst/tap-min) +- [tap-nyan](https://github.com/calvinmetcalf/tap-nyan) +- [tap-pessimist](https://www.npmjs.org/package/tap-pessimist) +- [tap-prettify](https://github.com/toolness/tap-prettify) +- [colortape](https://github.com/shuhei/colortape) +- [tap-xunit](https://github.com/aghassemi/tap-xunit) +- [tap-difflet](https://github.com/namuol/tap-difflet) +- [tape-dom](https://github.com/gritzko/tape-dom) +- [tap-diff](https://github.com/axross/tap-diff) +- [tap-notify](https://github.com/axross/tap-notify) +- [tap-summary](https://github.com/zoubin/tap-summary) +- [tap-markdown](https://github.com/Hypercubed/tap-markdown) +- [tap-html](https://github.com/gabrielcsapo/tap-html) +- [tap-react-browser](https://github.com/mcnuttandrew/tap-react-browser) +- [tap-junit](https://github.com/dhershman1/tap-junit) +- [tap-nyc](https://github.com/MegaArman/tap-nyc) + +To use them, try `node test/index.js | tap-spec` or pipe it into one +of the modules of your choice! + +## uncaught exceptions + +By default, uncaught exceptions in your tests will not be intercepted, and will cause `tape` to crash. 
If you find this behavior undesirable, use [`tape-catch`](https://github.com/michaelrhodes/tape-catch) to report any exceptions as TAP errors. + +## other + +- CoffeeScript support with https://www.npmjs.com/package/coffeetape +- Promise support with https://www.npmjs.com/package/blue-tape or https://www.npmjs.com/package/tape-promise +- ES6 support with https://www.npmjs.com/package/babel-tape-runner or https://www.npmjs.com/package/buble-tape-runner +- Different test syntax with https://github.com/pguth/flip-tape (warning: mutates String.prototype) +- Electron test runner with https://github.com/tundrax/electron-tap +- Concurrency support with https://github.com/imsnif/mixed-tape + +# methods + +The assertion methods in `tape` are heavily influenced or copied from the methods +in [node-tap](https://github.com/isaacs/node-tap). + +```js +var test = require('tape') +``` + +## test([name], [opts], cb) + +Create a new test with an optional `name` string and optional `opts` object. +`cb(t)` fires with the new test object `t` once all preceding tests have +finished. Tests execute serially. + +Available `opts` options are: +- opts.skip = true/false. See test.skip. +- opts.timeout = 500. Set a timeout for the test, after which it will fail. See test.timeoutAfter. +- opts.objectPrintDepth = 5. Configure max depth of expected / actual object printing. Environmental variable `NODE_TAPE_OBJECT_PRINT_DEPTH` can set the desired default depth for all tests; locally-set values will take precedence. +- opts.todo = true/false. Test will be allowed to fail. + +If you forget to `t.plan()` out how many assertions you are going to run and you +don't call `t.end()` explicitly, your test will hang. + +## test.skip([name], [opts], cb) + +Generate a new test that will be skipped over. + +## test.onFinish(fn) + +The onFinish hook will get invoked when ALL `tape` tests have finished +right before `tape` is about to print the test summary. + +## test.onFailure(fn) + +The onFailure hook will get invoked whenever any `tape` tests has failed. + +## t.plan(n) + +Declare that `n` assertions should be run. `t.end()` will be called +automatically after the `n`th assertion. If there are any more assertions after +the `n`th, or after `t.end()` is called, they will generate errors. + +## t.end(err) + +Declare the end of a test explicitly. If `err` is passed in `t.end` will assert +that it is falsey. + +## t.fail(msg) + +Generate a failing assertion with a message `msg`. + +## t.pass(msg) + +Generate a passing assertion with a message `msg`. + +## t.timeoutAfter(ms) + +Automatically timeout the test after X ms. + +## t.skip(msg) + +Generate an assertion that will be skipped over. + +## t.ok(value, msg) + +Assert that `value` is truthy with an optional description of the assertion `msg`. + +Aliases: `t.true()`, `t.assert()` + +## t.notOk(value, msg) + +Assert that `value` is falsy with an optional description of the assertion `msg`. + +Aliases: `t.false()`, `t.notok()` + +## t.error(err, msg) + +Assert that `err` is falsy. If `err` is non-falsy, use its `err.message` as the +description message. + +Aliases: `t.ifError()`, `t.ifErr()`, `t.iferror()` + +## t.equal(actual, expected, msg) + +Assert that `actual === expected` with an optional description of the assertion `msg`. + +Aliases: `t.equals()`, `t.isEqual()`, `t.is()`, `t.strictEqual()`, +`t.strictEquals()` + +## t.notEqual(actual, expected, msg) + +Assert that `actual !== expected` with an optional description of the assertion `msg`. 
+ +Aliases: `t.notEquals()`, `t.notStrictEqual()`, `t.notStrictEquals()`, +`t.isNotEqual()`, `t.isNot()`, `t.not()`, `t.doesNotEqual()`, `t.isInequal()` + +## t.deepEqual(actual, expected, msg) + +Assert that `actual` and `expected` have the same structure and nested values using +[node's deepEqual() algorithm](https://github.com/substack/node-deep-equal) +with strict comparisons (`===`) on leaf nodes and an optional description of the assertion `msg`. + +Aliases: `t.deepEquals()`, `t.isEquivalent()`, `t.same()` + +## t.notDeepEqual(actual, expected, msg) + +Assert that `actual` and `expected` do not have the same structure and nested values using +[node's deepEqual() algorithm](https://github.com/substack/node-deep-equal) +with strict comparisons (`===`) on leaf nodes and an optional description of the assertion `msg`. + +Aliases: `t.notDeepEquals`, `t.notEquivalent()`, `t.notDeeply()`, `t.notSame()`, +`t.isNotDeepEqual()`, `t.isNotDeeply()`, `t.isNotEquivalent()`, +`t.isInequivalent()` + +## t.deepLooseEqual(actual, expected, msg) + +Assert that `actual` and `expected` have the same structure and nested values using +[node's deepEqual() algorithm](https://github.com/substack/node-deep-equal) +with loose comparisons (`==`) on leaf nodes and an optional description of the assertion `msg`. + +Aliases: `t.looseEqual()`, `t.looseEquals()` + +## t.notDeepLooseEqual(actual, expected, msg) + +Assert that `actual` and `expected` do not have the same structure and nested values using +[node's deepEqual() algorithm](https://github.com/substack/node-deep-equal) +with loose comparisons (`==`) on leaf nodes and an optional description of the assertion `msg`. + +Aliases: `t.notLooseEqual()`, `t.notLooseEquals()` + +## t.throws(fn, expected, msg) + +Assert that the function call `fn()` throws an exception. `expected`, if present, must be a `RegExp` or `Function`. The `RegExp` matches the string representation of the exception, as generated by `err.toString()`. The `Function` is the exception thrown (e.g. `Error`). `msg` is an optional description of the assertion. + +## t.doesNotThrow(fn, expected, msg) + +Assert that the function call `fn()` does not throw an exception. `expected`, if present, limits what should not be thrown. For example, set `expected` to `/user/` to fail the test only if the string representation of the exception contains the word `user`. Any other exception would pass the test. If `expected` is omitted, any exception will fail the test. `msg` is an optional description of the assertion. + +## t.test(name, [opts], cb) + +Create a subtest with a new test handle `st` from `cb(st)` inside the current +test `t`. `cb(st)` will only fire when `t` finishes. Additional tests queued up +after `t` will not be run until all subtests finish. + +You may pass the same options that [`test()`](#testname-opts-cb) accepts. + +## t.comment(message) + +Print a message without breaking the tap output. (Useful when using e.g. `tap-colorize` where output is buffered & `console.log` will print in incorrect order vis-a-vis tap output.) + +## t.match(string, regexp, message) + +Assert that `string` matches the RegExp `regexp`. Will throw (not just fail) when the first two arguments are the wrong type. + +## t.doesNotMatch(string, regexp, message) + +Assert that `string` does not match the RegExp `regexp`. Will throw (not just fail) when the first two arguments are the wrong type. 
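+
+A minimal sketch of how several of the assertion methods documented above fit
+together in one test; the `user` object is a made-up fixture used purely for
+illustration:
+
+``` js
+var test = require('tape');
+
+test('assertion methods sketch', function (t) {
+    t.plan(5);
+
+    // made-up fixture, only for illustration
+    var user = { name: 'alice', roles: ['admin'] };
+
+    t.equal(user.name, 'alice', 'name should be alice');
+    t.deepEqual(user.roles, ['admin'], 'roles should be equivalent');
+    t.match(user.name, /^a/, 'name should start with "a"');
+    t.throws(function () { throw new TypeError('boom'); }, TypeError, 'should throw a TypeError');
+    t.doesNotThrow(function () { return user.name; }, 'reading a property should not throw');
+});
+```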
+ +## var htest = test.createHarness() + +Create a new test harness instance, which is a function like `test()`, but with +a new pending stack and test state. + +By default the TAP output goes to `console.log()`. You can pipe the output to +someplace else if you `htest.createStream().pipe()` to a destination stream on +the first tick. + +## test.only([name], [opts], cb) + +Like `test([name], [opts], cb)` except if you use `.only` this is the only test case +that will run for the entire process, all other test cases using `tape` will +be ignored. + +## var stream = test.createStream(opts) + +Create a stream of output, bypassing the default output stream that writes +messages to `console.log()`. By default `stream` will be a text stream of TAP +output, but you can get an object stream instead by setting `opts.objectMode` to +`true`. + +### tap stream reporter + +You can create your own custom test reporter using this `createStream()` api: + +``` js +var test = require('tape'); +var path = require('path'); + +test.createStream().pipe(process.stdout); + +process.argv.slice(2).forEach(function (file) { + require(path.resolve(file)); +}); +``` + +You could substitute `process.stdout` for whatever other output stream you want, +like a network connection or a file. + +Pass in test files to run as arguments: + +```sh +$ node tap.js test/x.js test/y.js +TAP version 13 +# (anonymous) +not ok 1 should be equal + --- + operator: equal + expected: "boop" + actual: "beep" + ... +# (anonymous) +ok 2 should be equal +ok 3 (unnamed assert) +# wheee +ok 4 (unnamed assert) + +1..4 +# tests 4 +# pass 3 +# fail 1 +``` + +### object stream reporter + +Here's how you can render an object stream instead of TAP: + +``` js +var test = require('tape'); +var path = require('path'); + +test.createStream({ objectMode: true }).on('data', function (row) { + console.log(JSON.stringify(row)) +}); + +process.argv.slice(2).forEach(function (file) { + require(path.resolve(file)); +}); +``` + +The output for this runner is: + +```sh +$ node object.js test/x.js test/y.js +{"type":"test","name":"(anonymous)","id":0} +{"id":0,"ok":false,"name":"should be equal","operator":"equal","actual":"beep","expected":"boop","error":{},"test":0,"type":"assert"} +{"type":"end","test":0} +{"type":"test","name":"(anonymous)","id":1} +{"id":0,"ok":true,"name":"should be equal","operator":"equal","actual":2,"expected":2,"test":1,"type":"assert"} +{"id":1,"ok":true,"name":"(unnamed assert)","operator":"ok","actual":true,"expected":true,"test":1,"type":"assert"} +{"type":"end","test":1} +{"type":"test","name":"wheee","id":2} +{"id":0,"ok":true,"name":"(unnamed assert)","operator":"ok","actual":true,"expected":true,"test":2,"type":"assert"} +{"type":"end","test":2} +``` + +# install + +With [npm](https://npmjs.org) do: + +```sh +npm install tape --save-dev +``` + +# license + +MIT diff --git a/node_modules/tape/test/add-subtest-async.js b/node_modules/tape/test/add-subtest-async.js new file mode 100644 index 0000000..12b2e2d --- /dev/null +++ b/node_modules/tape/test/add-subtest-async.js @@ -0,0 +1,11 @@ +var test = require('../'); + +test('parent', function (t) { + t.pass('parent'); + setTimeout(function () { + t.test('child', function (st) { + st.pass('child'); + st.end(); + }); + }, 100); +}); diff --git a/node_modules/tape/test/anonymous-fn.js b/node_modules/tape/test/anonymous-fn.js new file mode 100644 index 0000000..3e5da15 --- /dev/null +++ b/node_modules/tape/test/anonymous-fn.js @@ -0,0 +1,43 @@ +var tape = require('../'); +var tap = 
require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; +var testWrapper = require('./anonymous-fn/test-wrapper'); + +tap.test('inside anonymous functions', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + var tc = function (rows) { + var body = stripFullStack(rows.toString('utf8')); + + tt.same(body, [ + 'TAP version 13', + '# wrapped test failure', + 'not ok 1 fail', + ' ---', + ' operator: fail', + ' at: Test. ($TEST/anonymous-fn/test-wrapper.js:$LINE:$COL)', + ' stack: |-', + ' Error: fail', + ' [... stack stripped ...]', + ' at $TEST/anonymous-fn.js:$LINE:$COL', + ' at Test. ($TEST/anonymous-fn/test-wrapper.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '', + '1..1', + '# tests 1', + '# pass 0', + '# fail 1' + ].join('\n') + '\n'); + }; + + test.createStream().pipe(concat(tc)); + + test('wrapped test failure', testWrapper(function (t) { + t.fail('fail'); + t.end(); + })); +}); diff --git a/node_modules/tape/test/anonymous-fn/test-wrapper.js b/node_modules/tape/test/anonymous-fn/test-wrapper.js new file mode 100644 index 0000000..38e8ba1 --- /dev/null +++ b/node_modules/tape/test/anonymous-fn/test-wrapper.js @@ -0,0 +1,16 @@ +// Example of wrapper function that would invoke tape +module.exports = function (testCase) { + return function (t) { + setUp(); + testCase(t); + tearDown(); + }; +}; + +function setUp() { + // ... example ... +} + +function tearDown() { + // ... example ... +} diff --git a/node_modules/tape/test/array.js b/node_modules/tape/test/array.js new file mode 100644 index 0000000..dc4a4a4 --- /dev/null +++ b/node_modules/tape/test/array.js @@ -0,0 +1,61 @@ +var falafel = require('falafel'); +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + + test.createStream().pipe(concat(function (rows) { + tt.same(rows.toString('utf8'), [ + 'TAP version 13', + '# array', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'ok 5 should be equivalent', + '', + '1..5', + '# tests 5', + '# pass 5', + '', + '# ok' + ].join('\n') + '\n'); + })); + + test('array', function (t) { + t.plan(5); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); + }); +}); diff --git a/node_modules/tape/test/bound.js b/node_modules/tape/test/bound.js new file mode 100644 index 0000000..58f2692 --- /dev/null +++ b/node_modules/tape/test/bound.js @@ -0,0 +1,10 @@ +var test = require('../'); + +test('bind works', function (t) { + t.plan(2); + var equal = t.equal; + var deepEqual = t.deepEqual; + equal(3, 3); + deepEqual([4], [4]); + t.end(); +}); diff --git a/node_modules/tape/test/browser/asserts.js b/node_modules/tape/test/browser/asserts.js new file mode 100644 index 0000000..a1b24f6 --- /dev/null +++ b/node_modules/tape/test/browser/asserts.js @@ -0,0 +1,9 @@ +var test = require('../../'); + +test(function (t) { + t.plan(4); + 
t.ok(true); + t.equal(3, 1+2); + t.deepEqual([1,2,[3,4]], [1,2,[3,4]]); + t.notDeepEqual([1,2,[3,4,5]], [1,2,[3,4]]); +}); diff --git a/node_modules/tape/test/child_ordering.js b/node_modules/tape/test/child_ordering.js new file mode 100644 index 0000000..08d3c76 --- /dev/null +++ b/node_modules/tape/test/child_ordering.js @@ -0,0 +1,54 @@ +var test = require('../'); + +var childRan = false; + +test('parent', function (t) { + t.test('child', function (t) { + childRan = true; + t.pass('child ran'); + t.end(); + }); + t.end(); +}); + +test('uncle', function (t) { + t.ok(childRan, 'Child should run before next top-level test'); + t.end(); +}); + +var grandParentRan = false; +var parentRan = false; +var grandChildRan = false; +test('grandparent', function (t) { + t.ok(!grandParentRan, 'grand parent ran twice'); + grandParentRan = true; + t.test('parent', function (t) { + t.ok(!parentRan, 'parent ran twice'); + parentRan = true; + t.test('grandchild', function (t) { + t.ok(!grandChildRan, 'grand child ran twice'); + grandChildRan = true; + t.pass('grand child ran'); + t.end(); + }); + t.pass('parent ran'); + t.end(); + }); + t.test('other parent', function (t) { + t.ok(parentRan, 'first parent runs before second parent'); + t.ok(grandChildRan, 'grandchild runs before second parent'); + t.end(); + }); + t.pass('grandparent ran'); + t.end(); +}); + +test('second grandparent', function (t) { + t.ok(grandParentRan, 'grandparent ran'); + t.ok(parentRan, 'parent ran'); + t.ok(grandChildRan, 'grandchild ran'); + t.pass('other grandparent ran'); + t.end(); +}); + +// vim: set softtabstop=4 shiftwidth=4: diff --git a/node_modules/tape/test/circular-things.js b/node_modules/tape/test/circular-things.js new file mode 100644 index 0000000..98e57a3 --- /dev/null +++ b/node_modules/tape/test/circular-things.js @@ -0,0 +1,44 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('circular test', function (assert) { + var test = tape.createHarness({ exit: false }); + assert.plan(1); + + test.createStream().pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# circular\n' + + 'not ok 1 should be equal\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: |-\n' + + ' {}\n' + + ' actual: |-\n' + + ' { circular: [Circular] }\n' + + ' at: Test. ($TEST/circular-things.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should be equal\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/circular-things.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + })); + + test("circular", function (t) { + t.plan(1); + var circular = {}; + circular.circular = circular; + t.equal(circular, {}); + }); +}); diff --git a/node_modules/tape/test/comment.js b/node_modules/tape/test/comment.js new file mode 100644 index 0000000..b3f9bcc --- /dev/null +++ b/node_modules/tape/test/comment.js @@ -0,0 +1,175 @@ +var concat = require('concat-stream'); +var tap = require('tap'); +var tape = require('../'); + +// Exploratory test to ascertain proper output when no t.comment() call +// is made. 
+tap.test('no comment', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(output.toString('utf8'), [ + 'TAP version 13', + '# no comment', + '', + '1..0', + '# tests 0', + '# pass 0', + '', + '# ok', + '' + ].join('\n')); + }; + + var test = tape.createHarness(); + test.createStream().pipe(concat(verify)); + test('no comment', function (t) { + t.end(); + }); +}); + +// Exploratory test, can we call t.comment() passing nothing? +tap.test('missing argument', function (assert) { + assert.plan(1); + var test = tape.createHarness(); + test.createStream(); + test('missing argument', function (t) { + try { + t.comment(); + t.end(); + } catch (err) { + assert.equal(err.constructor, TypeError); + } finally { + assert.end(); + } + }); +}); + +// Exploratory test, can we call t.comment() passing nothing? +tap.test('null argument', function (assert) { + assert.plan(1); + var test = tape.createHarness(); + test.createStream(); + test('null argument', function (t) { + try { + t.comment(null); + t.end(); + } catch (err) { + assert.equal(err.constructor, TypeError); + } finally { + assert.end(); + } + }); +}); + + +// Exploratory test, how is whitespace treated? +tap.test('whitespace', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(output.toString('utf8'), [ + 'TAP version 13', + '# whitespace', + '# ', + '# a', + '# a', + '# a', + '', + '1..0', + '# tests 0', + '# pass 0', + '', + '# ok', + '' + ].join('\n')); + }; + + var test = tape.createHarness(); + test.createStream().pipe(concat(verify)); + test('whitespace', function (t) { + t.comment(' '); + t.comment(' a'); + t.comment('a '); + t.comment(' a '); + t.end(); + }); +}); + +// Exploratory test, how about passing types other than strings? 
+tap.test('non-string types', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(output.toString('utf8'), [ + 'TAP version 13', + '# non-string types', + '# true', + '# false', + '# 42', + '# 6.66', + '# [object Object]', + '# [object Object]', + '# [object Object]', + '# function ConstructorFunction() {}', + '', + '1..0', + '# tests 0', + '# pass 0', + '', + '# ok', + '' + ].join('\n')); + }; + + var test = tape.createHarness(); + test.createStream().pipe(concat(verify)); + test('non-string types', function (t) { + t.comment(true); + t.comment(false); + t.comment(42); + t.comment(6.66); + t.comment({}); + t.comment({"answer": 42}); + function ConstructorFunction() {} + t.comment(new ConstructorFunction()); + t.comment(ConstructorFunction); + t.end(); + }); +}); + +tap.test('multiline string', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(output.toString('utf8'), [ + 'TAP version 13', + '# multiline strings', + '# a', + '# b', + '# c', + '# d', + '', + '1..0', + '# tests 0', + '# pass 0', + '', + '# ok', + '' + ].join('\n')); + }; + + var test = tape.createHarness(); + test.createStream().pipe(concat(verify)); + test('multiline strings', function (t) { + t.comment([ + 'a', + 'b', + ].join('\n')); + t.comment([ + 'c', + 'd', + ].join('\r\n')); + t.end(); + }); +}); diff --git a/node_modules/tape/test/common.js b/node_modules/tape/test/common.js new file mode 100644 index 0000000..ae1681e --- /dev/null +++ b/node_modules/tape/test/common.js @@ -0,0 +1,65 @@ +var path = require('path'); +var yaml = require('js-yaml'); + +module.exports.getDiag = function (body) { + var yamlStart = body.indexOf(' ---'); + var yamlEnd = body.indexOf(' ...\n'); + var diag = body.slice(yamlStart, yamlEnd).split('\n').map(function (line) { + return line.slice(2); + }).join('\n'); + + // The stack trace and at variable will vary depending on where the code + // is run, so just strip it out. + var withStack = yaml.safeLoad(diag); + delete withStack.stack; + delete withStack.at; + return withStack; +}; + +// There are three challenges associated with checking the stack traces included +// in errors: +// 1) The base checkout directory of tape might change. Because stack traces +// include absolute paths, the stack traces will change depending on the +// checkout path. We handle this by replacing the base test directory with a +// placeholder $TEST variable and the package root with a placehodler +// $TAPE variable. +// 2) Line positions within the file might change. We handle this by replacing +// line and column markers with placeholder $LINE and $COL "variables" +// a) node 0.8 does not provide nested eval line numbers, so we remove them +// 3) Stacks themselves change frequently with refactoring. We've even run into +// issues with node library refactorings "breaking" stack traces. Most of +// these changes are irrelevant to the tests themselves. To counter this, we +// strip out all stack frames that aren't directly under our test directory, +// and replace them with placeholders. 
+ +var stripChangingData = function (line) { + var withoutTestDir = line.replace(__dirname, '$TEST'); + var withoutPackageDir = withoutTestDir.replace(path.dirname(__dirname), '$TAPE'); + var withoutPathSep = withoutPackageDir.replace(new RegExp('\\' + path.sep, 'g'), '/'); + var withoutLineNumbers = withoutPathSep.replace(/:\d+:\d+/g, ':$LINE:$COL'); + var withoutNestedLineNumbers = withoutLineNumbers.replace(/, \:\$LINE:\$COL\)$/, ')'); + return withoutNestedLineNumbers; +}; + +module.exports.stripFullStack = function (output) { + var stripped = ' [... stack stripped ...]'; + var withDuplicates = output.split('\n').map(stripChangingData).map(function (line) { + var m = line.match(/[ ]{8}at .*\((.*)\)/); + + if (m && m[1].slice(0, 5) !== '$TEST') { + return stripped; + } + return line; + }); + + var deduped = withDuplicates.filter(function (line, ix) { + var hasPrior = line === stripped && withDuplicates[ix - 1] === stripped; + return !hasPrior; + }); + + return deduped.join('\n').replace( + // Handle stack trace variation in Node v0.8 + /at(:?) Test\.(?:module\.exports|tap\.test\.err\.code)/g, + 'at$1 Test.' + ); +}; diff --git a/node_modules/tape/test/create_multiple_streams.js b/node_modules/tape/test/create_multiple_streams.js new file mode 100644 index 0000000..53f4309 --- /dev/null +++ b/node_modules/tape/test/create_multiple_streams.js @@ -0,0 +1,31 @@ +var tape = require('../'); + +tape.test('createMultipleStreams', function (tt) { + tt.plan(2); + + var th = tape.createHarness(); + th.createStream(); + th.createStream(); + + var testOneComplete = false; + + th('test one', function (tht) { + tht.plan(1); + setTimeout( function () { + tht.pass(); + testOneComplete = true; + }, 100); + }); + + th('test two', function (tht) { + tht.ok(testOneComplete, 'test 1 completed before test 2'); + tht.end(); + }); + + th.onFinish(function () { + tt.equal(th._results.count, 2, "harness test ran"); + tt.equal(th._results.fail, 0, "harness test didn't fail"); + }); +}); + + diff --git a/node_modules/tape/test/deep-equal-failure.js b/node_modules/tape/test/deep-equal-failure.js new file mode 100644 index 0000000..6bf91d8 --- /dev/null +++ b/node_modules/tape/test/deep-equal-failure.js @@ -0,0 +1,191 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); +var tapParser = require('tap-parser'); +var common = require('./common'); + +var getDiag = common.getDiag; +var stripFullStack = common.stripFullStack; + +tap.test('deep equal failure', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# deep equal\n' + + 'not ok 1 should be equal\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: |-\n' + + ' { b: 2 }\n' + + ' actual: |-\n' + + ' { a: 1 }\n' + + ' at: Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should be equal\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' [... 
stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'equal', + expected: '{ b: 2 }', + actual: '{ a: 1 }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should be equal', + diag: { + operator: 'equal', + expected: '{ b: 2 }', + actual: '{ a: 1 }' + } + }); + }); + + test("deep equal", function (t) { + t.plan(1); + t.equal({a: 1}, {b: 2}); + }); +}); + +tap.test('deep equal failure, depth 6, with option', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# deep equal\n' + + 'not ok 1 should be equal\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: |-\n' + + ' { a: { a1: { a2: { a3: { a4: { a5: 2 } } } } } }\n' + + ' actual: |-\n' + + ' { a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }\n' + + ' at: Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should be equal\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'equal', + expected: '{ a: { a1: { a2: { a3: { a4: { a5: 2 } } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should be equal', + diag: { + operator: 'equal', + expected: '{ a: { a1: { a2: { a3: { a4: { a5: 2 } } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }' + } + }); + }); + + test("deep equal", {objectPrintDepth: 6}, function (t) { + t.plan(1); + t.equal({ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }, { a: { a1: { a2: { a3: { a4: { a5: 2 } } } } } }); + }); +}); + +tap.test('deep equal failure, depth 6, without option', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# deep equal\n' + + 'not ok 1 should be equal\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: |-\n' + + ' { a: { a1: { a2: { a3: { a4: [Object] } } } } }\n' + + ' actual: |-\n' + + ' { a: { a1: { a2: { a3: { a4: [Object] } } } } }\n' + + ' at: Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should be equal\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/deep-equal-failure.js:$LINE:$COL)\n' + + ' [... 
stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'equal', + expected: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should be equal', + diag: { + operator: 'equal', + expected: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }' + } + }); + }); + + test("deep equal", function (t) { + t.plan(1); + t.equal({ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }, { a: { a1: { a2: { a3: { a4: { a5: 2 } } } } } }); + }); +}); diff --git a/node_modules/tape/test/deep.js b/node_modules/tape/test/deep.js new file mode 100644 index 0000000..909ebe1 --- /dev/null +++ b/node_modules/tape/test/deep.js @@ -0,0 +1,17 @@ +var test = require('../'); + +test('deep strict equal', function (t) { + t.notDeepEqual( + [ { a: '3' } ], + [ { a: 3 } ] + ); + t.end(); +}); + +test('deep loose equal', function (t) { + t.deepLooseEqual( + [ { a: '3' } ], + [ { a: 3 } ] + ); + t.end(); +}); diff --git a/node_modules/tape/test/default-messages.js b/node_modules/tape/test/default-messages.js new file mode 100644 index 0000000..dcfdbff --- /dev/null +++ b/node_modules/tape/test/default-messages.js @@ -0,0 +1,31 @@ +var tap = require('tap'); +var path = require('path'); +var spawn = require('child_process').spawn; +var concat = require('concat-stream'); + +tap.test('default messages', function (t) { + t.plan(1); + + var ps = spawn(process.execPath, [path.join(__dirname, 'messages', 'defaults.js')]); + + ps.stdout.pipe(concat(function (rows) { + + t.same(rows.toString('utf8'), [ + 'TAP version 13', + '# default messages', + 'ok 1 should be truthy', + 'ok 2 should be falsy', + 'ok 3 should be equal', + 'ok 4 should not be equal', + 'ok 5 should be equivalent', + 'ok 6 should be equivalent', + 'ok 7 should be equivalent', + '', + '1..7', + '# tests 7', + '# pass 7', + '', + '# ok' + ].join('\n') + '\n\n'); + })); +}); diff --git a/node_modules/tape/test/double_end.js b/node_modules/tape/test/double_end.js new file mode 100644 index 0000000..b2fbc5b --- /dev/null +++ b/node_modules/tape/test/double_end.js @@ -0,0 +1,60 @@ +var test = require('tap').test; +var path = require('path'); +var concat = require('concat-stream'); +var spawn = require('child_process').spawn; + +var stripFullStack = require('./common').stripFullStack; + +test(function (t) { + t.plan(2); + var ps = spawn(process.execPath, [path.join(__dirname, 'double_end', 'double.js')]); + ps.on('exit', function (code) { + t.equal(code, 1); + }); + ps.stdout.pipe(concat(function (body) { + // The implementation of node's timer library has changed over time. We + // need to reverse engineer the error we expect to see. + + // This code is unfortunately by necessity highly coupled to node + // versions, and may require tweaking with future versions of the timers + // library. 
+ function doEnd() { throw new Error(); }; + var to = setTimeout(doEnd, 5000); + clearTimeout(to); + to._onTimeout = doEnd; + + var stackExpected; + var atExpected; + try { + to._onTimeout(); + } + catch (e) { + stackExpected = stripFullStack(e.stack).split('\n')[1]; + stackExpected = stackExpected.replace('double_end.js', 'double_end/double.js'); + stackExpected = stackExpected.trim(); + atExpected = stackExpected.replace(/^at\s+/, 'at: '); + } + + var stripped = stripFullStack(body.toString('utf8')); + t.equal(stripped, [ + 'TAP version 13', + '# double end', + 'ok 1 should be equal', + 'not ok 2 .end() called twice', + ' ---', + ' operator: fail', + ' ' + atExpected, + ' stack: |-', + ' Error: .end() called twice', + ' [... stack stripped ...]', + ' ' + stackExpected, + ' [... stack stripped ...]', + ' ...', + '', + '1..2', + '# tests 2', + '# pass 1', + '# fail 1', + ].join('\n') + '\n\n'); + })); +}); diff --git a/node_modules/tape/test/double_end/double.js b/node_modules/tape/test/double_end/double.js new file mode 100644 index 0000000..43929e5 --- /dev/null +++ b/node_modules/tape/test/double_end/double.js @@ -0,0 +1,11 @@ +var test = require('../../'); + +test('double end', function (t) { + function doEnd() { + t.end(); + } + + t.equal(1 + 1, 2); + t.end(); + setTimeout(doEnd, 5); +}); diff --git a/node_modules/tape/test/end-as-callback.js b/node_modules/tape/test/end-as-callback.js new file mode 100644 index 0000000..a978688 --- /dev/null +++ b/node_modules/tape/test/end-as-callback.js @@ -0,0 +1,88 @@ +var tap = require("tap"); +var forEach = require("for-each"); +var tape = require("../"); +var concat = require('concat-stream'); + +tap.test("tape assert.end as callback", function (tt) { + var test = tape.createHarness({ exit: false }); + + test.createStream().pipe(concat(function (rows) { + tt.equal(rows.toString('utf8'), [ + 'TAP version 13', + '# do a task and write', + 'ok 1 null', + 'ok 2 should be equal', + '# do a task and write fail', + 'ok 3 null', + 'ok 4 should be equal', + 'not ok 5 Error: fail', + getStackTrace(rows), // tap error stack + '', + '1..5', + '# tests 5', + '# pass 4', + '# fail 1' + ].join('\n') + '\n'); + tt.end(); + })); + + test("do a task and write", function (assert) { + fakeAsyncTask("foo", function (err, value) { + assert.ifError(err); + assert.equal(value, "taskfoo"); + + fakeAsyncWrite("bar", assert.end); + }); + }); + + test("do a task and write fail", function (assert) { + fakeAsyncTask("bar", function (err, value) { + assert.ifError(err); + assert.equal(value, "taskbar"); + + fakeAsyncWriteFail("baz", assert.end); + }); + }); +}); + +function fakeAsyncTask(name, cb) { + cb(null, "task" + name); +} + +function fakeAsyncWrite(name, cb) { + cb(null); +} + +function fakeAsyncWriteFail(name, cb) { + cb(new Error("fail")); +} + +/** + * extract the stack trace for the failed test. 
+ * this will change dependent on the environment + * so no point hard-coding it in the test assertion + * see: https://git.io/v6hGG for example + * @param String rows - the tap output lines + * @returns String stacktrace - just the error stack part + */ +function getStackTrace(rows) { + var stacktrace = ' ---\n'; + var extract = false; + forEach(rows.toString('utf8').split('\n'), function (row) { + if (!extract) { + if (row.indexOf('---') > -1) { // start of stack trace + extract = true; + } + } else { + if (row.indexOf('...') > -1) { // end of stack trace + extract = false; + stacktrace += ' ...'; + } else { + stacktrace += row + '\n'; + } + + } + }); + // console.log(stacktrace); + return stacktrace; +} diff --git a/node_modules/tape/test/exit.js b/node_modules/tape/test/exit.js new file mode 100644 index 0000000..fba56b2 --- /dev/null +++ b/node_modules/tape/test/exit.js @@ -0,0 +1,204 @@ +var tap = require('tap'); +var path = require('path'); +var spawn = require('child_process').spawn; +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('exit ok', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(rows.toString('utf8'), [ + 'TAP version 13', + '# array', + '# hi', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'ok 5 should be equivalent', + '', + '1..5', + '# tests 5', + '# pass 5', + '', + '# ok', + '', // yes, these double-blank-lines at the end are required. + '' // if you can figure out how to remove them, please do! + ].join('\n')); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, 'exit', 'ok.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.equal(code, 0); + }); +}); + +tap.test('exit fail', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# array', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'not ok 5 should be equivalent', + ' ---', + ' operator: deepEqual', + ' expected: [ [ 1, 2, [ 3, 4444 ] ], [ 5, 6 ] ]', + ' actual: [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]', + ' at: Test. ($TEST/exit/fail.js:$LINE:$COL)', + ' stack: |-', + ' Error: should be equivalent', + ' [... stack stripped ...]', + ' at $TEST/exit/fail.js:$LINE:$COL', + ' at eval (eval at ($TEST/exit/fail.js:$LINE:$COL))', + ' at eval (eval at ($TEST/exit/fail.js:$LINE:$COL))', + ' at Test. ($TEST/exit/fail.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '', + '1..5', + '# tests 5', + '# pass 4', + '# fail 1' + ].join('\n') + '\n\n'); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, 'exit', 'fail.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.notEqual(code, 0); + }); +}); + +tap.test('too few exit', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# array', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'ok 5 should be equivalent', + 'not ok 6 plan != count', + ' ---', + ' operator: fail', + ' expected: 6', + ' actual: 5', + ' at: process. ($TAPE/index.js:$LINE:$COL)', + ' stack: |-', + ' Error: plan != count', + ' [... 
stack stripped ...]', + ' ...', + '', + '1..6', + '# tests 6', + '# pass 5', + '# fail 1' + ].join('\n') + '\n\n'); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, '/exit/too_few.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.notEqual(code, 0); + }); +}); + +tap.test('more planned in a second test', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# first', + 'ok 1 should be truthy', + '# second', + 'ok 2 should be truthy', + 'not ok 3 plan != count', + ' ---', + ' operator: fail', + ' expected: 2', + ' actual: 1', + ' at: process. ($TAPE/index.js:$LINE:$COL)', + ' stack: |-', + ' Error: plan != count', + ' [... stack stripped ...]', + ' ...', + '', + '1..3', + '# tests 3', + '# pass 2', + '# fail 1' + ].join('\n') + '\n\n'); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, '/exit/second.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.notEqual(code, 0); + }); +}); + +tap.test('todo passing', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# TODO todo pass', + 'ok 1 should be truthy # TODO', + '', + '1..1', + '# tests 1', + '# pass 1', + '', + '# ok' + ].join('\n') + '\n\n'); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, '/exit/todo.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.equal(code, 0); + }); +}); + +tap.test('todo failing', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# TODO todo fail', + 'not ok 1 should be truthy # TODO', + ' ---', + ' operator: ok', + ' expected: true', + ' actual: false', + ' at: Test. 
($TEST/exit/todo_fail.js:$LINE:$COL)', + ' ...', + '', + '1..1', + '# tests 1', + '# pass 1', + '', + '# ok' + ].join('\n') + '\n\n'); + }; + + var ps = spawn(process.execPath, [path.join(__dirname, '/exit/todo_fail.js')]); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.equal(code, 0); + }); +}); diff --git a/node_modules/tape/test/exit/fail.js b/node_modules/tape/test/exit/fail.js new file mode 100644 index 0000000..07a65ca --- /dev/null +++ b/node_modules/tape/test/exit/fail.js @@ -0,0 +1,35 @@ +var test = require('../../'); +var falafel = require('falafel'); + +test('array', function (t) { + t.plan(5); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4444 ] ], [ 5, 6 ] ]); + } + ); +}); diff --git a/node_modules/tape/test/exit/ok.js b/node_modules/tape/test/exit/ok.js new file mode 100644 index 0000000..6d405c7 --- /dev/null +++ b/node_modules/tape/test/exit/ok.js @@ -0,0 +1,36 @@ +var falafel = require('falafel'); +var test = require('../../'); + +test('array', function (t) { + t.comment('hi'); + t.plan(5); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); +}); diff --git a/node_modules/tape/test/exit/second.js b/node_modules/tape/test/exit/second.js new file mode 100644 index 0000000..8a206bb --- /dev/null +++ b/node_modules/tape/test/exit/second.js @@ -0,0 +1,11 @@ +var test = require('../../'); + +test('first', function (t) { + t.plan(1); + t.ok(true); +}); + +test('second', function (t) { + t.plan(2); + t.ok(true); +}); diff --git a/node_modules/tape/test/exit/todo.js b/node_modules/tape/test/exit/todo.js new file mode 100644 index 0000000..acbf960 --- /dev/null +++ b/node_modules/tape/test/exit/todo.js @@ -0,0 +1,6 @@ +var test = require('../../'); + +test('todo pass', { todo: true }, function (t) { + t.plan(1); + t.ok(true); +}); diff --git a/node_modules/tape/test/exit/todo_fail.js b/node_modules/tape/test/exit/todo_fail.js new file mode 100644 index 0000000..f8ffa67 --- /dev/null +++ b/node_modules/tape/test/exit/todo_fail.js @@ -0,0 +1,6 @@ +var test = require('../../'); + +test('todo fail', { todo: true }, function (t) { + t.plan(1); + t.ok(false); +}); diff --git a/node_modules/tape/test/exit/too_few.js b/node_modules/tape/test/exit/too_few.js new file mode 100644 index 0000000..68ba71d --- /dev/null +++ b/node_modules/tape/test/exit/too_few.js @@ -0,0 +1,35 @@ +var falafel = require('falafel'); +var test = require('../../'); + +test('array', function (t) { + t.plan(6); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, 
function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); +}); diff --git a/node_modules/tape/test/exposed-harness.js b/node_modules/tape/test/exposed-harness.js new file mode 100644 index 0000000..1056ddb --- /dev/null +++ b/node_modules/tape/test/exposed-harness.js @@ -0,0 +1,12 @@ +var tape = require('../'); +var tap = require('tap'); + +tap.test('main harness object is exposed', function (assert) { + + assert.equal(typeof tape.getHarness, 'function', 'tape.getHarness is a function'); + + assert.equal(tape.getHarness()._results.pass, 0); + + assert.end(); + +}); diff --git a/node_modules/tape/test/fail.js b/node_modules/tape/test/fail.js new file mode 100644 index 0000000..9676934 --- /dev/null +++ b/node_modules/tape/test/fail.js @@ -0,0 +1,78 @@ +var falafel = require('falafel'); +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness({ exit: false }); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# array', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'not ok 5 should be equivalent', + ' ---', + ' operator: deepEqual', + ' expected: [ [ 1, 2, [ 3, 4444 ] ], [ 5, 6 ] ]', + ' actual: [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]', + ' at: Test. ($TEST/fail.js:$LINE:$COL)', + ' stack: |-', + ' Error: should be equivalent', + ' [... stack stripped ...]', + ' at $TEST/fail.js:$LINE:$COL', + ' at eval (eval at ($TEST/fail.js:$LINE:$COL))', + ' at eval (eval at ($TEST/fail.js:$LINE:$COL))', + ' at Test. ($TEST/fail.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '', + '1..5', + '# tests 5', + '# pass 4', + '# fail 1', + '' + ].join('\n')); + }; + + test.createStream().pipe(concat(tc)); + + test('array', function (t) { + t.plan(5); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4444 ] ], [ 5, 6 ] ]); + } + ); + }); +}); diff --git a/node_modules/tape/test/has spaces.js b/node_modules/tape/test/has spaces.js new file mode 100644 index 0000000..f8630de --- /dev/null +++ b/node_modules/tape/test/has spaces.js @@ -0,0 +1,40 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness({ exit: false }); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# fail', + 'not ok 1 this should fail', + ' ---', + ' operator: fail', + ' at: Test. 
($TEST/has spaces.js:$LINE:$COL)', + ' stack: |-', + ' Error: this should fail', + ' [... stack stripped ...]', + ' at Test. ($TEST/has spaces.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '', + '1..1', + '# tests 1', + '# pass 0', + '# fail 1', + '' + ].join('\n')); + }; + + test.createStream().pipe(concat(tc)); + + test('fail', function (t) { + t.fail('this should fail'); + t.end(); + }); +}); diff --git a/node_modules/tape/test/ignore/.ignore b/node_modules/tape/test/ignore/.ignore new file mode 100644 index 0000000..ec1cc29 --- /dev/null +++ b/node_modules/tape/test/ignore/.ignore @@ -0,0 +1 @@ +fake_node_modules diff --git a/node_modules/tape/test/ignore/fake_node_modules/stub1.js b/node_modules/tape/test/ignore/fake_node_modules/stub1.js new file mode 100644 index 0000000..6ef0b71 --- /dev/null +++ b/node_modules/tape/test/ignore/fake_node_modules/stub1.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../'); + +tape.test(function (t) { + t.plan(1); + t.fail('Should not print'); +}); diff --git a/node_modules/tape/test/ignore/fake_node_modules/stub2.js b/node_modules/tape/test/ignore/fake_node_modules/stub2.js new file mode 100644 index 0000000..27bebd8 --- /dev/null +++ b/node_modules/tape/test/ignore/fake_node_modules/stub2.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../'); + +tape.test(function (t) { + t.fail('Should not print'); + t.end(); +}); diff --git a/node_modules/tape/test/ignore/test.js b/node_modules/tape/test/ignore/test.js new file mode 100644 index 0000000..f2d6f57 --- /dev/null +++ b/node_modules/tape/test/ignore/test.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../'); + +tape.test(function (t) { + t.plan(1); + t.ok('Okay'); +}); diff --git a/node_modules/tape/test/ignore/test/stub1.js b/node_modules/tape/test/ignore/test/stub1.js new file mode 100644 index 0000000..e91244e --- /dev/null +++ b/node_modules/tape/test/ignore/test/stub1.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../'); + +tape.test(function (t) { + t.plan(1); + t.pass('test/stub1'); +}); diff --git a/node_modules/tape/test/ignore/test/stub2.js b/node_modules/tape/test/ignore/test/stub2.js new file mode 100644 index 0000000..f4661b8 --- /dev/null +++ b/node_modules/tape/test/ignore/test/stub2.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../'); + +tape.test(function (t) { + t.pass('test/stub2'); + t.end(); +}); diff --git a/node_modules/tape/test/ignore/test/sub/sub.stub1.js b/node_modules/tape/test/ignore/test/sub/sub.stub1.js new file mode 100644 index 0000000..0f39cfe --- /dev/null +++ b/node_modules/tape/test/ignore/test/sub/sub.stub1.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../../'); + +tape.test(function (t) { + t.plan(1); + t.pass('test/sub/stub1'); +}); diff --git a/node_modules/tape/test/ignore/test/sub/sub.stub2.js b/node_modules/tape/test/ignore/test/sub/sub.stub2.js new file mode 100644 index 0000000..bec910d --- /dev/null +++ b/node_modules/tape/test/ignore/test/sub/sub.stub2.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../../../'); + +tape.test(function (t) { + t.pass('test/sub/stub2'); + t.end(); +}); diff --git a/node_modules/tape/test/ignore/test2.js b/node_modules/tape/test/ignore/test2.js new file mode 100644 index 0000000..a2ceecc --- /dev/null +++ b/node_modules/tape/test/ignore/test2.js @@ -0,0 +1,8 @@ +'use strict'; + +var tape = require('../../'); + +tape.test(function (t) { + t.pass('Should print'); + t.end(); +}); diff --git 
a/node_modules/tape/test/ignore_from_gitignore.js b/node_modules/tape/test/ignore_from_gitignore.js new file mode 100644 index 0000000..16b076a --- /dev/null +++ b/node_modules/tape/test/ignore_from_gitignore.js @@ -0,0 +1,122 @@ +'use strict'; + +var tap = require('tap'); +var path = require('path'); +var spawn = require('child_process').spawn; +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +var tapeBin = path.join(process.cwd(), 'bin/tape'); + +tap.test('Should pass with ignoring', { skip: process.platform === 'win32' }, function (tt) { + tt.plan(2); + + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# (anonymous)', + 'ok 1 should be truthy', + '# (anonymous)', + 'ok 2 test/stub1', + '# (anonymous)', + 'ok 3 test/stub2', + '# (anonymous)', + 'ok 4 test/sub/stub1', + '# (anonymous)', + 'ok 5 test/sub/stub2', + '# (anonymous)', + 'ok 6 Should print', + '', + '1..6', + '# tests 6', + '# pass 6', + '', + '# ok', + '', + '' + ].join('\n')); + }; + + var ps = spawn(tapeBin, ['**/*.js', '-i', '.ignore'], {cwd: path.join(__dirname, 'ignore')}); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + tt.equal(code, 0); // code 0 + }); +}); + +tap.test('Should pass', { skip: process.platform === 'win32' }, function (tt) { + tt.plan(2); + + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# (anonymous)', + 'not ok 1 Should not print', + ' ---', + ' operator: fail', + ' at: Test. ($TEST/ignore/fake_node_modules/stub1.js:$LINE:$COL)', + ' stack: |-', + ' Error: Should not print', + ' [... stack stripped ...]', + ' at Test. ($TEST/ignore/fake_node_modules/stub1.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '# (anonymous)', + 'not ok 2 Should not print', + ' ---', + ' operator: fail', + ' at: Test. ($TEST/ignore/fake_node_modules/stub2.js:$LINE:$COL)', + ' stack: |-', + ' Error: Should not print', + ' [... stack stripped ...]', + ' at Test. ($TEST/ignore/fake_node_modules/stub2.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '# (anonymous)', + 'ok 3 should be truthy', + '# (anonymous)', + 'ok 4 test/stub1', + '# (anonymous)', + 'ok 5 test/stub2', + '# (anonymous)', + 'ok 6 test/sub/stub1', + '# (anonymous)', + 'ok 7 test/sub/stub2', + '# (anonymous)', + 'ok 8 Should print', + '', + '1..8', + '# tests 8', + '# pass 6', + '# fail 2', + '', + '' + ].join('\n')); + }; + + var ps = spawn(tapeBin, ['**/*.js'], {cwd: path.join(__dirname, 'ignore')}); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + tt.equal(code, 1); + }); +}); + +tap.test('Should fail when ignore file does not exist', { skip: process.platform === 'win32' }, function (tt) { + tt.plan(3); + + var testStdout = function (rows) { + tt.same(rows.toString('utf8'), ''); + }; + + var testStderr = function (rows) { + tt.ok(/^ENOENT[:,] no such file or directory,? 
(?:open )?'\$TEST\/ignore\/.gitignore'\n$/m.test(stripFullStack(rows.toString('utf8')))); + }; + + var ps = spawn(tapeBin, ['**/*.js', '-i'], {cwd: path.join(__dirname, 'ignore')}); + ps.stdout.pipe(concat(testStdout)); + ps.stderr.pipe(concat(testStderr)); + ps.on('exit', function (code) { + tt.equal(code, 2); + }); +}); diff --git a/node_modules/tape/test/many.js b/node_modules/tape/test/many.js new file mode 100644 index 0000000..d6e8515 --- /dev/null +++ b/node_modules/tape/test/many.js @@ -0,0 +1,8 @@ +var test = require('../'); + +test('many tests', function (t) { + t.plan(100); + for (var i = 0; i < 100; i++) { + setTimeout(function () { t.pass(); }, Math.random() * 50); + } +}); diff --git a/node_modules/tape/test/match.js b/node_modules/tape/test/match.js new file mode 100644 index 0000000..22d576d --- /dev/null +++ b/node_modules/tape/test/match.js @@ -0,0 +1,173 @@ +'use strict'; + +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('match', function (tt) { + tt.plan(1); + + var test = tape.createHarness({ exit: false }); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# match', + 'ok 1 regex arg must be a regex', + 'ok 2 string arg must be a string', + 'not ok 3 The input did not match the regular expression /abc/. Input: \'string\'', + ' ---', + ' operator: match', + ' expected: /abc/', + ' actual: \'string\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: The input did not match the regular expression /abc/. Input: \'string\'', + ' [... stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + 'not ok 4 "string" does not match /abc/', + ' ---', + ' operator: match', + ' expected: /abc/', + ' actual: \'string\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: "string" does not match /abc/', + ' [... stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + 'ok 5 The input did not match the regular expression /pass$/. Input: \'I will pass\'', + 'ok 6 "I will pass" matches /pass$/', + '', + '1..6', + '# tests 6', + '# pass 4', + '# fail 2', + '' + ].join('\n')); + }; + + test.createStream().pipe(concat(tc)); + + test('match', function (t) { + t.plan(6); + + t.throws( + function () { t.match(/abc/, 'string'); }, + TypeError, + 'regex arg must be a regex' + ); + + t.throws( + function () { t.match({ abc: 123 }, /abc/); }, + TypeError, + 'string arg must be a string' + ); + + t.match('string', /abc/); + t.match('string', /abc/, '"string" does not match /abc/'); + + t.match('I will pass', /pass$/); + t.match('I will pass', /pass$/, '"I will pass" matches /pass$/'); + + t.end(); + }); +}); + +tap.test('doesNotMatch', function (tt) { + tt.plan(1); + + var test = tape.createHarness({ exit: false }); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# doesNotMatch', + 'ok 1 regex arg must be a regex', + 'ok 2 string arg must be a string', + 'not ok 3 The input was expected to not match the regular expression /string/. Input: \'string\'', + ' ---', + ' operator: doesNotMatch', + ' expected: /string/', + ' actual: \'string\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: The input was expected to not match the regular expression /string/. Input: \'string\'', + ' [... 
stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + 'not ok 4 "string" should not match /string/', + ' ---', + ' operator: doesNotMatch', + ' expected: /string/', + ' actual: \'string\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: "string" should not match /string/', + ' [... stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + 'not ok 5 The input was expected to not match the regular expression /pass$/. Input: \'I will pass\'', + ' ---', + ' operator: doesNotMatch', + ' expected: /pass$/', + ' actual: \'I will pass\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: The input was expected to not match the regular expression /pass$/. Input: \'I will pass\'', + ' [... stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + 'not ok 6 "I will pass" should not match /pass$/', + ' ---', + ' operator: doesNotMatch', + ' expected: /pass$/', + ' actual: \'I will pass\'', + ' at: Test. ($TEST/match.js:$LINE:$COL)', + ' stack: |-', + ' Error: "I will pass" should not match /pass$/', + ' [... stack stripped ...]', + ' at Test. ($TEST/match.js:$LINE:$COL)', + ' [... stack stripped ...]', + ' ...', + '', + '1..6', + '# tests 6', + '# pass 2', + '# fail 4', + '' + ].join('\n')); + }; + + test.createStream().pipe(concat(tc)); + + test('doesNotMatch', function (t) { + t.plan(6); + + t.throws( + function () { t.doesNotMatch(/abc/, 'string'); }, + TypeError, + 'regex arg must be a regex' + ); + + t.throws( + function () { t.doesNotMatch({ abc: 123 }, /abc/); }, + TypeError, + 'string arg must be a string' + ); + + t.doesNotMatch('string', /string/); + t.doesNotMatch('string', /string/, '"string" should not match /string/'); + + t.doesNotMatch('I will pass', /pass$/); + t.doesNotMatch('I will pass', /pass$/, '"I will pass" should not match /pass$/'); + + t.end(); + }); +}); diff --git a/node_modules/tape/test/max_listeners.js b/node_modules/tape/test/max_listeners.js new file mode 100644 index 0000000..23ebea8 --- /dev/null +++ b/node_modules/tape/test/max_listeners.js @@ -0,0 +1,10 @@ +var spawn = require('child_process').spawn; +var path = require('path'); + +var ps = spawn(process.execPath, [path.join(__dirname, 'max_listeners', 'source.js')]); + +ps.stdout.pipe(process.stdout, { end: false }); + +ps.stderr.on('data', function (buf) { + console.log('not ok ' + buf); +}); diff --git a/node_modules/tape/test/max_listeners/source.js b/node_modules/tape/test/max_listeners/source.js new file mode 100644 index 0000000..2179f97 --- /dev/null +++ b/node_modules/tape/test/max_listeners/source.js @@ -0,0 +1,5 @@ +var test = require('../../'); + +for (var i = 0; i < 11; i ++) { + test(function (t) { t.ok(true, 'true is truthy'); t.end(); }); +} diff --git a/node_modules/tape/test/messages/defaults.js b/node_modules/tape/test/messages/defaults.js new file mode 100644 index 0000000..836a34a --- /dev/null +++ b/node_modules/tape/test/messages/defaults.js @@ -0,0 +1,12 @@ +var test = require('../../'); + +test('default messages', function (t) { + t.plan(7); + t.ok(true); + t.notOk(false); + t.equal(true, true); + t.notEqual(true, false); + t.deepEqual(true, true); + t.deepLooseEqual(true, true); + t.notDeepLooseEqual(true, false); +}); diff --git a/node_modules/tape/test/nested-async-plan-noend.js b/node_modules/tape/test/nested-async-plan-noend.js new file mode 100644 index 0000000..1acc141 --- /dev/null +++ 
b/node_modules/tape/test/nested-async-plan-noend.js @@ -0,0 +1,36 @@ +var test = require('../'); + +test('Harness async test support', function (t) { + t.plan(3); + + t.ok(true, 'sync child A'); + + t.test('sync child B', function (tt) { + tt.plan(2); + + setTimeout(function () { + tt.test('async grandchild A', function (ttt) { + ttt.plan(1); + ttt.ok(true); + }); + }, 50); + + setTimeout(function () { + tt.test('async grandchild B', function (ttt) { + ttt.plan(1); + ttt.ok(true); + }); + }, 100); + }); + + setTimeout(function () { + t.test('async child', function (tt) { + tt.plan(2); + tt.ok(true, 'sync grandchild in async child A'); + tt.test('sync grandchild in async child B', function (ttt) { + ttt.plan(1); + ttt.ok(true); + }); + }); + }, 200); +}); diff --git a/node_modules/tape/test/nested-sync-noplan-noend.js b/node_modules/tape/test/nested-sync-noplan-noend.js new file mode 100644 index 0000000..c0e5d4a --- /dev/null +++ b/node_modules/tape/test/nested-sync-noplan-noend.js @@ -0,0 +1,43 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +tap.test('nested sync test without plan or end', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + var tc = function (rows) { + tt.same(rows.toString('utf8'), [ + 'TAP version 13', + '# nested without plan or end', + '# first', + 'ok 1 should be truthy', + '# second', + 'ok 2 should be truthy', + '', + '1..2', + '# tests 2', + '# pass 2', + '', + '# ok' + ].join('\n') + '\n'); + }; + + test.createStream().pipe(concat(tc)); + + test('nested without plan or end', function (t) { + t.test('first', function (q) { + setTimeout(function first() { + q.ok(true); + q.end(); + }, 10); + }); + t.test('second', function (q) { + setTimeout(function second() { + q.ok(true); + q.end(); + }, 10); + }); + }); + +}); diff --git a/node_modules/tape/test/nested.js b/node_modules/tape/test/nested.js new file mode 100644 index 0000000..4682259 --- /dev/null +++ b/node_modules/tape/test/nested.js @@ -0,0 +1,83 @@ +var falafel = require('falafel'); +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + var tc = function (rows) { + tt.same(rows.toString('utf8'), [ + 'TAP version 13', + '# nested array test', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'ok 5 should be equivalent', + '# inside test', + 'ok 6 should be truthy', + 'ok 7 should be truthy', + '# another', + 'ok 8 should be truthy', + '', + '1..8', + '# tests 8', + '# pass 8', + '', + '# ok' + ].join('\n') + '\n'); + }; + + test.createStream().pipe(concat(tc)); + + test('nested array test', function (t) { + t.plan(6); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + t.test('inside test', function (q) { + q.plan(2); + q.ok(true); + + setTimeout(function () { + q.ok(true); + }, 100); + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); + }); + + test('another', function (t) { + t.plan(1); + 
setTimeout(function () { + t.ok(true); + }, 50); + }); +}); diff --git a/node_modules/tape/test/nested2.js b/node_modules/tape/test/nested2.js new file mode 100644 index 0000000..1612b9b --- /dev/null +++ b/node_modules/tape/test/nested2.js @@ -0,0 +1,19 @@ +var test = require('../'); + +test(function (t) { + var i = 0; + t.test('setup', function (t) { + process.nextTick(function () { + t.equal(i, 0, 'called once'); + i++; + t.end(); + }); + }); + + + t.test('teardown', function (t) { + t.end(); + }); + + t.end(); +}); diff --git a/node_modules/tape/test/no_callback.js b/node_modules/tape/test/no_callback.js new file mode 100644 index 0000000..760ff26 --- /dev/null +++ b/node_modules/tape/test/no_callback.js @@ -0,0 +1,3 @@ +var test = require('../'); + +test('No callback.'); diff --git a/node_modules/tape/test/not-deep-equal-failure.js b/node_modules/tape/test/not-deep-equal-failure.js new file mode 100644 index 0000000..166f7e0 --- /dev/null +++ b/node_modules/tape/test/not-deep-equal-failure.js @@ -0,0 +1,191 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); +var tapParser = require('tap-parser'); +var common = require('./common'); + +var getDiag = common.getDiag; +var stripFullStack = common.stripFullStack; + +tap.test('deep equal failure', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# not deep equal\n' + + 'not ok 1 should not be equivalent\n' + + ' ---\n' + + ' operator: notDeepEqual\n' + + ' expected: |-\n' + + ' { b: 2 }\n' + + ' actual: |-\n' + + ' { b: 2 }\n' + + ' at: Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should not be equivalent\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'notDeepEqual', + expected: '{ b: 2 }', + actual: '{ b: 2 }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should not be equivalent', + diag: { + operator: 'notDeepEqual', + expected: '{ b: 2 }', + actual: '{ b: 2 }' + } + }); + }); + + test("not deep equal", function (t) { + t.plan(1); + t.notDeepEqual({b: 2}, {b: 2}); + }); +}); + +tap.test('not deep equal failure, depth 6, with option', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# not deep equal\n' + + 'not ok 1 should not be equivalent\n' + + ' ---\n' + + ' operator: notDeepEqual\n' + + ' expected: |-\n' + + ' { a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }\n' + + ' actual: |-\n' + + ' { a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }\n' + + ' at: Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should not be equivalent\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' [... 
stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'notDeepEqual', + expected: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should not be equivalent', + diag: { + operator: 'notDeepEqual', + expected: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }' + } + }); + }); + + test("not deep equal", {objectPrintDepth: 6}, function (t) { + t.plan(1); + t.notDeepEqual({ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }, { a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }); + }); +}); + +tap.test('not deep equal failure, depth 6, without option', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# not deep equal\n' + + 'not ok 1 should not be equivalent\n' + + ' ---\n' + + ' operator: notDeepEqual\n' + + ' expected: |-\n' + + ' { a: { a1: { a2: { a3: { a4: [Object] } } } } }\n' + + ' actual: |-\n' + + ' { a: { a1: { a2: { a3: { a4: [Object] } } } } }\n' + + ' at: Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should not be equivalent\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/not-deep-equal-failure.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'notDeepEqual', + expected: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should not be equivalent', + diag: { + operator: 'notDeepEqual', + expected: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }', + actual: '{ a: { a1: { a2: { a3: { a4: [Object] } } } } }' + } + }); + }); + + test("not deep equal", function (t) { + t.plan(1); + t.notDeepEqual({ a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }, { a: { a1: { a2: { a3: { a4: { a5: 1 } } } } } }); + }); +}); diff --git a/node_modules/tape/test/not-equal-failure.js b/node_modules/tape/test/not-equal-failure.js new file mode 100644 index 0000000..83fc0a4 --- /dev/null +++ b/node_modules/tape/test/not-equal-failure.js @@ -0,0 +1,67 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); +var tapParser = require('tap-parser'); +var common = require('./common'); + +var getDiag = common.getDiag; +var stripFullStack = common.stripFullStack; + +tap.test('not equal failure', function (assert) { + var test = tape.createHarness({ exit: false }); + var stream = test.createStream(); + var parser = tapParser(); + assert.plan(3); + + stream.pipe(parser); + stream.pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# not equal\n' + + 'not ok 1 should not be equal\n' + + ' ---\n' + + ' operator: notEqual\n' + + ' expected: 2\n' + + ' actual: 2\n' + + ' at: Test. 
($TEST/not-equal-failure.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should not be equal\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/not-equal-failure.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + assert.deepEqual(getDiag(body), { + operator: 'notEqual', + expected: '2', + actual: '2' + }); + })); + + parser.once('assert', function (data) { + delete data.diag.stack; + delete data.diag.at; + assert.deepEqual(data, { + ok: false, + id: 1, + name: 'should not be equal', + diag: { + operator: 'notEqual', + expected: '2', + actual: '2' + } + }); + }); + + test("not equal", function (t) { + t.plan(1); + t.notEqual(2, 2); + }); +}); diff --git a/node_modules/tape/test/objectMode.js b/node_modules/tape/test/objectMode.js new file mode 100644 index 0000000..d9e1f6e --- /dev/null +++ b/node_modules/tape/test/objectMode.js @@ -0,0 +1,70 @@ +var tap = require('tap'); +var tape = require('../'); +var forEach = require('for-each'); +var through = require('through'); + +tap.test('object results', function (assert) { + var printer = through({ objectMode: true }); + var objects = []; + + printer.write = function (obj) { + objects.push(obj); + }; + + printer.end = function (obj) { + if (obj) objects.push(obj); + + var todos = 0; + var skips = 0; + var testIds = []; + var endIds = []; + var asserts = 0; + + assert.equal(objects.length, 13); + + forEach(objects, function (obj) { + if (obj.type === 'assert') { + asserts++; + } else if (obj.type === 'test') { + testIds.push(obj.id); + + if (obj.skip) { + skips++; + } else if (obj.todo) { + todos++; + } + } else if (obj.type === 'end') { + endIds.push(obj.text); + // test object should exist + assert.notEqual(testIds.indexOf(obj.test), -1); + } + }); + + assert.equal(asserts, 5); + assert.equal(skips, 1); + assert.equal(todos, 2); + assert.equal(testIds.length, endIds.length); + assert.end(); + }; + + tape.createStream({ objectMode: true }) + .pipe(printer); + + tape('parent', function (t1) { + t1.equal(true, true); + t1.test('child1', {skip: true}, function (t2) { + t2.equal(true, true); + t2.equal(true, false); + t2.end(); + }); + t1.test('child2', {todo: true}, function (t3) { + t3.equal(true, false); + t3.equal(true, true); + t3.end(); + }); + t1.test('child3', {todo: true}); + t1.equal(true, true); + t1.equal(true, true); + t1.end(); + }); +}); diff --git a/node_modules/tape/test/onFailure.js b/node_modules/tape/test/onFailure.js new file mode 100644 index 0000000..ec5d934 --- /dev/null +++ b/node_modules/tape/test/onFailure.js @@ -0,0 +1,21 @@ +var tap = require("tap"); +var tape = require("../").createHarness(); + +//Because this test passing depends on a failure, +//we must direct the failing output of the inner test +var noop = function () {}; +var mockSink = {on: noop, removeListener: noop, emit: noop, end: noop}; +tape.createStream().pipe(mockSink); + +tap.test("on failure", { timeout: 1000 }, function (tt) { + tt.plan(1); + + tape("dummy test", function (t) { + t.fail(); + t.end(); + }); + + tape.onFailure(function () { + tt.pass("tape ended"); + }); +}); diff --git a/node_modules/tape/test/onFinish.js b/node_modules/tape/test/onFinish.js new file mode 100644 index 0000000..80ccf05 --- /dev/null +++ b/node_modules/tape/test/onFinish.js @@ -0,0 +1,12 @@ +var tap = require("tap"); +var tape = require("../"); + +tap.test("on finish", {timeout: 1000}, function (tt) { + tt.plan(1); + tape.onFinish(function () { + tt.pass('tape 
ended'); + }); + tape('dummy test', function (t) { + t.end(); + }); +}); diff --git a/node_modules/tape/test/only-twice.js b/node_modules/tape/test/only-twice.js new file mode 100644 index 0000000..47c97f5 --- /dev/null +++ b/node_modules/tape/test/only-twice.js @@ -0,0 +1,20 @@ +var tape = require('../'); +var tap = require('tap'); + +tap.test('only twice error', function (assert) { + var test = tape.createHarness({ exit: false }); + + test.only("first only", function (t) { + t.end(); + }); + + assert.throws(function () { + test.only('second only', function (t) { + t.end(); + }); + }, { + name: 'Error', + message: 'there can only be one only test' + }); + assert.end(); +}); diff --git a/node_modules/tape/test/only.js b/node_modules/tape/test/only.js new file mode 100644 index 0000000..bcfc905 --- /dev/null +++ b/node_modules/tape/test/only.js @@ -0,0 +1,45 @@ +var tap = require('tap'); +var tape = require('../'); +var concat = require('concat-stream'); + +tap.test('tape only test', function (tt) { + var test = tape.createHarness({ exit: false }); + var ran = []; + + var tc = function (rows) { + tt.deepEqual(rows.toString('utf8'), [ + 'TAP version 13', + '# run success', + 'ok 1 assert name', + '', + '1..1', + '# tests 1', + '# pass 1', + '', + '# ok' + ].join('\n') + '\n'); + tt.deepEqual(ran, [ 3 ]); + + tt.end(); + }; + + test.createStream().pipe(concat(tc)); + + test("never run fail", function (t) { + ran.push(1); + t.equal(true, false); + t.end(); + }); + + test("never run success", function (t) { + ran.push(2); + t.equal(true, true); + t.end(); + }); + + test.only("run success", function (t) { + ran.push(3); + t.ok(true, "assert name"); + t.end(); + }); +}); diff --git a/node_modules/tape/test/only2.js b/node_modules/tape/test/only2.js new file mode 100644 index 0000000..fcf4f43 --- /dev/null +++ b/node_modules/tape/test/only2.js @@ -0,0 +1,9 @@ +var test = require('../'); + +test('only2 test 1', function (t) { + t.end(); +}); + +test.only('only2 test 2', function (t) { + t.end(); +}); diff --git a/node_modules/tape/test/only3.js b/node_modules/tape/test/only3.js new file mode 100644 index 0000000..b192a4e --- /dev/null +++ b/node_modules/tape/test/only3.js @@ -0,0 +1,15 @@ +var test = require('../'); + +test('only3 test 1', function (t) { + t.fail('not 1'); + t.end(); +}); + +test.only('only3 test 2', function (t) { + t.end(); +}); + +test('only3 test 3', function (t) { + t.fail('not 3'); + t.end(); +}); diff --git a/node_modules/tape/test/only4.js b/node_modules/tape/test/only4.js new file mode 100644 index 0000000..d570b5b --- /dev/null +++ b/node_modules/tape/test/only4.js @@ -0,0 +1,10 @@ +var test = require('../'); + +test('only4 duplicate test name', function (t) { + t.fail('not 1'); + t.end(); +}); + +test.only('only4 duplicate test name', function (t) { + t.end(); +}); diff --git a/node_modules/tape/test/only5.js b/node_modules/tape/test/only5.js new file mode 100644 index 0000000..0e15887 --- /dev/null +++ b/node_modules/tape/test/only5.js @@ -0,0 +1,10 @@ +var test = require('../'); + +test.only('only5 duplicate test name', function (t) { + t.end(); +}); + +test('only5 duplicate test name', function (t) { + t.fail('not 2'); + t.end(); +}); diff --git a/node_modules/tape/test/order.js b/node_modules/tape/test/order.js new file mode 100644 index 0000000..02aaa05 --- /dev/null +++ b/node_modules/tape/test/order.js @@ -0,0 +1,17 @@ +var test = require('../'); +var current = 0; + +test(function (t) { + t.equal(current++, 0); + t.end(); +}); +test(function (t) { + t.plan(1); + 
setTimeout(function () { + t.equal(current++, 1); + }, 100); +}); +test(function (t) { + t.equal(current++, 2); + t.end(); +}); diff --git a/node_modules/tape/test/plan_optional.js b/node_modules/tape/test/plan_optional.js new file mode 100644 index 0000000..680dbcb --- /dev/null +++ b/node_modules/tape/test/plan_optional.js @@ -0,0 +1,15 @@ +var test = require('../'); + +test('plan should be optional', function (t) { + t.pass('no plan here'); + t.end(); +}); + +test('no plan async', function (t) { + setTimeout(function () { + t.pass('ok'); + t.end(); + }, 100); +}); + +// vim: set softtabstop=4 shiftwidth=4: diff --git a/node_modules/tape/test/require.js b/node_modules/tape/test/require.js new file mode 100644 index 0000000..7420bb7 --- /dev/null +++ b/node_modules/tape/test/require.js @@ -0,0 +1,69 @@ +var tap = require('tap'); +var spawn = require('child_process').spawn; +var concat = require('concat-stream'); + +tap.test('requiring a single module', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(rows.toString('utf8'), [ + 'TAP version 13', + '# module-a', + 'ok 1 loaded module a', + '# test-a', + 'ok 2 module-a loaded in same context', + 'ok 3 test ran after module-a was loaded', + '', + '1..3', + '# tests 3', + '# pass 3', + '', + '# ok' + ].join('\n') + '\n\n'); + }; + + var ps = tape('-r ./require/a require/test-a.js'); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.equal(code, 0); + }); +}); + +tap.test('requiring multiple modules', function (t) { + t.plan(2); + + var tc = function (rows) { + t.same(rows.toString('utf8'), [ + 'TAP version 13', + '# module-a', + 'ok 1 loaded module a', + '# module-b', + 'ok 2 loaded module b', + '# test-a', + 'ok 3 module-a loaded in same context', + 'ok 4 test ran after module-a was loaded', + '# test-b', + 'ok 5 module-b loaded in same context', + 'ok 6 test ran after module-b was loaded', + '', + '1..6', + '# tests 6', + '# pass 6', + '', + '# ok' + ].join('\n') + '\n\n'); + }; + + var ps = tape('-r ./require/a -r ./require/b require/test-a.js require/test-b.js'); + ps.stdout.pipe(concat(tc)); + ps.on('exit', function (code) { + t.equal(code, 0); + }); +}); + +function tape(args) { + var proc = require('child_process'); + var bin = __dirname + '/../bin/tape'; + + return proc.spawn('node', [bin].concat(args.split(' ')), { cwd: __dirname }); +} diff --git a/node_modules/tape/test/require/a.js b/node_modules/tape/test/require/a.js new file mode 100644 index 0000000..86e0296 --- /dev/null +++ b/node_modules/tape/test/require/a.js @@ -0,0 +1,8 @@ +var tape = require('../..'); + +tape.test('module-a', function (t) { + t.plan(1); + t.pass('loaded module a'); +}); + +global.module_a = true; diff --git a/node_modules/tape/test/require/b.js b/node_modules/tape/test/require/b.js new file mode 100644 index 0000000..808fc9e --- /dev/null +++ b/node_modules/tape/test/require/b.js @@ -0,0 +1,8 @@ +var tape = require('../..'); + +tape.test('module-b', function (t) { + t.plan(1); + t.pass('loaded module b'); +}); + +global.module_b = true; diff --git a/node_modules/tape/test/require/test-a.js b/node_modules/tape/test/require/test-a.js new file mode 100644 index 0000000..03d5a84 --- /dev/null +++ b/node_modules/tape/test/require/test-a.js @@ -0,0 +1,7 @@ +var tape = require('../..'); + +tape.test('test-a', function (t) { + t.ok(global.module_a, 'module-a loaded in same context'); + t.pass('test ran after module-a was loaded'); + t.end(); +}); diff --git a/node_modules/tape/test/require/test-b.js 
b/node_modules/tape/test/require/test-b.js new file mode 100644 index 0000000..5069b55 --- /dev/null +++ b/node_modules/tape/test/require/test-b.js @@ -0,0 +1,7 @@ +var tape = require('../..'); + +tape.test('test-b', function (t) { + t.ok(global.module_b, 'module-b loaded in same context'); + t.pass('test ran after module-b was loaded'); + t.end(); +}); diff --git a/node_modules/tape/test/skip.js b/node_modules/tape/test/skip.js new file mode 100644 index 0000000..234c75a --- /dev/null +++ b/node_modules/tape/test/skip.js @@ -0,0 +1,52 @@ +var test = require('../'); +var ran = 0; + +var concat = require('concat-stream'); +var tap = require('tap'); + +tap.test('test SKIP comment', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(output.toString('utf8'), [ + 'TAP version 13', + '# SKIP skipped', + '', + '1..0', + '# tests 0', + '# pass 0', + '', + '# ok', + '' + ].join('\n')); + }; + + var tapeTest = test.createHarness(); + tapeTest.createStream().pipe(concat(verify)); + tapeTest('skipped', { skip: true }, function (t) { + t.end(); + }); +}); + +test('skip this', { skip: true }, function (t) { + t.fail('this should not even run'); + ran++; + t.end(); +}); + +test.skip('skip this too', function (t) { + t.fail('this should not even run'); + ran++; + t.end(); +}); + +test('skip subtest', function (t) { + ran++; + t.test('skip this', { skip: true }, function (t) { + t.fail('this should not even run'); + t.end(); + }); + t.end(); +}); + +// vim: set softtabstop=4 shiftwidth=4: diff --git a/node_modules/tape/test/skip_explanation.js b/node_modules/tape/test/skip_explanation.js new file mode 100644 index 0000000..09928ff --- /dev/null +++ b/node_modules/tape/test/skip_explanation.js @@ -0,0 +1,82 @@ +var tap = require('tap'); +var test = require('../'); +var concat = require('concat-stream'); +var stripFullStack = require('./common').stripFullStack; + +tap.test('test skip explanations', function (assert) { + assert.plan(1); + + var verify = function (output) { + assert.equal(stripFullStack(output.toString('utf8')), [ + 'TAP version 13', + '# SKIP (this skips)', + '# some tests might skip', + 'ok 1 this runs', + 'ok 2 failing assert is skipped # SKIP', + 'ok 3 this runs', + '# incomplete test', + 'ok 4 run sh', + 'ok 5 run openssl # SKIP', + '# incomplete test with explanation', + 'ok 6 run sh (conditional skip) # SKIP', + 'ok 7 run openssl # SKIP can\'t run on windows platforms', + 'ok 8 this runs', + '# too much explanation', + 'ok 9 run openssl # SKIP Installer cannot work on windows and fails to add to PATH Err: (2401) denied', + '', + '1..9', + '# tests 9', + '# pass 9', + '', + '# ok', + '' + ].join('\n')); + }; + + var tapeTest = test.createHarness(); + tapeTest.createStream().pipe(concat(verify)); + + tapeTest('(this skips)', { skip: true }, function (t) { + t.fail('doesn\'t run'); + t.fail('this doesn\'t run too', { skip: false }); + t.end(); + }); + + tapeTest('some tests might skip', function (t) { + t.pass('this runs'); + t.fail('failing assert is skipped', { skip: true }); + t.pass('this runs'); + t.end(); + }); + + tapeTest('incomplete test', function (t) { + // var platform = process.platform; something like this needed + var platform = 'win32'; + + t.pass('run sh', { skip: platform !== 'win32' }); + t.pass('run openssl', { skip: platform === 'win32' }); + t.end(); + }); + + tapeTest('incomplete test with explanation', function (t) { + // var platform = process.platform; something like this needed + var platform = 'win32'; + + t.fail('run sh 
(conditional skip)', { skip: platform === 'win32' }); + t.fail('run openssl', { skip: platform === 'win32' && 'can\'t run on windows platforms' }); + t.pass('this runs'); + t.end(); + }); + + tapeTest('too much explanation', function (t) { + // var platform = process.platform; something like this needed + var platform = 'win32'; + + t.fail('run openssl', + { skip: platform === 'win32' && 'Installer cannot work on windows\nand fails to add to PATH\n\n Err: (2401) denied' } + ); + t.end(); + }); +}); + +// vim: set softtabstop=4 shiftwidth=4: diff --git a/node_modules/tape/test/stackTrace.js b/node_modules/tape/test/stackTrace.js new file mode 100644 index 0000000..e7f8d4b --- /dev/null +++ b/node_modules/tape/test/stackTrace.js @@ -0,0 +1,253 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); +var tapParser = require('tap-parser'); +var yaml = require('js-yaml'); + +tap.test('preserves stack trace with newlines', function (tt) { + tt.plan(3); + + var test = tape.createHarness(); + var stream = test.createStream(); + var parser = stream.pipe(tapParser()); + var stackTrace = 'foo\n bar'; + + parser.once('assert', function (data) { + delete data.diag.at; + tt.deepEqual(data, { + ok: false, + id: 1, + name: "Error: Preserve stack", + diag: { + stack: stackTrace, + operator: 'error', + expected: 'undefined', + actual: '[Error: Preserve stack]' + } + }); + }); + + stream.pipe(concat(function (body) { + var body = body.toString('utf8'); + body = stripAt(body); + tt.equal( + body, + 'TAP version 13\n' + + '# multiline stack trace\n' + + 'not ok 1 Error: Preserve stack\n' + + ' ---\n' + + ' operator: error\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + ' [Error: Preserve stack]\n' + + ' stack: |-\n' + + ' foo\n' + + ' bar\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + tt.deepEqual(getDiag(body), { + stack: stackTrace, + operator: 'error', + expected: 'undefined', + actual: '[Error: Preserve stack]' + }); + })); + + test('multiline stack trace', function (t) { + t.plan(1); + var err = new Error('Preserve stack'); + err.stack = stackTrace; + t.error(err); + }); +}); + +tap.test('parses function name from original stack', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + test.createStream(); + + test._results._watch = function (t) { + t.on('result', function (res) { + tt.equal('Test.testFunctionNameParsing', res.functionName); + }); + }; + + test('t.equal stack trace', function testFunctionNameParsing(t) { + t.equal(true, false, 'true should be false'); + t.end(); + }); +}); + +tap.test('parses function name from original stack for anonymous function', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + test.createStream(); + + test._results._watch = function (t) { + t.on('result', function (res) { + tt.equal('Test.', res.functionName); + }); + }; + + test('t.equal stack trace', function (t) { + t.equal(true, false, 'true should be false'); + t.end(); + }); +}); + +tap.test('preserves stack trace for failed assertions', function (tt) { + tt.plan(6); + + var test = tape.createHarness(); + var stream = test.createStream(); + var parser = stream.pipe(tapParser()); + + var stack = ''; + parser.once('assert', function (data) { + tt.equal(typeof data.diag.at, 'string'); + tt.equal(typeof data.diag.stack, 'string'); + at = data.diag.at || ''; + stack = data.diag.stack || ''; + tt.ok(/^Error: true should be false(\n at .+)+/.exec(stack), 'stack should be a 
stack'); + tt.deepEqual(data, { + ok: false, + id: 1, + name: "true should be false", + diag: { + at: at, + stack: stack, + operator: 'equal', + expected: false, + actual: true + } + }); + }); + + stream.pipe(concat(function (body) { + var body = body.toString('utf8'); + body = stripAt(body); + tt.equal( + body, + 'TAP version 13\n' + + '# t.equal stack trace\n' + + 'not ok 1 true should be false\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: false\n' + + ' actual: true\n' + + ' stack: |-\n' + + ' ' + + stack.replace(/\n/g, '\n ') + '\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + tt.deepEqual(getDiag(body), { + stack: stack, + operator: 'equal', + expected: false, + actual: true + }); + })); + + test('t.equal stack trace', function (t) { + t.plan(1); + t.equal(true, false, 'true should be false'); + }); +}); + +tap.test('preserves stack trace for failed assertions where actual===falsy', function (tt) { + tt.plan(6); + + var test = tape.createHarness(); + var stream = test.createStream(); + var parser = stream.pipe(tapParser()); + + var stack = ''; + parser.once('assert', function (data) { + tt.equal(typeof data.diag.at, 'string'); + tt.equal(typeof data.diag.stack, 'string'); + at = data.diag.at || ''; + stack = data.diag.stack || ''; + tt.ok(/^Error: false should be true(\n at .+)+/.exec(stack), 'stack should be a stack'); + tt.deepEqual(data, { + ok: false, + id: 1, + name: "false should be true", + diag: { + at: at, + stack: stack, + operator: 'equal', + expected: true, + actual: false + } + }); + }); + + stream.pipe(concat(function (body) { + var body = body.toString('utf8'); + body = stripAt(body); + tt.equal( + body, + 'TAP version 13\n' + + '# t.equal stack trace\n' + + 'not ok 1 false should be true\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: true\n' + + ' actual: false\n' + + ' stack: |-\n' + + ' ' + + stack.replace(/\n/g, '\n ') + '\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + + tt.deepEqual(getDiag(body), { + stack: stack, + operator: 'equal', + expected: true, + actual: false + }); + })); + + test('t.equal stack trace', function (t) { + t.plan(1); + t.equal(false, true, 'false should be true'); + }); +}); + +function getDiag(body) { + var yamlStart = body.indexOf(' ---'); + var yamlEnd = body.indexOf(' ...\n'); + var diag = body.slice(yamlStart, yamlEnd).split('\n').map(function (line) { + return line.slice(2); + }).join('\n'); + + // Get rid of 'at' variable (which has a line number / path of its own that's + // difficult to check). 
+ var withStack = yaml.safeLoad(diag); + delete withStack.at; + return withStack; +} + +function stripAt(body) { + return body.replace(/^\s*at:\s+Test.*$\n/m, ''); +} diff --git a/node_modules/tape/test/subcount.js b/node_modules/tape/test/subcount.js new file mode 100644 index 0000000..8985e6d --- /dev/null +++ b/node_modules/tape/test/subcount.js @@ -0,0 +1,14 @@ +var test = require('../'); + +test('parent test', function (t) { + t.plan(2); + t.test('first child', function (t) { + t.plan(1); + t.pass('pass first child'); + }); + + t.test(function (t) { + t.plan(1); + t.pass('pass second child'); + }); +}); diff --git a/node_modules/tape/test/subtest_and_async.js b/node_modules/tape/test/subtest_and_async.js new file mode 100644 index 0000000..5d92285 --- /dev/null +++ b/node_modules/tape/test/subtest_and_async.js @@ -0,0 +1,25 @@ +var test = require('../'); + +var asyncFunction = function (callback) { + setTimeout(callback, Math.random * 50); +}; + +test('master test', function (t) { + t.test('subtest 1', function (st) { + st.pass('subtest 1 before async call'); + asyncFunction(function () { + st.pass('subtest 1 in async callback'); + st.end(); + }); + }); + + t.test('subtest 2', function (st) { + st.pass('subtest 2 before async call'); + asyncFunction(function () { + st.pass('subtest 2 in async callback'); + st.end(); + }); + }); + + t.end(); +}); diff --git a/node_modules/tape/test/subtest_plan.js b/node_modules/tape/test/subtest_plan.js new file mode 100644 index 0000000..e0f906e --- /dev/null +++ b/node_modules/tape/test/subtest_plan.js @@ -0,0 +1,21 @@ +var test = require('../'); + +test('parent', function (t) { + t.plan(3); + + var firstChildRan = false; + + t.pass('assertion in parent'); + + t.test('first child', function (t) { + t.plan(1); + t.pass('pass first child'); + firstChildRan = true; + }); + + t.test('second child', function (t) { + t.plan(2); + t.ok(firstChildRan, 'first child ran first'); + t.pass('pass second child'); + }); +}); diff --git a/node_modules/tape/test/throws.js b/node_modules/tape/test/throws.js new file mode 100644 index 0000000..a919b4e --- /dev/null +++ b/node_modules/tape/test/throws.js @@ -0,0 +1,224 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +function fn() { + throw new TypeError('RegExp'); +} + +function getNonFunctionMessage(fn) { + try { + fn(); + } catch (e) { + return e.message; + } +} + +var getter = function () { return 'message'; }; +var messageGetterError = Object.defineProperty( + { custom: 'error' }, + 'message', + { configurable: true, enumerable: true, get: getter } +); +var thrower = function () { throw messageGetterError; }; + +tap.test('failures', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + test.createStream().pipe(concat(function (body) { + tt.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# non functions\n' + + 'not ok 1 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage() + "] message: '" + getNonFunctionMessage() + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage(undefined) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... 
stack stripped ...]\n' + + ' ...\n' + + 'not ok 2 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage(null) + "] message: '" + getNonFunctionMessage(null) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage(null) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 3 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage(true) + "] message: '" + getNonFunctionMessage(true) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage(true) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 4 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage(false) + "] message: '" + getNonFunctionMessage(false) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage(false) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 5 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage('abc') + "] message: '" + getNonFunctionMessage('abc') + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage('abc') + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 6 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage(/a/g) + "] message: '" + getNonFunctionMessage(/a/g) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage(/a/g) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 7 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage([]) + "] message: '" + getNonFunctionMessage([]) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage([]) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + 'not ok 8 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' undefined\n' + + ' actual: |-\n' + + " { [TypeError: " + getNonFunctionMessage({}) + "] message: '" + getNonFunctionMessage({}) + "' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' TypeError: ' + getNonFunctionMessage({}) + '\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... 
stack stripped ...]\n' + + ' ...\n' + + '# function\n' + + 'not ok 9 should throw\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: undefined\n' + + ' actual: undefined\n' + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should throw\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '# custom error messages\n' + + 'ok 10 "message" is enumerable\n' + + "ok 11 { custom: 'error', message: 'message' }\n" + + 'ok 12 getter is still the same\n' + + '# throws null\n' + + 'ok 13 throws null\n' + + '# wrong type of error\n' + + 'not ok 14 throws actual\n' + + ' ---\n' + + ' operator: throws\n' + + ' expected: |-\n' + + ' [Function: TypeError]\n' + + ' actual: |-\n' + + " { [RangeError: actual!] message: 'actual!' }\n" + + ' at: Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' RangeError: actual!\n' + + ' at Test. ($TEST/throws.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n1..14\n' + + '# tests 14\n' + + '# pass 4\n' + + '# fail 10\n' + ); + })); + + test('non functions', function (t) { + t.plan(8); + t.throws(); + t.throws(null); + t.throws(true); + t.throws(false); + t.throws('abc'); + t.throws(/a/g); + t.throws([]); + t.throws({}); + }); + + test('function', function (t) { + t.plan(1); + t.throws(function () {}); + }); + + test('custom error messages', function (t) { + t.plan(3); + t.equal(Object.prototype.propertyIsEnumerable.call(messageGetterError, 'message'), true, '"message" is enumerable'); + t.throws(thrower, "{ custom: 'error', message: 'message' }"); + t.equal(Object.getOwnPropertyDescriptor(messageGetterError, 'message').get, getter, 'getter is still the same'); + }); + + test('throws null', function (t) { + t.plan(1); + t.throws(function () { throw null; }, 'throws null'); + t.end(); + }); + + test('wrong type of error', function (t) { + t.plan(1); + var actual = new RangeError('actual!'); + t.throws(function () { throw actual; }, TypeError, 'throws actual'); + t.end(); + }); +}); diff --git a/node_modules/tape/test/timeout.js b/node_modules/tape/test/timeout.js new file mode 100644 index 0000000..b74b11a --- /dev/null +++ b/node_modules/tape/test/timeout.js @@ -0,0 +1,15 @@ +var test = require('../'); +var ran = 0; + +test('timeout', function (t) { + t.pass('this should run'); + ran++; + setTimeout(function () { + t.end(); + }, 100); +}); + +test('should still run', { timeout: 50 }, function (t) { + t.equal(ran, 1); + t.end(); +}); diff --git a/node_modules/tape/test/timeoutAfter.js b/node_modules/tape/test/timeoutAfter.js new file mode 100644 index 0000000..80752d2 --- /dev/null +++ b/node_modules/tape/test/timeoutAfter.js @@ -0,0 +1,36 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('timeoutAfter test', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# timeoutAfter', + 'not ok 1 test timed out after 1ms', + ' ---', + ' operator: fail', + ' stack: |-', + ' Error: test timed out after 1ms', + ' [... 
stack stripped ...]', + ' ...', + '', + '1..1', + '# tests 1', + '# pass 0', + '# fail 1' + ].join('\n') + '\n'); + }; + + test.createStream().pipe(concat(tc)); + + test('timeoutAfter', function (t) { + t.plan(1); + t.timeoutAfter(1); + }); +}); diff --git a/node_modules/tape/test/todo.js b/node_modules/tape/test/todo.js new file mode 100644 index 0000000..73a8f6a --- /dev/null +++ b/node_modules/tape/test/todo.js @@ -0,0 +1,42 @@ +var tap = require('tap'); +var tape = require('../'); +var concat = require('concat-stream'); + +var common = require('./common'); +var stripFullStack = common.stripFullStack; + +tap.test('tape todo test', function (assert) { + var test = tape.createHarness({ exit: false }); + assert.plan(1); + + test.createStream().pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# success\n' + + 'ok 1 this test runs\n' + + '# TODO failure\n' + + 'not ok 2 should never happen # TODO\n' + + ' ---\n' + + ' operator: fail\n' + + ' at: Test. ($TEST/todo.js:$LINE:$COL)\n' + + ' ...\n' + + '\n' + + '1..2\n' + + '# tests 2\n' + + '# pass 2\n' + + '\n' + + '# ok\n' + ); + })); + + test('success', function (t) { + t.equal(true, true, 'this test runs'); + t.end(); + }); + + test('failure', { todo: true }, function (t) { + t.fail('should never happen'); + t.end(); + }); +}); diff --git a/node_modules/tape/test/todo_explanation.js b/node_modules/tape/test/todo_explanation.js new file mode 100644 index 0000000..5269b08 --- /dev/null +++ b/node_modules/tape/test/todo_explanation.js @@ -0,0 +1,70 @@ +var tap = require('tap'); +var tape = require('../'); +var concat = require('concat-stream'); + +var common = require('./common'); +var stripFullStack = common.stripFullStack; + +tap.test('tape todo test', { todo: process.versions.node.match(/0\.8\.\d+/) ? 'Fails on node 0.8': false }, function (assert) { + var test = tape.createHarness({ exit: false }); + assert.plan(1); + + test.createStream().pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), [ + 'TAP version 13', + '# success', + 'ok 1 this test runs', + '# TODO incomplete test1', + 'not ok 2 check output # TODO', + ' ---', + ' operator: equal', + ' expected: false', + ' actual: true', + ' at: Test. ($TEST/todo_explanation.js:$LINE:$COL)', + ' ...', + 'not ok 3 check vars output # TODO name conflict', + ' ---', + ' operator: equal', + ' expected: 0', + ' actual: 1', + ' at: Test. ($TEST/todo_explanation.js:$LINE:$COL)', + ' ...', + '# incomplete test2', + 'not ok 4 run openssl # TODO installer needs fix', + ' ---', + ' operator: fail', + ' at: Test. 
($TEST/todo_explanation.js:$LINE:$COL)', + ' ...', + '# TODO passing test', + '', + '1..4', + '# tests 4', + '# pass 4', + '', + '# ok', + '' + ].join('\n') + ); + })); + + test('success', function (t) { + t.equal(true, true, 'this test runs'); + t.end(); + }); + + test('incomplete test1', { todo: true }, function (t) { + t.equal(true, false, 'check output'); + t.equal(1, 0, 'check vars output', { todo: 'name conflict' }); + t.end(); + }); + + test('incomplete test2', function (t) { + t.fail('run openssl', { todo: 'installer needs fix' }); + t.end(); + }); + + test('passing test', { todo: 'yet incomplete' }, function (t) { + t.end(); + }); +}); diff --git a/node_modules/tape/test/todo_single.js b/node_modules/tape/test/todo_single.js new file mode 100644 index 0000000..48cd287 --- /dev/null +++ b/node_modules/tape/test/todo_single.js @@ -0,0 +1,37 @@ +var tap = require('tap'); +var tape = require('../'); +var concat = require('concat-stream'); + +var common = require('./common'); +var stripFullStack = common.stripFullStack; + +tap.test('tape todo test', function (assert) { + var test = tape.createHarness({ exit: false }); + assert.plan(1); + + test.createStream().pipe(concat(function (body) { + assert.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# TODO failure\n' + + 'not ok 1 should be equal # TODO\n' + + ' ---\n' + + ' operator: equal\n' + + ' expected: false\n' + + ' actual: true\n' + + ' at: Test. ($TEST/todo_single.js:$LINE:$COL)\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 1\n' + + '\n' + + '# ok\n' + ); + })); + + test('failure', { todo: true }, function (t) { + t.equal(true, false); + t.end(); + }); +}); diff --git a/node_modules/tape/test/too_many.js b/node_modules/tape/test/too_many.js new file mode 100644 index 0000000..a3a780a --- /dev/null +++ b/node_modules/tape/test/too_many.js @@ -0,0 +1,78 @@ +var falafel = require('falafel'); +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness({ exit: false }); + var tc = function (rows) { + tt.same(stripFullStack(rows.toString('utf8')), [ + 'TAP version 13', + '# array', + 'ok 1 should be equivalent', + 'ok 2 should be equivalent', + 'ok 3 should be equivalent', + 'ok 4 should be equivalent', + 'not ok 5 plan != count', + ' ---', + ' operator: fail', + ' expected: 3', + ' actual: 4', + ' at: Test. ($TEST/too_many.js:$LINE:$COL)', + ' stack: |-', + ' Error: plan != count', + ' [... stack stripped ...]', + ' at $TEST/too_many.js:$LINE:$COL', + ' at eval (eval at ($TEST/too_many.js:$LINE:$COL))', + ' at eval (eval at ($TEST/too_many.js:$LINE:$COL))', + ' at Test. ($TEST/too_many.js:$LINE:$COL)', + ' [... 
stack stripped ...]', + ' ...', + 'ok 6 should be equivalent', + '', + '1..6', + '# tests 6', + '# pass 5', + '# fail 1' + ].join('\n') + '\n'); + }; + + test.createStream().pipe(concat(tc)); + + test('array', function (t) { + t.plan(3); + + var src = '(' + function () { + var xs = [ 1, 2, [ 3, 4 ] ]; + var ys = [ 5, 6 ]; + g([ xs, ys ]); + } + ')()'; + + var output = falafel(src, function (node) { + if (node.type === 'ArrayExpression') { + node.update('fn(' + node.source() + ')'); + } + }); + + var arrays = [ + [ 3, 4 ], + [ 1, 2, [ 3, 4 ] ], + [ 5, 6 ], + [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ], + ]; + + Function(['fn','g'], output)( + function (xs) { + t.same(arrays.shift(), xs); + return xs; + }, + function (xs) { + t.same(xs, [ [ 1, 2, [ 3, 4 ] ], [ 5, 6 ] ]); + } + ); + }); +}); diff --git a/node_modules/tape/test/undef.js b/node_modules/tape/test/undef.js new file mode 100644 index 0000000..52759ac --- /dev/null +++ b/node_modules/tape/test/undef.js @@ -0,0 +1,42 @@ +var tape = require('../'); +var tap = require('tap'); +var concat = require('concat-stream'); + +var stripFullStack = require('./common').stripFullStack; + +tap.test('array test', function (tt) { + tt.plan(1); + + var test = tape.createHarness(); + test.createStream().pipe(concat(function (body) { + tt.equal( + stripFullStack(body.toString('utf8')), + 'TAP version 13\n' + + '# undef\n' + + 'not ok 1 should be equivalent\n' + + ' ---\n' + + ' operator: deepEqual\n' + + ' expected: |-\n' + + ' { beep: undefined }\n' + + ' actual: |-\n' + + ' {}\n' + + ' at: Test. ($TEST/undef.js:$LINE:$COL)\n' + + ' stack: |-\n' + + ' Error: should be equivalent\n' + + ' [... stack stripped ...]\n' + + ' at Test. ($TEST/undef.js:$LINE:$COL)\n' + + ' [... stack stripped ...]\n' + + ' ...\n' + + '\n' + + '1..1\n' + + '# tests 1\n' + + '# pass 0\n' + + '# fail 1\n' + ); + })); + + test('undef', function (t) { + t.plan(1); + t.deepEqual({}, { beep: undefined }); + }); +}); diff --git a/node_modules/through/.travis.yml b/node_modules/through/.travis.yml new file mode 100644 index 0000000..c693a93 --- /dev/null +++ b/node_modules/through/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - 0.6 + - 0.8 + - "0.10" diff --git a/node_modules/through/LICENSE.APACHE2 b/node_modules/through/LICENSE.APACHE2 new file mode 100644 index 0000000..6366c04 --- /dev/null +++ b/node_modules/through/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/node_modules/through/LICENSE.MIT b/node_modules/through/LICENSE.MIT new file mode 100644 index 0000000..6eafbd7 --- /dev/null +++ b/node_modules/through/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/through/index.js b/node_modules/through/index.js new file mode 100644 index 0000000..ca5fc59 --- /dev/null +++ b/node_modules/through/index.js @@ -0,0 +1,108 @@ +var Stream = require('stream') + +// through +// +// a stream that does nothing but re-emit the input. +// useful for aggregating a series of changing but not ending streams into one stream) + +exports = module.exports = through +through.through = through + +//create a readable writable stream. + +function through (write, end, opts) { + write = write || function (data) { this.queue(data) } + end = end || function () { this.queue(null) } + + var ended = false, destroyed = false, buffer = [], _ended = false + var stream = new Stream() + stream.readable = stream.writable = true + stream.paused = false + +// stream.autoPause = !(opts && opts.autoPause === false) + stream.autoDestroy = !(opts && opts.autoDestroy === false) + + stream.write = function (data) { + write.call(this, data) + return !stream.paused + } + + function drain() { + while(buffer.length && !stream.paused) { + var data = buffer.shift() + if(null === data) + return stream.emit('end') + else + stream.emit('data', data) + } + } + + stream.queue = stream.push = function (data) { +// console.error(ended) + if(_ended) return stream + if(data === null) _ended = true + buffer.push(data) + drain() + return stream + } + + //this will be registered as the first 'end' listener + //must call destroy next tick, to make sure we're after any + //stream piped from here. + //this is only a problem if end is not emitted synchronously. 
+ //a nicer way to do this is to make sure this is the last listener for 'end' + + stream.on('end', function () { + stream.readable = false + if(!stream.writable && stream.autoDestroy) + process.nextTick(function () { + stream.destroy() + }) + }) + + function _end () { + stream.writable = false + end.call(stream) + if(!stream.readable && stream.autoDestroy) + stream.destroy() + } + + stream.end = function (data) { + if(ended) return + ended = true + if(arguments.length) stream.write(data) + _end() // will emit or queue + return stream + } + + stream.destroy = function () { + if(destroyed) return + destroyed = true + ended = true + buffer.length = 0 + stream.writable = stream.readable = false + stream.emit('close') + return stream + } + + stream.pause = function () { + if(stream.paused) return + stream.paused = true + return stream + } + + stream.resume = function () { + if(stream.paused) { + stream.paused = false + stream.emit('resume') + } + drain() + //may have become paused again, + //as drain emits 'data'. + if(!stream.paused) + stream.emit('drain') + return stream + } + return stream +} + diff --git a/node_modules/through/package.json b/node_modules/through/package.json new file mode 100644 index 0000000..96b493a --- /dev/null +++ b/node_modules/through/package.json @@ -0,0 +1,70 @@ +{ + "_from": "through@2", + "_id": "through@2.3.8", + "_inBundle": false, + "_integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "_location": "/through", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "through@2", + "name": "through", + "escapedName": "through", + "rawSpec": "2", + "saveSpec": null, + "fetchSpec": "2" + }, + "_requiredBy": [ + "/resumer", + "/split", + "/tape" + ], + "_resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "_shasum": "0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", + "_spec": "through@2", + "_where": "/home/kareml/Desktop/SMS/node_modules/split", + "author": { + "name": "Dominic Tarr", + "email": "dominic.tarr@gmail.com", + "url": "dominictarr.com" + }, + "bugs": { + "url": "https://github.com/dominictarr/through/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "simplified stream construction", + "devDependencies": { + "from": "~0.1.3", + "stream-spec": "~0.3.5", + "tape": "~2.3.2" + }, + "homepage": "https://github.com/dominictarr/through", + "keywords": [ + "stream", + "streams", + "user-streams", + "pipe" + ], + "license": "MIT", + "main": "index.js", + "name": "through", + "repository": { + "type": "git", + "url": "git+https://github.com/dominictarr/through.git" + }, + "scripts": { + "test": "set -e; for t in test/*.js; do node $t; done" + }, + "testling": { + "browsers": [ + "ie/8..latest", + "ff/15..latest", + "chrome/20..latest", + "safari/5.1..latest" + ], + "files": "test/*.js" + }, + "version": "2.3.8" +} diff --git a/node_modules/through/readme.markdown b/node_modules/through/readme.markdown new file mode 100644 index 0000000..cb34c81 --- /dev/null +++ b/node_modules/through/readme.markdown @@ -0,0 +1,64 @@ +#through + +[![build status](https://secure.travis-ci.org/dominictarr/through.png)](http://travis-ci.org/dominictarr/through) +[![testling badge](https://ci.testling.com/dominictarr/through.png)](https://ci.testling.com/dominictarr/through) + +Easy way to create a `Stream` that is both `readable` and `writable`. + +* Pass in optional `write` and `end` methods. 
+* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`. +* Use `this.pause()` and `this.resume()` to manage flow. +* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`). + +This function is the basis for most of the synchronous streams in +[event-stream](http://github.com/dominictarr/event-stream). + +``` js +var through = require('through') + +through(function write(data) { + this.queue(data) //data *must* not be null + }, + function end () { //optional + this.queue(null) + }) +``` + +Or, can also be used _without_ buffering on pause, use `this.emit('data', data)`, +and this.emit('end') + +``` js +var through = require('through') + +through(function write(data) { + this.emit('data', data) + //this.pause() + }, + function end () { //optional + this.emit('end') + }) +``` + +## Extended Options + +You will probably not need these 99% of the time. + +### autoDestroy=false + +By default, `through` emits close when the writable +and readable side of the stream has ended. +If that is not desired, set `autoDestroy=false`. + +``` js +var through = require('through') + +//like this +var ts = through(write, end, {autoDestroy: false}) +//or like this +var ts = through(write, end) +ts.autoDestroy = false +``` + +## License + +MIT / Apache2 diff --git a/node_modules/through/test/async.js b/node_modules/through/test/async.js new file mode 100644 index 0000000..46bdbae --- /dev/null +++ b/node_modules/through/test/async.js @@ -0,0 +1,28 @@ +var from = require('from') +var through = require('../') + +var tape = require('tape') + +tape('simple async example', function (t) { + + var n = 0, expected = [1,2,3,4,5], actual = [] + from(expected) + .pipe(through(function(data) { + this.pause() + n ++ + setTimeout(function(){ + console.log('pushing data', data) + this.push(data) + this.resume() + }.bind(this), 300) + })).pipe(through(function(data) { + console.log('pushing data second time', data); + this.push(data) + })).on('data', function (d) { + actual.push(d) + }).on('end', function() { + t.deepEqual(actual, expected) + t.end() + }) + +}) diff --git a/node_modules/through/test/auto-destroy.js b/node_modules/through/test/auto-destroy.js new file mode 100644 index 0000000..9a8fd00 --- /dev/null +++ b/node_modules/through/test/auto-destroy.js @@ -0,0 +1,30 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + ts.autoDestroy = false + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.notOk(closed) + ts.destroy() + assert.ok(closed) + assert.end() +}) + diff --git a/node_modules/through/test/buffering.js b/node_modules/through/test/buffering.js new file mode 100644 index 0000000..b0084bf --- /dev/null +++ b/node_modules/through/test/buffering.js @@ -0,0 +1,71 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. 
+ +test('buffering', function(assert) { + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + assert.deepEqual(actual, [1, 2, 3]) + ts.pause() + ts.write(4) + ts.write(5) + ts.write(6) + assert.deepEqual(actual, [1, 2, 3]) + ts.resume() + assert.deepEqual(actual, [1, 2, 3, 4, 5, 6]) + ts.pause() + ts.end() + assert.ok(!ended) + ts.resume() + assert.ok(ended) + assert.end() +}) + +test('buffering has data in queue, when ends', function (assert) { + + /* + * If stream ends while paused with data in the queue, + * stream should still emit end after all data is written + * on resume. + */ + + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.pause() + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.deepEqual(actual, [], 'no data written yet, still paused') + assert.ok(!ended, 'end not emitted yet, still paused') + ts.resume() + assert.deepEqual(actual, [1, 2, 3], 'resumed, all data should be delivered') + assert.ok(ended, 'end should be emitted once all data was delivered') + assert.end(); +}) diff --git a/node_modules/through/test/end.js b/node_modules/through/test/end.js new file mode 100644 index 0000000..fa113f5 --- /dev/null +++ b/node_modules/through/test/end.js @@ -0,0 +1,45 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.ok(closed) + assert.end() +}) + +test('end only once', function (t) { + + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + t.equal(ended, false) + ended = true + }) + + ts.queue(null) + ts.queue(null) + ts.queue(null) + + ts.resume() + + t.end() +}) diff --git a/node_modules/through/test/index.js b/node_modules/through/test/index.js new file mode 100644 index 0000000..96da82f --- /dev/null +++ b/node_modules/through/test/index.js @@ -0,0 +1,133 @@ + +var test = require('tape') +var spec = require('stream-spec') +var through = require('../') + +/* + I'm using these two functions, and not streams and pipe + so there is less to break. 
if this test fails it must be + the implementation of _through_ +*/ + +function write(array, stream) { + array = array.slice() + function next() { + while(array.length) + if(stream.write(array.shift()) === false) + return stream.once('drain', next) + + stream.end() + } + + next() +} + +function read(stream, callback) { + var actual = [] + stream.on('data', function (data) { + actual.push(data) + }) + stream.once('end', function () { + callback(null, actual) + }) + stream.once('error', function (err) { + callback(err) + }) +} + +test('simple defaults', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through() + var s = spec(t).through().pausable() + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}); + +test('simple functions', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through(function (data) { + this.emit('data', data*2) + }) + var s = spec(t).through().pausable() + + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected.map(function (data) { + return data*2 + })) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}) + +test('pauses', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l) //Math.random()) + + var t = through() + + var s = spec(t) + .through() + .pausable() + + t.on('data', function () { + if(Math.random() > 0.1) return + t.pause() + process.nextTick(function () { + t.resume() + }) + }) + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + }) + + t.on('close', function () { + s.validate() + assert.end() + }) + + write(expected, t) +}) + +test('does not soft-end on `undefined`', function(assert) { + var stream = through() + , count = 0 + + stream.on('data', function (data) { + count++ + }) + + stream.write(undefined) + stream.write(undefined) + + assert.equal(count, 2) + + assert.end() +}) diff --git a/node_modules/through2/LICENSE.md b/node_modules/through2/LICENSE.md new file mode 100644 index 0000000..a2429b6 --- /dev/null +++ b/node_modules/through2/LICENSE.md @@ -0,0 +1,9 @@ +# The MIT License (MIT) + +**Copyright (c) Rod Vagg (the "Original Author") and additional contributors** + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/through2/README.md b/node_modules/through2/README.md new file mode 100644 index 0000000..b5e44c7 --- /dev/null +++ b/node_modules/through2/README.md @@ -0,0 +1,134 @@ +# through2 + +[![NPM](https://nodei.co/npm/through2.png?downloads&downloadRank)](https://nodei.co/npm/through2/) + +**A tiny wrapper around Node streams.Transform (Streams2/3) to avoid explicit subclassing noise** + +Inspired by [Dominic Tarr](https://github.com/dominictarr)'s [through](https://github.com/dominictarr/through) in that it's so much easier to make a stream out of a function than it is to set up the prototype chain properly: `through(function (chunk) { ... })`. + +Note: As 2.x.x this module starts using **Streams3** instead of Stream2. To continue using a Streams2 version use `npm install through2@0` to fetch the latest version of 0.x.x. More information about Streams2 vs Streams3 and recommendations see the article **[Why I don't use Node's core 'stream' module](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html)**. + +```js +fs.createReadStream('ex.txt') + .pipe(through2(function (chunk, enc, callback) { + for (var i = 0; i < chunk.length; i++) + if (chunk[i] == 97) + chunk[i] = 122 // swap 'a' for 'z' + + this.push(chunk) + + callback() + })) + .pipe(fs.createWriteStream('out.txt')) + .on('finish', () => doSomethingSpecial()) +``` + +Or object streams: + +```js +var all = [] + +fs.createReadStream('data.csv') + .pipe(csv2()) + .pipe(through2.obj(function (chunk, enc, callback) { + var data = { + name : chunk[0] + , address : chunk[3] + , phone : chunk[10] + } + this.push(data) + + callback() + })) + .on('data', (data) => { + all.push(data) + }) + .on('end', () => { + doSomethingSpecial(all) + }) +``` + +Note that `through2.obj(fn)` is a convenience wrapper around `through2({ objectMode: true }, fn)`. + +## API + +through2([ options, ] [ transformFunction ] [, flushFunction ]) + +Consult the **[stream.Transform](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform)** documentation for the exact rules of the `transformFunction` (i.e. `this._transform`) and the optional `flushFunction` (i.e. `this._flush`). + +### options + +The options argument is optional and is passed straight through to `stream.Transform`. So you can use `objectMode:true` if you are processing non-binary streams (or just use `through2.obj()`). + +The `options` argument is first, unlike standard convention, because if I'm passing in an anonymous function then I'd prefer for the options argument to not get lost at the end of the call: + +```js +fs.createReadStream('/tmp/important.dat') + .pipe(through2({ objectMode: true, allowHalfOpen: false }, + (chunk, enc, cb) => { + cb(null, 'wut?') // note we can use the second argument on the callback + // to provide data as an alternative to this.push('wut?') + } + ) + .pipe(fs.createWriteStream('/tmp/wut.txt')) +``` + +### transformFunction + +The `transformFunction` must have the following signature: `function (chunk, encoding, callback) {}`. A minimal implementation should call the `callback` function to indicate that the transformation is done, even if that transformation means discarding the chunk. + +To queue a new chunk, call `this.push(chunk)`—this can be called as many times as required before the `callback()` if you have multiple pieces to send on. + +Alternatively, you may use `callback(err, chunk)` as shorthand for emitting a single chunk or an error. 
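+
+For example, a minimal sketch of the `callback(err, chunk)` shorthand (the `upperCaser` name and the stdin/stdout wiring here are illustrative, not part of through2 itself):
+
+```js
+var through2 = require('through2')
+
+// queue the transformed chunk via the callback instead of this.push()
+var upperCaser = through2(function (chunk, enc, callback) {
+  callback(null, chunk.toString().toUpperCase())
+})
+
+process.stdin.pipe(upperCaser).pipe(process.stdout)
+```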
+ +If you **do not provide a `transformFunction`** then you will get a simple pass-through stream. + +### flushFunction + +The optional `flushFunction` is provided as the last argument (2nd or 3rd, depending on whether you've supplied options) is called just prior to the stream ending. Can be used to finish up any processing that may be in progress. + +```js +fs.createReadStream('/tmp/important.dat') + .pipe(through2( + (chunk, enc, cb) => cb(null, chunk), // transform is a noop + function (cb) { // flush function + this.push('tacking on an extra buffer to the end'); + cb(); + } + )) + .pipe(fs.createWriteStream('/tmp/wut.txt')); +``` + +through2.ctor([ options, ] transformFunction[, flushFunction ]) + +Instead of returning a `stream.Transform` instance, `through2.ctor()` returns a **constructor** for a custom Transform. This is useful when you want to use the same transform logic in multiple instances. + +```js +var FToC = through2.ctor({objectMode: true}, function (record, encoding, callback) { + if (record.temp != null && record.unit == "F") { + record.temp = ( ( record.temp - 32 ) * 5 ) / 9 + record.unit = "C" + } + this.push(record) + callback() +}) + +// Create instances of FToC like so: +var converter = new FToC() +// Or: +var converter = FToC() +// Or specify/override options when you instantiate, if you prefer: +var converter = FToC({objectMode: true}) +``` + +## See Also + + - [through2-map](https://github.com/brycebaril/through2-map) - Array.prototype.map analog for streams. + - [through2-filter](https://github.com/brycebaril/through2-filter) - Array.prototype.filter analog for streams. + - [through2-reduce](https://github.com/brycebaril/through2-reduce) - Array.prototype.reduce analog for streams. + - [through2-spy](https://github.com/brycebaril/through2-spy) - Wrapper for simple stream.PassThrough spies. + - the [mississippi stream utility collection](https://github.com/maxogden/mississippi) includes `through2` as well as many more useful stream modules similar to this one + +## License + +**through2** is Copyright (c) Rod Vagg [@rvagg](https://twitter.com/rvagg) and additional contributors and licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. 
diff --git a/node_modules/through2/node_modules/process-nextick-args/index.js b/node_modules/through2/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..3eecf11 --- /dev/null +++ b/node_modules/through2/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/node_modules/through2/node_modules/process-nextick-args/license.md b/node_modules/through2/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/node_modules/through2/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/through2/node_modules/process-nextick-args/package.json b/node_modules/through2/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..88321f0 --- /dev/null +++ b/node_modules/through2/node_modules/process-nextick-args/package.json @@ -0,0 +1,50 @@ +{ + "_from": "process-nextick-args@~2.0.0", + "_id": "process-nextick-args@2.0.1", + "_inBundle": false, + "_integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "_location": "/through2/process-nextick-args", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "process-nextick-args@~2.0.0", + "name": "process-nextick-args", + "escapedName": "process-nextick-args", + "rawSpec": "~2.0.0", + "saveSpec": null, + "fetchSpec": "~2.0.0" + }, + "_requiredBy": [ + "/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "_shasum": "7820d9b16120cc55ca9ae7792680ae7dba6d7fe2", + "_spec": "process-nextick-args@~2.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/through2/node_modules/readable-stream", + "author": "", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "process.nextTick but always with args", + "devDependencies": { + "tap": "~0.2.6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "license": "MIT", + "main": "index.js", + "name": "process-nextick-args", + "repository": { + "type": "git", + "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "2.0.1" +} diff --git a/node_modules/through2/node_modules/process-nextick-args/readme.md b/node_modules/through2/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..ecb432c --- /dev/null +++ b/node_modules/through2/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/through2/node_modules/readable-stream/.travis.yml b/node_modules/through2/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..f62cdac --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: 
NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/through2/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/through2/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/through2/node_modules/readable-stream/GOVERNANCE.md b/node_modules/through2/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. 
+ +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. 
Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/through2/node_modules/readable-stream/LICENSE b/node_modules/through2/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/through2/node_modules/readable-stream/README.md b/node_modules/through2/node_modules/readable-stream/README.md new file mode 100644 index 0000000..23fe3f3 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +
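To make the install-and-use advice earlier in this README concrete, here is a minimal sketch (not part of the upstream README; the variable names are just for illustration) of depending on `readable-stream` instead of the core `stream` module, so stream behaviour is pinned by the package version rather than the Node.js runtime:

```js
// Require the userland package instead of the core "stream" module; the
// exported classes (Readable, Writable, Duplex, Transform, PassThrough)
// mirror the core API but are versioned with the package.
var Readable = require('readable-stream').Readable;

// A tiny Readable that emits three lines and then ends.
var n = 0;
var source = new Readable({
  read: function () {
    n += 1;
    if (n <= 3) this.push('chunk ' + n + '\n');
    else this.push(null); // signal end-of-stream
  }
});

source.pipe(process.stdout);
```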
+## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/through2/node_modules/readable-stream/duplex-browser.js b/node_modules/through2/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000..f8b2db8 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/through2/node_modules/readable-stream/duplex.js b/node_modules/through2/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..46924cb --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..57003c3 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
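As an aside (not part of the vendored file), the "parasitic" inheritance described in the comment above amounts to copying onto the Duplex prototype any Writable prototype methods that Readable did not already provide. A standalone sketch of the same pattern, using hypothetical `Base`/`Mixin` names:

```js
// Simplified illustration: single prototype chain plus method copying.
function Base() {}
Base.prototype.read = function () { return 'read'; };

function Mixin() {}
Mixin.prototype.write = function () { return 'write'; };
Mixin.prototype.read = function () { return 'mixin read'; };

function Both() {
  Base.call(this);
  Mixin.call(this);
}
// Primary inheritance chain: Both -> Base (as Duplex -> Readable).
Both.prototype = Object.create(Base.prototype);
Both.prototype.constructor = Both;

// "Parasitic" part: copy any Mixin methods Base did not define.
Object.keys(Mixin.prototype).forEach(function (method) {
  if (!Both.prototype[method]) Both.prototype[method] = Mixin.prototype[method];
});

var b = new Both();
console.log(b.read());  // 'read'  (Base wins, as Readable does in Duplex)
console.log(b.write()); // 'write' (copied from Mixin, as from Writable)
```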
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..612edb4 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..0f80764 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
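For context (this example is not part of the vendored sources), the PassThrough class defined above is typically used as a neutral junction point: every written chunk comes back out unchanged, which makes it handy for observing or re-plumbing a pipeline. A small usage sketch against the published readable-stream API; the names `tap` and `sink` are only illustrative:

```js
var stream = require('readable-stream');
var fs = require('fs');

// A PassThrough changes nothing; it simply forwards every written chunk.
var tap = new stream.PassThrough();

// Observe the bytes flowing through without altering them.
var total = 0;
tap.on('data', function (chunk) { total += chunk.length; });
tap.on('end', function () { console.log('saw ' + total + ' bytes'); });

// A throw-away sink so the pipeline has somewhere to drain into.
var sink = new stream.Writable({
  write: function (chunk, enc, cb) { cb(); }
});

// Splice the tap between an existing source and sink.
fs.createReadStream(__filename).pipe(tap).pipe(sink);
```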
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
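To illustrate the contract described in the comment above (a sketch, not part of this file): a producer should stop pushing once push() returns false and wait until _read() is called again, just as a writer waits for 'drain' after write() returns false. A hypothetical object-mode source that feeds numbers in:

```js
var Readable = require('readable-stream').Readable;

// Emits the integers 0..9, pausing whenever the internal buffer is full.
var next = 0;
var source = new Readable({
  objectMode: true,
  highWaterMark: 4,
  read: function () {
    // Keep pushing until push() returns false (buffer reached highWaterMark)
    // or the source is exhausted (push(null) signals end-of-stream).
    while (next < 10) {
      if (!this.push(next++)) return; // stop; _read() will be called again
    }
    this.push(null);
  }
});

source.on('data', function (n) { console.log(n); });
```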
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
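As an aside (not part of the vendored file), the pause()/resume() pair referred to above is still the usual way for a 'data'-event consumer to apply manual flow control. A small hypothetical sketch of throttling a fast source this way:

```js
var Readable = require('readable-stream').Readable;

var i = 0;
var fast = new Readable({
  objectMode: true,
  read: function () {
    this.push(i < 100 ? i++ : null); // emit 0..99, then end
  }
});

fast.on('data', function (n) {
  // Switch the stream out of flowing mode while doing slow work,
  // then resume once the work is done.
  fast.pause();
  setTimeout(function () {
    console.log('processed', n);
    fast.resume();
  }, 10);
});

fast.on('end', function () { console.log('done'); });
```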
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..fcfc105 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
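To illustrate the contract spelled out in the comment above (an example, not part of the vendored file): an implementation overrides _transform, calls push() zero or more times for its output, and then invokes the callback exactly once. A minimal sketch using the constructor options this version supports:

```js
var Transform = require('readable-stream').Transform;

// Upper-cases whatever text flows through it.
var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    // push() hands transformed data to the readable side; cb() tells the
    // writable side this chunk has been fully consumed.
    this.push(chunk.toString().toUpperCase());
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```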
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..b0b0220 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
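The comment above summarises the whole Writable contract: supply an asynchronous _write(chunk, encoding, cb) and the base class handles buffering and 'drain' emission. A minimal sketch (not part of the vendored file; the byte-counting sink is hypothetical):

```js
var Writable = require('readable-stream').Writable;

var bytes = 0;
var counter = new Writable({
  write: function (chunk, encoding, cb) {
    // Pretend the underlying resource is slow; the base class buffers
    // further write() calls and emits 'drain' when it is safe to continue.
    bytes += chunk.length;
    setImmediate(cb);
  }
});

counter.on('finish', function () {
  console.log('wrote ' + bytes + ' bytes');
});

counter.write('hello ');
counter.end('world');
```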
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
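The `Symbol.hasInstance` override and the constructor comments above explain why `instanceof Writable` still works for Duplex streams even though their prototype chain only points to Readable. A short sketch of the observable effect, assuming a Node version with `Symbol.hasInstance` support:

```js
var stream = require('readable-stream');

var d = new stream.Duplex({
  read: function () {},
  write: function (chunk, encoding, cb) { cb(); }
});

// Duplex.prototype does not chain through Writable.prototype, but the
// custom Symbol.hasInstance check falls back to _writableState detection.
console.log(d instanceof stream.Readable); // true (plain prototype chain)
console.log(d instanceof stream.Writable); // true (via the override above)
```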
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
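The `onwriteDrain` comment above describes the back-pressure contract from the caller's side: `write()` returns `false` once the buffered length reaches `highWaterMark`, and `'drain'` is emitted only after the buffer empties, on a later tick so the caller can attach a listener first. A minimal sketch, assuming the vendored `readable-stream` export and an artificially small `highWaterMark`:

```js
var Writable = require('readable-stream').Writable;

var slow = new Writable({
  highWaterMark: 4, // tiny buffer so back-pressure is visible immediately
  write: function (chunk, encoding, cb) {
    setTimeout(cb, 100); // pretend the destination is slow
  }
});

console.log(slow.write('aaaa')); // false: buffered length has reached highWaterMark
slow.once('drain', function () {
  console.log('buffer drained, safe to write again');
});
```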
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..aefc68b --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + 
var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..5a0a0d8 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ 
b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/through2/node_modules/readable-stream/package.json b/node_modules/through2/node_modules/readable-stream/package.json new file mode 100644 index 0000000..e950af9 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@~2.3.6", + "_id": "readable-stream@2.3.7", + "_inBundle": false, + "_integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "_location": "/through2/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@~2.3.6", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "~2.3.6", + "saveSpec": null, + "fetchSpec": "~2.3.6" + }, + "_requiredBy": [ + "/through2" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "_shasum": "1eca1cf711aef814c04f62252a36a62f6cb23b57", + "_spec": "readable-stream@~2.3.6", + "_where": "/home/kareml/Desktop/SMS/node_modules/through2", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.7" +} diff --git a/node_modules/through2/node_modules/readable-stream/passthrough.js b/node_modules/through2/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..ffd791d --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/through2/node_modules/readable-stream/readable-browser.js 
b/node_modules/through2/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..e503725 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/through2/node_modules/readable-stream/readable.js b/node_modules/through2/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..ec89ec5 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/through2/node_modules/readable-stream/transform.js b/node_modules/through2/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..b1baba2 --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/through2/node_modules/readable-stream/writable-browser.js b/node_modules/through2/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000..ebdde6a --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/through2/node_modules/readable-stream/writable.js b/node_modules/through2/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..3211a6f --- /dev/null +++ b/node_modules/through2/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/through2/node_modules/string_decoder/.travis.yml b/node_modules/through2/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000..3347a72 --- /dev/null +++ b/node_modules/through2/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 
'0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/through2/node_modules/string_decoder/LICENSE b/node_modules/through2/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/through2/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + diff --git a/node_modules/through2/node_modules/string_decoder/README.md b/node_modules/through2/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/through2/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/through2/node_modules/string_decoder/lib/string_decoder.js b/node_modules/through2/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/through2/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
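The comment above covers the UTF-16LE edge case: even with an even byte count, input can end on a high surrogate, so the last code unit is held back until its pair arrives. A small sketch through the public `StringDecoder` API (the bytes are the UTF-16LE encoding of U+10437, chosen only for illustration):

```js
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf16le');

// U+10437 is the surrogate pair 0xD801 0xDC37 -> bytes 01 D8 37 DC in UTF-16LE.
console.log(decoder.write(Buffer.from([0x01, 0xD8]))); // '' - lone high surrogate is buffered
console.log(decoder.write(Buffer.from([0x37, 0xDC]))); // the completed character
console.log(decoder.end());                            // '' - nothing left pending
```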
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/through2/node_modules/string_decoder/package.json b/node_modules/through2/node_modules/string_decoder/package.json new file mode 100644 index 0000000..3732bdf --- /dev/null +++ b/node_modules/through2/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/through2/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/through2/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | 
tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/through2/package.json b/node_modules/through2/package.json new file mode 100644 index 0000000..1702bf0 --- /dev/null +++ b/node_modules/through2/package.json @@ -0,0 +1,72 @@ +{ + "_from": "through2@^2.0.0", + "_id": "through2@2.0.5", + "_inBundle": false, + "_integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "_location": "/through2", + "_phantomChildren": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "1.0.0", + "safe-buffer": "5.1.2", + "util-deprecate": "1.0.2" + }, + "_requested": { + "type": "range", + "registry": true, + "raw": "through2@^2.0.0", + "name": "through2", + "escapedName": "through2", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/tap-spec" + ], + "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "_shasum": "01c1e39eb31d07cb7d03a96a70823260b23132cd", + "_spec": "through2@^2.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/tap-spec", + "author": { + "name": "Rod Vagg", + "email": "r@va.gg", + "url": "https://github.com/rvagg" + }, + "bugs": { + "url": "https://github.com/rvagg/through2/issues" + }, + "bundleDependencies": false, + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + }, + "deprecated": false, + "description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise", + "devDependencies": { + "bl": "~2.0.1", + "faucet": "0.0.1", + "nyc": "~13.1.0", + "safe-buffer": "~5.1.2", + "stream-spigot": "~3.0.6", + "tape": "~4.9.1" + }, + "homepage": "https://github.com/rvagg/through2#readme", + "keywords": [ + "stream", + "streams2", + "through", + "transform" + ], + "license": "MIT", + "main": "through2.js", + "name": "through2", + "repository": { + "type": "git", + "url": "git+https://github.com/rvagg/through2.git" + }, + "scripts": { + "test": "node test/test.js | faucet" + }, + "version": "2.0.5" +} diff --git a/node_modules/through2/through2.js b/node_modules/through2/through2.js new file mode 100644 index 0000000..6baa6a1 --- /dev/null +++ b/node_modules/through2/through2.js @@ -0,0 +1,96 @@ +var Transform = require('readable-stream').Transform + , inherits = require('util').inherits + , xtend = require('xtend') + +function DestroyableTransform(opts) { + Transform.call(this, opts) + this._destroyed = false +} + +inherits(DestroyableTransform, Transform) + +DestroyableTransform.prototype.destroy = function(err) { + if (this._destroyed) return + this._destroyed = true + + var self = this + process.nextTick(function() { + if (err) + self.emit('error', err) + self.emit('close') + }) +} + +// a noop _transform function +function noop (chunk, enc, callback) { + callback(null, chunk) +} + + +// create a new export function, used by both the main export and +// the .ctor export, contains common logic for dealing with arguments +function through2 (construct) { + return function (options, transform, flush) { + if (typeof options == 'function') { + flush = transform + transform = options + options = {} + } + + if (typeof transform != 'function') + transform = noop + + if (typeof flush != 'function') + flush = null + + return construct(options, transform, flush) + } +} + + +// main export, just make me a transform stream! 
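The wrapper above normalizes the `(options, transform, flush)` argument forms shared by the main export, `.ctor` and `.obj`. Typical use of the main export, sketched with an illustrative upper-casing transform:

```js
var through2 = require('through2');

process.stdin
  .pipe(through2(function (chunk, encoding, callback) {
    // push the transformed chunk downstream, then ask for the next one
    this.push(chunk.toString().toUpperCase());
    callback();
  }))
  .pipe(process.stdout);
```

`through2.obj(...)` applies the same wrapper with `objectMode: true` preset.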
+module.exports = through2(function (options, transform, flush) { + var t2 = new DestroyableTransform(options) + + t2._transform = transform + + if (flush) + t2._flush = flush + + return t2 +}) + + +// make me a reusable prototype that I can `new`, or implicitly `new` +// with a constructor call +module.exports.ctor = through2(function (options, transform, flush) { + function Through2 (override) { + if (!(this instanceof Through2)) + return new Through2(override) + + this.options = xtend(options, override) + + DestroyableTransform.call(this, this.options) + } + + inherits(Through2, DestroyableTransform) + + Through2.prototype._transform = transform + + if (flush) + Through2.prototype._flush = flush + + return Through2 +}) + + +module.exports.obj = through2(function (options, transform, flush) { + var t2 = new DestroyableTransform(xtend({ objectMode: true, highWaterMark: 16 }, options)) + + t2._transform = transform + + if (flush) + t2._flush = flush + + return t2 +}) diff --git a/node_modules/toidentifier/LICENSE b/node_modules/toidentifier/LICENSE new file mode 100644 index 0000000..de22d15 --- /dev/null +++ b/node_modules/toidentifier/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/toidentifier/README.md b/node_modules/toidentifier/README.md new file mode 100644 index 0000000..7c8794e --- /dev/null +++ b/node_modules/toidentifier/README.md @@ -0,0 +1,61 @@ +# toidentifier + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][codecov-image]][codecov-url] + +> Convert a string of words to a JavaScript identifier + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install toidentifier +``` + +## Example + +```js +var toIdentifier = require('toidentifier') + +console.log(toIdentifier('Bad Request')) +// => "BadRequest" +``` + +## API + +This CommonJS module exports a single default function: `toIdentifier`. + +### toIdentifier(string) + +Given a string as the argument, it will be transformed according to +the following rules and the new string will be returned: + +1. Split into words separated by space characters (`0x20`). +2. Upper case the first character of each word. +3. 
Join the words together with no separator. +4. Remove all non-word (`[0-9a-z_]`) characters. + +## License + +[MIT](LICENSE) + +[codecov-image]: https://img.shields.io/codecov/c/github/component/toidentifier.svg +[codecov-url]: https://codecov.io/gh/component/toidentifier +[downloads-image]: https://img.shields.io/npm/dm/toidentifier.svg +[downloads-url]: https://npmjs.org/package/toidentifier +[npm-image]: https://img.shields.io/npm/v/toidentifier.svg +[npm-url]: https://npmjs.org/package/toidentifier +[travis-image]: https://img.shields.io/travis/component/toidentifier/master.svg +[travis-url]: https://travis-ci.org/component/toidentifier + + +## + +[npm]: https://www.npmjs.com/ + +[yarn]: https://yarnpkg.com/ diff --git a/node_modules/toidentifier/index.js b/node_modules/toidentifier/index.js new file mode 100644 index 0000000..bba5411 --- /dev/null +++ b/node_modules/toidentifier/index.js @@ -0,0 +1,30 @@ +/*! + * toidentifier + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + * @public + */ + +module.exports = toIdentifier + +/** + * Trasform the given string into a JavaScript identifier + * + * @param {string} str + * @returns {string} + * @public + */ + +function toIdentifier (str) { + return str + .split(' ') + .map(function (token) { + return token.slice(0, 1).toUpperCase() + token.slice(1) + }) + .join('') + .replace(/[^ _0-9a-z]/gi, '') +} diff --git a/node_modules/toidentifier/package.json b/node_modules/toidentifier/package.json new file mode 100644 index 0000000..efc7fc1 --- /dev/null +++ b/node_modules/toidentifier/package.json @@ -0,0 +1,76 @@ +{ + "_from": "toidentifier@1.0.0", + "_id": "toidentifier@1.0.0", + "_inBundle": false, + "_integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", + "_location": "/toidentifier", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "toidentifier@1.0.0", + "name": "toidentifier", + "escapedName": "toidentifier", + "rawSpec": "1.0.0", + "saveSpec": null, + "fetchSpec": "1.0.0" + }, + "_requiredBy": [ + "/http-errors" + ], + "_resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "_shasum": "7e1be3470f1e77948bc43d94a3c8f4d7752ba553", + "_spec": "toidentifier@1.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/http-errors", + "author": { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + "bugs": { + "url": "https://github.com/component/toidentifier/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Nick Baugh", + "email": "niftylettuce@gmail.com", + "url": "http://niftylettuce.com/" + } + ], + "deprecated": false, + "description": "Convert a string of words to a JavaScript identifier", + "devDependencies": { + "eslint": "4.19.1", + "eslint-config-standard": "11.0.0", + "eslint-plugin-import": "2.11.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "6.0.1", + "eslint-plugin-promise": "3.7.0", + "eslint-plugin-standard": "3.1.0", + "mocha": "1.21.5", + "nyc": "11.8.0" + }, + "engines": { + "node": ">=0.6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/component/toidentifier#readme", + "license": "MIT", + "name": "toidentifier", + "repository": { + "type": "git", + "url": "git+https://github.com/component/toidentifier.git" + }, + "scripts": { + "lint": "eslint --plugin markdown 
--ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "version": "1.0.0" +} diff --git a/node_modules/trim/.npmignore b/node_modules/trim/.npmignore new file mode 100644 index 0000000..f1250e5 --- /dev/null +++ b/node_modules/trim/.npmignore @@ -0,0 +1,4 @@ +support +test +examples +*.sock diff --git a/node_modules/trim/History.md b/node_modules/trim/History.md new file mode 100644 index 0000000..c8aa68f --- /dev/null +++ b/node_modules/trim/History.md @@ -0,0 +1,5 @@ + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/node_modules/trim/Makefile b/node_modules/trim/Makefile new file mode 100644 index 0000000..4e9c8d3 --- /dev/null +++ b/node_modules/trim/Makefile @@ -0,0 +1,7 @@ + +test: + @./node_modules/.bin/mocha \ + --require should \ + --reporter spec + +.PHONY: test \ No newline at end of file diff --git a/node_modules/trim/Readme.md b/node_modules/trim/Readme.md new file mode 100644 index 0000000..3460f52 --- /dev/null +++ b/node_modules/trim/Readme.md @@ -0,0 +1,69 @@ + +# trim + + Trims string whitespace. + +## Installation + +``` +$ npm install trim +$ component install component/trim +``` + +## API + + - [trim(str)](#trimstr) + - [.left(str)](#leftstr) + - [.right(str)](#rightstr) + + + +### trim(str) +should trim leading / trailing whitespace. + +```js +trim(' foo bar ').should.equal('foo bar'); +trim('\n\n\nfoo bar\n\r\n\n').should.equal('foo bar'); +``` + + +### .left(str) +should trim leading whitespace. + +```js +trim.left(' foo bar ').should.equal('foo bar '); +``` + + +### .right(str) +should trim trailing whitespace. + +```js +trim.right(' foo bar ').should.equal(' foo bar'); +``` + + +## License + +(The MIT License) + +Copyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/trim/component.json b/node_modules/trim/component.json new file mode 100644 index 0000000..560b258 --- /dev/null +++ b/node_modules/trim/component.json @@ -0,0 +1,7 @@ +{ + "name": "trim", + "version": "0.0.1", + "description": "Trim string whitespace", + "keywords": ["string", "trim"], + "scripts": ["index.js"] +} \ No newline at end of file diff --git a/node_modules/trim/index.js b/node_modules/trim/index.js new file mode 100644 index 0000000..640c24c --- /dev/null +++ b/node_modules/trim/index.js @@ -0,0 +1,14 @@ + +exports = module.exports = trim; + +function trim(str){ + return str.replace(/^\s*|\s*$/g, ''); +} + +exports.left = function(str){ + return str.replace(/^\s*/, ''); +}; + +exports.right = function(str){ + return str.replace(/\s*$/, ''); +}; diff --git a/node_modules/trim/package.json b/node_modules/trim/package.json new file mode 100644 index 0000000..75f8f41 --- /dev/null +++ b/node_modules/trim/package.json @@ -0,0 +1,49 @@ +{ + "_from": "trim@0.0.1", + "_id": "trim@0.0.1", + "_inBundle": false, + "_integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=", + "_location": "/trim", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "trim@0.0.1", + "name": "trim", + "escapedName": "trim", + "rawSpec": "0.0.1", + "saveSpec": null, + "fetchSpec": "0.0.1" + }, + "_requiredBy": [ + "/tap-out" + ], + "_resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", + "_shasum": "5858547f6b290757ee95cccc666fb50084c460dd", + "_spec": "trim@0.0.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/tap-out", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "bundleDependencies": false, + "component": { + "scripts": { + "trim/index.js": "index.js" + } + }, + "dependencies": {}, + "deprecated": false, + "description": "Trim string whitespace", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "string", + "trim" + ], + "main": "index", + "name": "trim", + "version": "0.0.1" +} diff --git a/node_modules/type-is/HISTORY.md b/node_modules/type-is/HISTORY.md new file mode 100644 index 0000000..8de21f7 --- /dev/null +++ b/node_modules/type-is/HISTORY.md @@ -0,0 +1,259 @@ +1.6.18 / 2019-04-26 +=================== + + * Fix regression passing request object to `typeis.is` + +1.6.17 / 2019-04-25 +=================== + + * deps: mime-types@~2.1.24 + - Add Apple file extensions from IANA + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add extension `.owl` to `application/rdf+xml` + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add extensions from IANA for `image/*` types + - Add extensions from IANA for `model/*` types + - Add extensions to HEIC image types + - Add new mime types + - Add `text/mdx` with extension `.mdx` + * perf: prevent internal `throw` on invalid type + +1.6.16 / 2018-02-16 +=================== + + * deps: mime-types@~2.1.18 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add extension `.mjs` to `application/javascript` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add glTF types and 
extensions + - Add new mime types + - Update extensions `.md` and `.markdown` to be `text/markdown` + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +1.6.15 / 2017-03-31 +=================== + + * deps: mime-types@~2.1.15 + - Add new mime types + +1.6.14 / 2016-11-18 +=================== + + * deps: mime-types@~2.1.13 + - Add new mime types + +1.6.13 / 2016-05-18 +=================== + + * deps: mime-types@~2.1.11 + - Add new mime types + +1.6.12 / 2016-02-28 +=================== + + * deps: mime-types@~2.1.10 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +1.6.11 / 2016-01-29 +=================== + + * deps: mime-types@~2.1.9 + - Add new mime types + +1.6.10 / 2015-12-01 +=================== + + * deps: mime-types@~2.1.8 + - Add new mime types + +1.6.9 / 2015-09-27 +================== + + * deps: mime-types@~2.1.7 + - Add new mime types + +1.6.8 / 2015-09-04 +================== + + * deps: mime-types@~2.1.6 + - Add new mime types + +1.6.7 / 2015-08-20 +================== + + * Fix type error when given invalid type to match against + * deps: mime-types@~2.1.5 + - Add new mime types + +1.6.6 / 2015-07-31 +================== + + * deps: mime-types@~2.1.4 + - Add new mime types + +1.6.5 / 2015-07-16 +================== + + * deps: mime-types@~2.1.3 + - Add new mime types + +1.6.4 / 2015-07-01 +================== + + * deps: mime-types@~2.1.2 + - Add new mime types + * perf: enable strict mode + * perf: remove argument reassignment + +1.6.3 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - Add new mime types + * perf: reduce try block size + * perf: remove bitwise operations + +1.6.2 / 2015-05-10 +================== + + * deps: mime-types@~2.0.11 + - Add new mime types + +1.6.1 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - Add new mime types + +1.6.0 / 2015-02-12 +================== + + * fix false-positives in `hasBody` `Transfer-Encoding` check + * support wildcard for both type and subtype (`*/*`) + +1.5.7 / 2015-02-09 +================== + + * fix argument reassignment + * deps: mime-types@~2.0.9 + - Add new mime types + +1.5.6 / 2015-01-29 +================== + + * deps: mime-types@~2.0.8 + - Add new mime types + +1.5.5 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - Add new mime types + - Fix missing extensions + - Fix various invalid MIME type entries + - Remove example template MIME types + - deps: mime-db@~1.5.0 + +1.5.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - Add new mime types + - deps: mime-db@~1.3.0 + +1.5.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - Add new mime types + - deps: mime-db@~1.2.0 + +1.5.2 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - Add new mime types + - deps: mime-db@~1.1.0 + +1.5.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + * deps: media-typer@0.3.0 + * deps: mime-types@~2.0.1 + - Support Node.js 0.6 + +1.5.0 / 2014-09-05 +================== + + * fix `hasbody` to be true for `content-length: 0` + +1.4.0 / 2014-09-02 +================== + + * update mime-types + +1.3.2 / 2014-06-24 +================== + + * use `~` range on mime-types + +1.3.1 / 2014-06-19 +================== + + * fix global variable leak + +1.3.0 / 2014-06-19 +================== + + * improve type parsing + + - invalid media type never matches + - media type not case-sensitive + - extra LWS does not affect results + +1.2.2 / 2014-06-19 
+================== + + * fix behavior on unknown type argument + +1.2.1 / 2014-06-03 +================== + + * switch dependency from `mime` to `mime-types@1.0.0` + +1.2.0 / 2014-05-11 +================== + + * support suffix matching: + + - `+json` matches `application/vnd+json` + - `*/vnd+json` matches `application/vnd+json` + - `application/*+json` matches `application/vnd+json` + +1.1.0 / 2014-04-12 +================== + + * add non-array values support + * expose internal utilities: + + - `.is()` + - `.hasBody()` + - `.normalize()` + - `.match()` + +1.0.1 / 2014-03-30 +================== + + * add `multipart` as a shorthand diff --git a/node_modules/type-is/LICENSE b/node_modules/type-is/LICENSE new file mode 100644 index 0000000..386b7b6 --- /dev/null +++ b/node_modules/type-is/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/type-is/README.md b/node_modules/type-is/README.md new file mode 100644 index 0000000..b85ef8f --- /dev/null +++ b/node_modules/type-is/README.md @@ -0,0 +1,170 @@ +# type-is + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Infer the content-type of a request. + +### Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install type-is +``` + +## API + +```js +var http = require('http') +var typeis = require('type-is') + +http.createServer(function (req, res) { + var istext = typeis(req, ['text/*']) + res.end('you ' + (istext ? 'sent' : 'did not send') + ' me text') +}) +``` + +### typeis(request, types) + +Checks if the `request` is one of the `types`. If the request has no body, +even if there is a `Content-Type` header, then `null` is returned. If the +`Content-Type` header is invalid or does not matches any of the `types`, then +`false` is returned. Otherwise, a string of the type that matched is returned. + +The `request` argument is expected to be a Node.js HTTP request. The `types` +argument is an array of type strings. 
+ +Each type in the `types` array can be one of the following: + +- A file extension name such as `json`. This name will be returned if matched. +- A mime type such as `application/json`. +- A mime type with a wildcard such as `*/*` or `*/json` or `application/*`. + The full mime type will be returned if matched. +- A suffix such as `+json`. This can be combined with a wildcard such as + `*/vnd+json` or `application/*+json`. The full mime type will be returned + if matched. + +Some examples to illustrate the inputs and returned value: + + + +```js +// req.headers.content-type = 'application/json' + +typeis(req, ['json']) // => 'json' +typeis(req, ['html', 'json']) // => 'json' +typeis(req, ['application/*']) // => 'application/json' +typeis(req, ['application/json']) // => 'application/json' + +typeis(req, ['html']) // => false +``` + +### typeis.hasBody(request) + +Returns a Boolean if the given `request` has a body, regardless of the +`Content-Type` header. + +Having a body has no relation to how large the body is (it may be 0 bytes). +This is similar to how file existence works. If a body does exist, then this +indicates that there is data to read from the Node.js request stream. + + + +```js +if (typeis.hasBody(req)) { + // read the body, since there is one + + req.on('data', function (chunk) { + // ... + }) +} +``` + +### typeis.is(mediaType, types) + +Checks if the `mediaType` is one of the `types`. If the `mediaType` is invalid +or does not matches any of the `types`, then `false` is returned. Otherwise, a +string of the type that matched is returned. + +The `mediaType` argument is expected to be a +[media type](https://tools.ietf.org/html/rfc6838) string. The `types` argument +is an array of type strings. + +Each type in the `types` array can be one of the following: + +- A file extension name such as `json`. This name will be returned if matched. +- A mime type such as `application/json`. +- A mime type with a wildcard such as `*/*` or `*/json` or `application/*`. + The full mime type will be returned if matched. +- A suffix such as `+json`. This can be combined with a wildcard such as + `*/vnd+json` or `application/*+json`. The full mime type will be returned + if matched. 
+ +Some examples to illustrate the inputs and returned value: + + + +```js +var mediaType = 'application/json' + +typeis.is(mediaType, ['json']) // => 'json' +typeis.is(mediaType, ['html', 'json']) // => 'json' +typeis.is(mediaType, ['application/*']) // => 'application/json' +typeis.is(mediaType, ['application/json']) // => 'application/json' + +typeis.is(mediaType, ['html']) // => false +``` + +## Examples + +### Example body parser + +```js +var express = require('express') +var typeis = require('type-is') + +var app = express() + +app.use(function bodyParser (req, res, next) { + if (!typeis.hasBody(req)) { + return next() + } + + switch (typeis(req, ['urlencoded', 'json', 'multipart'])) { + case 'urlencoded': + // parse urlencoded body + throw new Error('implement urlencoded body parsing') + case 'json': + // parse json body + throw new Error('implement json body parsing') + case 'multipart': + // parse multipart body + throw new Error('implement multipart body parsing') + default: + // 415 error code + res.statusCode = 415 + res.end() + break + } +}) +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/type-is/master +[coveralls-url]: https://coveralls.io/r/jshttp/type-is?branch=master +[node-version-image]: https://badgen.net/npm/node/type-is +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/type-is +[npm-url]: https://npmjs.org/package/type-is +[npm-version-image]: https://badgen.net/npm/v/type-is +[travis-image]: https://badgen.net/travis/jshttp/type-is/master +[travis-url]: https://travis-ci.org/jshttp/type-is diff --git a/node_modules/type-is/index.js b/node_modules/type-is/index.js new file mode 100644 index 0000000..890ad76 --- /dev/null +++ b/node_modules/type-is/index.js @@ -0,0 +1,266 @@ +/*! + * type-is + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var typer = require('media-typer') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = typeofrequest +module.exports.is = typeis +module.exports.hasBody = hasbody +module.exports.normalize = normalize +module.exports.match = mimeMatch + +/** + * Compare a `value` content-type with `types`. + * Each `type` can be an extension like `html`, + * a special shortcut like `multipart` or `urlencoded`, + * or a mime type. + * + * If no types match, `false` is returned. + * Otherwise, the first `type` that matches is returned. + * + * @param {String} value + * @param {Array} types + * @public + */ + +function typeis (value, types_) { + var i + var types = types_ + + // remove parameters and normalize + var val = tryNormalizeType(value) + + // no type or invalid + if (!val) { + return false + } + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length - 1) + for (i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // no types, return the content type + if (!types || !types.length) { + return val + } + + var type + for (i = 0; i < types.length; i++) { + if (mimeMatch(normalize(type = types[i]), val)) { + return type[0] === '+' || type.indexOf('*') !== -1 + ? val + : type + } + } + + // no matches + return false +} + +/** + * Check if a request has a request body. + * A request with a body __must__ either have `transfer-encoding` + * or `content-length` headers set. 
+ * http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 + * + * @param {Object} request + * @return {Boolean} + * @public + */ + +function hasbody (req) { + return req.headers['transfer-encoding'] !== undefined || + !isNaN(req.headers['content-length']) +} + +/** + * Check if the incoming request contains the "Content-Type" + * header field, and it contains any of the give mime `type`s. + * If there is no request body, `null` is returned. + * If there is no content type, `false` is returned. + * Otherwise, it returns the first `type` that matches. + * + * Examples: + * + * // With Content-Type: text/html; charset=utf-8 + * this.is('html'); // => 'html' + * this.is('text/html'); // => 'text/html' + * this.is('text/*', 'application/json'); // => 'text/html' + * + * // When Content-Type is application/json + * this.is('json', 'urlencoded'); // => 'json' + * this.is('application/json'); // => 'application/json' + * this.is('html', 'application/*'); // => 'application/json' + * + * this.is('html'); // => false + * + * @param {String|Array} types... + * @return {String|false|null} + * @public + */ + +function typeofrequest (req, types_) { + var types = types_ + + // no body + if (!hasbody(req)) { + return null + } + + // support flattened arguments + if (arguments.length > 2) { + types = new Array(arguments.length - 1) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // request content type + var value = req.headers['content-type'] + + return typeis(value, types) +} + +/** + * Normalize a mime type. + * If it's a shorthand, expand it to a valid mime type. + * + * In general, you probably want: + * + * var type = is(req, ['urlencoded', 'json', 'multipart']); + * + * Then use the appropriate body parsers. + * These three are the most common request body types + * and are thus ensured to work. + * + * @param {String} type + * @private + */ + +function normalize (type) { + if (typeof type !== 'string') { + // invalid type + return false + } + + switch (type) { + case 'urlencoded': + return 'application/x-www-form-urlencoded' + case 'multipart': + return 'multipart/*' + } + + if (type[0] === '+') { + // "+json" -> "*/*+json" expando + return '*/*' + type + } + + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if `expected` mime type + * matches `actual` mime type with + * wildcard and +suffix support. + * + * @param {String} expected + * @param {String} actual + * @return {Boolean} + * @private + */ + +function mimeMatch (expected, actual) { + // invalid type + if (expected === false) { + return false + } + + // split types + var actualParts = actual.split('/') + var expectedParts = expected.split('/') + + // invalid format + if (actualParts.length !== 2 || expectedParts.length !== 2) { + return false + } + + // validate type + if (expectedParts[0] !== '*' && expectedParts[0] !== actualParts[0]) { + return false + } + + // validate suffix wildcard + if (expectedParts[1].substr(0, 2) === '*+') { + return expectedParts[1].length <= actualParts[1].length + 1 && + expectedParts[1].substr(1) === actualParts[1].substr(1 - expectedParts[1].length) + } + + // validate subtype + if (expectedParts[1] !== '*' && expectedParts[1] !== actualParts[1]) { + return false + } + + return true +} + +/** + * Normalize a type and remove parameters. 
+ * + * @param {string} value + * @return {string} + * @private + */ + +function normalizeType (value) { + // parse the type + var type = typer.parse(value) + + // remove the parameters + type.parameters = undefined + + // reformat it + return typer.format(type) +} + +/** + * Try to normalize a type and remove parameters. + * + * @param {string} value + * @return {string} + * @private + */ + +function tryNormalizeType (value) { + if (!value) { + return null + } + + try { + return normalizeType(value) + } catch (err) { + return null + } +} diff --git a/node_modules/type-is/package.json b/node_modules/type-is/package.json new file mode 100644 index 0000000..eca94f6 --- /dev/null +++ b/node_modules/type-is/package.json @@ -0,0 +1,85 @@ +{ + "_from": "type-is@~1.6.18", + "_id": "type-is@1.6.18", + "_inBundle": false, + "_integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "_location": "/type-is", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "type-is@~1.6.18", + "name": "type-is", + "escapedName": "type-is", + "rawSpec": "~1.6.18", + "saveSpec": null, + "fetchSpec": "~1.6.18" + }, + "_requiredBy": [ + "/body-parser", + "/express" + ], + "_resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "_shasum": "4e552cd05df09467dcbc4ef739de89f2cf37c131", + "_spec": "type-is@~1.6.18", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "bugs": { + "url": "https://github.com/jshttp/type-is/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + } + ], + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "deprecated": false, + "description": "Infer the content-type of a request.", + "devDependencies": { + "eslint": "5.16.0", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.17.2", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.1.1", + "eslint-plugin-standard": "4.0.0", + "mocha": "6.1.4", + "nyc": "14.0.0" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "homepage": "https://github.com/jshttp/type-is#readme", + "keywords": [ + "content", + "type", + "checking" + ], + "license": "MIT", + "name": "type-is", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/type-is.git" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test" + }, + "version": "1.6.18" +} diff --git a/node_modules/unpipe/HISTORY.md b/node_modules/unpipe/HISTORY.md new file mode 100644 index 0000000..85e0f8d --- /dev/null +++ b/node_modules/unpipe/HISTORY.md @@ -0,0 +1,4 @@ +1.0.0 / 2015-06-14 +================== + + * Initial release diff --git a/node_modules/unpipe/LICENSE b/node_modules/unpipe/LICENSE new file mode 100644 index 0000000..aed0138 --- /dev/null +++ b/node_modules/unpipe/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without 
restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/unpipe/README.md b/node_modules/unpipe/README.md new file mode 100644 index 0000000..e536ad2 --- /dev/null +++ b/node_modules/unpipe/README.md @@ -0,0 +1,43 @@ +# unpipe + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Unpipe a stream from all destinations. + +## Installation + +```sh +$ npm install unpipe +``` + +## API + +```js +var unpipe = require('unpipe') +``` + +### unpipe(stream) + +Unpipes all destinations from a given stream. With stream 2+, this is +equivalent to `stream.unpipe()`. When used with streams 1 style streams +(typically Node.js 0.8 and below), this module attempts to undo the +actions done in `stream.pipe(dest)`. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/unpipe.svg +[npm-url]: https://npmjs.org/package/unpipe +[node-image]: https://img.shields.io/node/v/unpipe.svg +[node-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/stream-utils/unpipe.svg +[travis-url]: https://travis-ci.org/stream-utils/unpipe +[coveralls-image]: https://img.shields.io/coveralls/stream-utils/unpipe.svg +[coveralls-url]: https://coveralls.io/r/stream-utils/unpipe?branch=master +[downloads-image]: https://img.shields.io/npm/dm/unpipe.svg +[downloads-url]: https://npmjs.org/package/unpipe diff --git a/node_modules/unpipe/index.js b/node_modules/unpipe/index.js new file mode 100644 index 0000000..15c3d97 --- /dev/null +++ b/node_modules/unpipe/index.js @@ -0,0 +1,69 @@ +/*! + * unpipe + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = unpipe + +/** + * Determine if there are Node.js pipe-like data listeners. + * @private + */ + +function hasPipeDataListeners(stream) { + var listeners = stream.listeners('data') + + for (var i = 0; i < listeners.length; i++) { + if (listeners[i].name === 'ondata') { + return true + } + } + + return false +} + +/** + * Unpipe a stream from all destinations. 
+ * + * @param {object} stream + * @public + */ + +function unpipe(stream) { + if (!stream) { + throw new TypeError('argument stream is required') + } + + if (typeof stream.unpipe === 'function') { + // new-style + stream.unpipe() + return + } + + // Node.js 0.8 hack + if (!hasPipeDataListeners(stream)) { + return + } + + var listener + var listeners = stream.listeners('close') + + for (var i = 0; i < listeners.length; i++) { + listener = listeners[i] + + if (listener.name !== 'cleanup' && listener.name !== 'onclose') { + continue + } + + // invoke the listener + listener.call(stream) + } +} diff --git a/node_modules/unpipe/package.json b/node_modules/unpipe/package.json new file mode 100644 index 0000000..7fdc6fb --- /dev/null +++ b/node_modules/unpipe/package.json @@ -0,0 +1,63 @@ +{ + "_from": "unpipe@1.0.0", + "_id": "unpipe@1.0.0", + "_inBundle": false, + "_integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "_location": "/unpipe", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "unpipe@1.0.0", + "name": "unpipe", + "escapedName": "unpipe", + "rawSpec": "1.0.0", + "saveSpec": null, + "fetchSpec": "1.0.0" + }, + "_requiredBy": [ + "/finalhandler", + "/raw-body" + ], + "_resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "_shasum": "b2bf4ee8514aae6165b4817829d21b2ef49904ec", + "_spec": "unpipe@1.0.0", + "_where": "/home/kareml/Desktop/SMS/node_modules/raw-body", + "author": { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + "bugs": { + "url": "https://github.com/stream-utils/unpipe/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Unpipe a stream from all destinations", + "devDependencies": { + "istanbul": "0.3.15", + "mocha": "2.2.5", + "readable-stream": "1.1.13" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "homepage": "https://github.com/stream-utils/unpipe#readme", + "license": "MIT", + "name": "unpipe", + "repository": { + "type": "git", + "url": "git+https://github.com/stream-utils/unpipe.git" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "version": "1.0.0" +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or 
sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. 
+ * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..3d6bfe6 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,57 @@ +{ + "_from": "util-deprecate@~1.0.1", + "_id": "util-deprecate@1.0.2", + "_inBundle": false, + "_integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "_location": "/util-deprecate", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "util-deprecate@~1.0.1", + "name": "util-deprecate", + "escapedName": "util-deprecate", + "rawSpec": "~1.0.1", + "saveSpec": null, + "fetchSpec": "~1.0.1" + }, + "_requiredBy": [ + "/readable-stream", + "/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "_spec": "util-deprecate@~1.0.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/readable-stream", + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io/" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The Node.js `util.deprecate()` function with browser support", + "homepage": "https://github.com/TooTallNate/util-deprecate", + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "license": "MIT", + "main": "node.js", + "name": "util-deprecate", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.2" +} diff --git a/node_modules/utils-merge/.npmignore b/node_modules/utils-merge/.npmignore new file mode 100644 index 0000000..3e53844 --- /dev/null +++ b/node_modules/utils-merge/.npmignore @@ -0,0 +1,9 @@ +CONTRIBUTING.md +Makefile +docs/ +examples/ +reports/ +test/ + +.jshintrc +.travis.yml diff --git a/node_modules/utils-merge/LICENSE b/node_modules/utils-merge/LICENSE new file mode 100644 
index 0000000..76f6d08 --- /dev/null +++ b/node_modules/utils-merge/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2017 Jared Hanson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/utils-merge/README.md b/node_modules/utils-merge/README.md new file mode 100644 index 0000000..0cb7117 --- /dev/null +++ b/node_modules/utils-merge/README.md @@ -0,0 +1,34 @@ +# utils-merge + +[![Version](https://img.shields.io/npm/v/utils-merge.svg?label=version)](https://www.npmjs.com/package/utils-merge) +[![Build](https://img.shields.io/travis/jaredhanson/utils-merge.svg)](https://travis-ci.org/jaredhanson/utils-merge) +[![Quality](https://img.shields.io/codeclimate/github/jaredhanson/utils-merge.svg?label=quality)](https://codeclimate.com/github/jaredhanson/utils-merge) +[![Coverage](https://img.shields.io/coveralls/jaredhanson/utils-merge.svg)](https://coveralls.io/r/jaredhanson/utils-merge) +[![Dependencies](https://img.shields.io/david/jaredhanson/utils-merge.svg)](https://david-dm.org/jaredhanson/utils-merge) + + +Merges the properties from a source object into a destination object. + +## Install + +```bash +$ npm install utils-merge +``` + +## Usage + +```javascript +var a = { foo: 'bar' } + , b = { bar: 'baz' }; + +merge(a, b); +// => { foo: 'bar', bar: 'baz' } +``` + +## License + +[The MIT License](http://opensource.org/licenses/MIT) + +Copyright (c) 2013-2017 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)> + + Sponsor diff --git a/node_modules/utils-merge/index.js b/node_modules/utils-merge/index.js new file mode 100644 index 0000000..4265c69 --- /dev/null +++ b/node_modules/utils-merge/index.js @@ -0,0 +1,23 @@ +/** + * Merge object b with object a. 
+ * + * var a = { foo: 'bar' } + * , b = { bar: 'baz' }; + * + * merge(a, b); + * // => { foo: 'bar', bar: 'baz' } + * + * @param {Object} a + * @param {Object} b + * @return {Object} + * @api public + */ + +exports = module.exports = function(a, b){ + if (a && b) { + for (var key in b) { + a[key] = b[key]; + } + } + return a; +}; diff --git a/node_modules/utils-merge/package.json b/node_modules/utils-merge/package.json new file mode 100644 index 0000000..eef9bb6 --- /dev/null +++ b/node_modules/utils-merge/package.json @@ -0,0 +1,66 @@ +{ + "_from": "utils-merge@1.0.1", + "_id": "utils-merge@1.0.1", + "_inBundle": false, + "_integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", + "_location": "/utils-merge", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "utils-merge@1.0.1", + "name": "utils-merge", + "escapedName": "utils-merge", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "_shasum": "9f95710f50a267947b2ccc124741c1028427e713", + "_spec": "utils-merge@1.0.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "author": { + "name": "Jared Hanson", + "email": "jaredhanson@gmail.com", + "url": "http://www.jaredhanson.net/" + }, + "bugs": { + "url": "http://github.com/jaredhanson/utils-merge/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "merge() utility function", + "devDependencies": { + "chai": "1.x.x", + "make-node": "0.3.x", + "mocha": "1.x.x" + }, + "engines": { + "node": ">= 0.4.0" + }, + "homepage": "https://github.com/jaredhanson/utils-merge#readme", + "keywords": [ + "util" + ], + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "http://opensource.org/licenses/MIT" + } + ], + "main": "./index", + "name": "utils-merge", + "repository": { + "type": "git", + "url": "git://github.com/jaredhanson/utils-merge.git" + }, + "scripts": { + "test": "mocha --reporter spec --require test/bootstrap/node test/*.test.js" + }, + "version": "1.0.1" +} diff --git a/node_modules/vary/HISTORY.md b/node_modules/vary/HISTORY.md new file mode 100644 index 0000000..f6cbcf7 --- /dev/null +++ b/node_modules/vary/HISTORY.md @@ -0,0 +1,39 @@ +1.1.2 / 2017-09-23 +================== + + * perf: improve header token parsing speed + +1.1.1 / 2017-03-20 +================== + + * perf: hoist regular expression + +1.1.0 / 2015-09-29 +================== + + * Only accept valid field names in the `field` argument + - Ensures the resulting string is a valid HTTP header value + +1.0.1 / 2015-07-08 +================== + + * Fix setting empty header from empty `field` + * perf: enable strict mode + * perf: remove argument reassignments + +1.0.0 / 2014-08-10 +================== + + * Accept valid `Vary` header string as `field` + * Add `vary.append` for low-level string manipulation + * Move to `jshttp` orgainzation + +0.1.0 / 2014-06-05 +================== + + * Support array of fields to set + +0.0.0 / 2014-06-04 +================== + + * Initial release diff --git a/node_modules/vary/LICENSE b/node_modules/vary/LICENSE new file mode 100644 index 0000000..84441fb --- /dev/null +++ b/node_modules/vary/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal 
in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/vary/README.md b/node_modules/vary/README.md new file mode 100644 index 0000000..cc000b3 --- /dev/null +++ b/node_modules/vary/README.md @@ -0,0 +1,101 @@ +# vary + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Manipulate the HTTP Vary header + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install vary +``` + +## API + + + +```js +var vary = require('vary') +``` + +### vary(res, field) + +Adds the given header `field` to the `Vary` response header of `res`. +This can be a string of a single field, a string of a valid `Vary` +header, or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. + + + +```js +// Append "Origin" to the Vary header of the response +vary(res, 'Origin') +``` + +### vary.append(header, field) + +Adds the given header `field` to the `Vary` response header string `header`. +This can be a string of a single field, a string of a valid `Vary` header, +or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. The new header string is returned. + + + +```js +// Get header string appending "Origin" to "Accept, User-Agent" +vary.append('Accept, User-Agent', 'Origin') +``` + +## Examples + +### Updating the Vary header when content is based on it + +```js +var http = require('http') +var vary = require('vary') + +http.createServer(function onRequest (req, res) { + // about to user-agent sniff + vary(res, 'User-Agent') + + var ua = req.headers['user-agent'] || '' + var isMobile = /mobi|android|touch|mini/i.test(ua) + + // serve site, depending on isMobile + res.setHeader('Content-Type', 'text/html') + res.end('You are (probably) ' + (isMobile ? 
'' : 'not ') + 'a mobile user') +}) +``` + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/vary.svg +[npm-url]: https://npmjs.org/package/vary +[node-version-image]: https://img.shields.io/node/v/vary.svg +[node-version-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/jshttp/vary/master.svg +[travis-url]: https://travis-ci.org/jshttp/vary +[coveralls-image]: https://img.shields.io/coveralls/jshttp/vary/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/vary +[downloads-image]: https://img.shields.io/npm/dm/vary.svg +[downloads-url]: https://npmjs.org/package/vary diff --git a/node_modules/vary/index.js b/node_modules/vary/index.js new file mode 100644 index 0000000..5b5e741 --- /dev/null +++ b/node_modules/vary/index.js @@ -0,0 +1,149 @@ +/*! + * vary + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + */ + +module.exports = vary +module.exports.append = append + +/** + * RegExp to match field-name in RFC 7230 sec 3.2 + * + * field-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + */ + +var FIELD_NAME_REGEXP = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/ + +/** + * Append a field to a vary header. + * + * @param {String} header + * @param {String|Array} field + * @return {String} + * @public + */ + +function append (header, field) { + if (typeof header !== 'string') { + throw new TypeError('header argument is required') + } + + if (!field) { + throw new TypeError('field argument is required') + } + + // get fields array + var fields = !Array.isArray(field) + ? parse(String(field)) + : field + + // assert on invalid field names + for (var j = 0; j < fields.length; j++) { + if (!FIELD_NAME_REGEXP.test(fields[j])) { + throw new TypeError('field argument contains an invalid header name') + } + } + + // existing, unspecified vary + if (header === '*') { + return header + } + + // enumerate current values + var val = header + var vals = parse(header.toLowerCase()) + + // unspecified vary + if (fields.indexOf('*') !== -1 || vals.indexOf('*') !== -1) { + return '*' + } + + for (var i = 0; i < fields.length; i++) { + var fld = fields[i].toLowerCase() + + // append value (case-preserving) + if (vals.indexOf(fld) === -1) { + vals.push(fld) + val = val + ? val + ', ' + fields[i] + : fields[i] + } + } + + return val +} + +/** + * Parse a vary header into an array. + * + * @param {String} header + * @return {Array} + * @private + */ + +function parse (header) { + var end = 0 + var list = [] + var start = 0 + + // gather tokens + for (var i = 0, len = header.length; i < len; i++) { + switch (header.charCodeAt(i)) { + case 0x20: /* */ + if (start === end) { + start = end = i + 1 + } + break + case 0x2c: /* , */ + list.push(header.substring(start, end)) + start = end = i + 1 + break + default: + end = i + 1 + break + } + } + + // final token + list.push(header.substring(start, end)) + + return list +} + +/** + * Mark that a request is varied on a header field. + * + * @param {Object} res + * @param {String|Array} field + * @public + */ + +function vary (res, field) { + if (!res || !res.getHeader || !res.setHeader) { + // quack quack + throw new TypeError('res argument is required') + } + + // get existing header + var val = res.getHeader('Vary') || '' + var header = Array.isArray(val) + ? 
val.join(', ') + : String(val) + + // set new header + if ((val = append(header, field))) { + res.setHeader('Vary', val) + } +} diff --git a/node_modules/vary/package.json b/node_modules/vary/package.json new file mode 100644 index 0000000..83ddac2 --- /dev/null +++ b/node_modules/vary/package.json @@ -0,0 +1,78 @@ +{ + "_from": "vary@~1.1.2", + "_id": "vary@1.1.2", + "_inBundle": false, + "_integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", + "_location": "/vary", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "vary@~1.1.2", + "name": "vary", + "escapedName": "vary", + "rawSpec": "~1.1.2", + "saveSpec": null, + "fetchSpec": "~1.1.2" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "_shasum": "2299f02c6ded30d4a5961b0b9f74524a18f634fc", + "_spec": "vary@~1.1.2", + "_where": "/home/kareml/Desktop/SMS/node_modules/express", + "author": { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + "bugs": { + "url": "https://github.com/jshttp/vary/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Manipulate the HTTP Vary header", + "devDependencies": { + "beautify-benchmark": "0.2.4", + "benchmark": "2.1.4", + "eslint": "3.19.0", + "eslint-config-standard": "10.2.1", + "eslint-plugin-import": "2.7.0", + "eslint-plugin-markdown": "1.0.0-beta.6", + "eslint-plugin-node": "5.1.1", + "eslint-plugin-promise": "3.5.0", + "eslint-plugin-standard": "3.0.1", + "istanbul": "0.4.5", + "mocha": "2.5.3", + "supertest": "1.1.0" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "homepage": "https://github.com/jshttp/vary#readme", + "keywords": [ + "http", + "res", + "vary" + ], + "license": "MIT", + "name": "vary", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/vary.git" + }, + "scripts": { + "bench": "node benchmark/index.js", + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "version": "1.1.2" +} diff --git a/node_modules/wrappy/LICENSE b/node_modules/wrappy/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md new file mode 100644 index 0000000..98eab25 --- /dev/null +++ b/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! +assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/node_modules/wrappy/package.json b/node_modules/wrappy/package.json new file mode 100644 index 0000000..4c6ec61 --- /dev/null +++ b/node_modules/wrappy/package.json @@ -0,0 +1,59 @@ +{ + "_from": "wrappy@1", + "_id": "wrappy@1.0.2", + "_inBundle": false, + "_integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "_location": "/wrappy", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "wrappy@1", + "name": "wrappy", + "escapedName": "wrappy", + "rawSpec": "1", + "saveSpec": null, + "fetchSpec": "1" + }, + "_requiredBy": [ + "/inflight", + "/once" + ], + "_resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "_shasum": "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", + "_spec": "wrappy@1", + "_where": "/home/kareml/Desktop/SMS/node_modules/inflight", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Callback wrapping utility", + "devDependencies": { + "tap": "^2.3.1" + }, + "directories": { + "test": "test" + }, + "files": [ + "wrappy.js" + ], + "homepage": "https://github.com/npm/wrappy", + "license": "ISC", + "main": "wrappy.js", + "name": "wrappy", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/wrappy.git" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "version": "1.0.2" +} diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js new file mode 100644 index 0000000..bb7e7d6 --- /dev/null +++ b/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/node_modules/xtend/.jshintrc b/node_modules/xtend/.jshintrc new file mode 100644 index 0000000..77887b5 --- /dev/null +++ b/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/node_modules/xtend/LICENSE b/node_modules/xtend/LICENSE new file mode 100644 index 0000000..0099f4f --- /dev/null +++ b/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/xtend/README.md b/node_modules/xtend/README.md new file mode 100644 index 0000000..4a2703c --- /dev/null +++ b/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. 
Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/xtend/immutable.js b/node_modules/xtend/immutable.js new file mode 100644 index 0000000..94889c9 --- /dev/null +++ b/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/mutable.js b/node_modules/xtend/mutable.js new file mode 100644 index 0000000..72debed --- /dev/null +++ b/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/package.json b/node_modules/xtend/package.json new file mode 100644 index 0000000..5cdcc9a --- /dev/null +++ b/node_modules/xtend/package.json @@ -0,0 +1,86 @@ +{ + "_from": "xtend@~4.0.1", + "_id": "xtend@4.0.2", + "_inBundle": false, + "_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "_location": "/xtend", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "xtend@~4.0.1", + "name": "xtend", + "escapedName": "xtend", + "rawSpec": "~4.0.1", + "saveSpec": null, + "fetchSpec": "~4.0.1" + }, + "_requiredBy": [ + "/through2" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "_shasum": "bb72779f5fa465186b1f438f674fa347fdb5db54", + "_spec": "xtend@~4.0.1", + "_where": "/home/kareml/Desktop/SMS/node_modules/through2", + "author": { + "name": "Raynos", + "email": "raynos2@gmail.com" + }, + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "engines": { + "node": ">=0.4" + }, + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "license": "MIT", + "main": "immutable", + "name": "xtend", + "repository": { + "type": "git", + "url": "git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "version": "4.0.2" +} diff --git a/node_modules/xtend/test.js b/node_modules/xtend/test.js new file mode 100644 index 0000000..b895b42 --- /dev/null +++ b/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + 
+test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git a/public/grades.html b/public/grades.html new file mode 100644 index 0000000..97e79ba --- /dev/null +++ b/public/grades.html @@ -0,0 +1,21 @@ + + + + + + + + Student Managment System + + +
+ [grades.html body: the markup did not survive extraction; the recoverable content is a page header with the school image (alt text "school picture") and the heading "Student Management System"]
diff --git a/public/imges/grades.png b/public/imges/grades.png
new file mode 100644
index 0000000..22217f3
Binary files /dev/null and b/public/imges/grades.png differ
diff --git a/public/imges/school.png b/public/imges/school.png
new file mode 100644
index 0000000..2b7ee4f
Binary files /dev/null and b/public/imges/school.png differ
diff --git a/public/imges/student.png b/public/imges/student.png
new file mode 100644
index 0000000..591f996
Binary files /dev/null and b/public/imges/student.png differ
diff --git a/public/index.html b/public/index.html
index e69de29..56022e6 100644
--- a/public/index.html
+++ b/public/index.html
@@ -0,0 +1,28 @@
+ [index.html head: the markup did not survive extraction; the recoverable text is the page title "Student Management System"]
+ [index.html body: the markup did not survive extraction; the recoverable content is a page header with the school image (alt text "school picture") and the heading "Student Management System", followed by an icons area containing the student and grades images (alt texts "student picture" and "grades picture")]
diff --git a/public/student.html b/public/student.html
new file mode 100644
index 0000000..e69de29
diff --git a/public/style.css b/public/style.css
index e69de29..6bd56f8 100644
--- a/public/style.css
+++ b/public/style.css
@@ -0,0 +1,42 @@
+* {
+    padding: 0;
+    margin: 0;
+}
+
+.topsection {
+    background-color: #4e9cc9;
+    width: 100%;
+    height: 109px;
+    display: flex;
+    flex-direction: row;
+    align-items: center;
+}
+
+.topsection img {
+    width: 50px;
+    height: 50px;
+    padding: 10px;
+}
+
+.topsection h1 {
+    margin-top: 22px;
+    font-family: "Anton";
+    font-size: 6vw;
+    color: white;
+}
+
+.icons {
+    width: 100%;
+    height: auto;
+    padding: 5px;
+    display: flex;
+    flex-direction: column;
+    align-items: center;
+}
+
+.icons img {
+    width: 200px;
+    height: 200px;
+    margin-top: 36px;
+    box-shadow: 30px 10px 10px 10px #aaaaaa;
+}
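Because the markup of the two HTML pages above did not survive extraction, here is a minimal sketch of what public/index.html plausibly contains, inferred from the class names in public/style.css, the image files under public/imges/, and the recoverable alt text and heading. Only those names are taken from the diff; the stylesheet link, the Google Fonts import for the "Anton" face referenced in the CSS, and the anchor links to student.html and grades.html are assumptions, not part of the recorded change.

```html
<!-- Hypothetical reconstruction of public/index.html; the class names, image
     paths, alt texts, and heading text come from the diff above, everything
     else is an assumption. -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Student Management System</title>
    <!-- Assumed: the "Anton" font used by .topsection h1 is loaded from Google Fonts -->
    <link href="https://fonts.googleapis.com/css2?family=Anton&display=swap" rel="stylesheet">
    <link rel="stylesheet" href="style.css">
  </head>
  <body>
    <!-- Flex-row header styled by .topsection / .topsection img / .topsection h1 -->
    <div class="topsection">
      <img src="imges/school.png" alt="school picture">
      <h1>Student Management System</h1>
    </div>
    <!-- Flex-column icon area styled by .icons / .icons img -->
    <div class="icons">
      <!-- Assumed: the icons link to the two other pages added in this diff -->
      <a href="student.html"><img src="imges/student.png" alt="student picture"></a>
      <a href="grades.html"><img src="imges/grades.png" alt="grades picture"></a>
    </div>
  </body>
</html>
```

Going by the recovered placeholders above, grades.html presumably reuses the same topsection header (school image plus heading) with the icons area left empty.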