diff --git a/.circleci/config.yml b/.circleci/config.yml index d90f42ae3..01f6c8312 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,5 @@ version: 2.1 - commands: npm-install: steps: @@ -18,7 +17,7 @@ commands: - run: echo $(python --version) > .python-version - restore_cache: key: py-deps-{{ checksum ".python-version" }}-{{ checksum "docs/requirements.txt" }} - - run: "if [ ! -d ./venv ]; then python -m venv ./venv; fi" + - run: 'if [ ! -d ./venv ]; then python -m venv ./venv; fi' - run: echo "source $(pwd)/venv/bin/activate" >> $BASH_ENV - run: pip install -r ./docs/requirements.txt - save_cache: @@ -26,23 +25,28 @@ commands: paths: - ./venv - aliases: - &node12 image: circleci/node:12 - - &python-docs - # using Python 3.6 as that's the version ReadTheDocs is running + - &python-docs # using Python 3.6 as that's the version ReadTheDocs is running image: circleci/python:3.6-node - &test-steps steps: - checkout - npm-install + - run: npm run build - run: npm run ci:test - jobs: + build: + docker: [<<: *node12] + steps: + - checkout + - npm-install + - run: npm run build + test-node12: docker: [<<: *node12] <<: *test-steps @@ -60,6 +64,7 @@ jobs: steps: - checkout - npm-install + - run: npm run build - run: npm run e2e:apib test-e2e-openapi2: @@ -67,6 +72,7 @@ jobs: steps: - checkout - npm-install + - run: npm run build - run: npm run e2e:openapi2 quality-checks: @@ -94,6 +100,7 @@ jobs: steps: - checkout - npm-install + - run: npm run build - run: npm run ci:smoke release: @@ -103,21 +110,22 @@ jobs: steps: - checkout - npm-install + - run: npm run build - run: npm run ci:release - workflows: version: 2 test-and-release: jobs: - quality-checks + - docs-dry-run - test-node12 - test-node10 - test-node8 - test-e2e-apib - test-e2e-openapi2 - smoke-tests - - docs-dry-run + - release: requires: - quality-checks diff --git a/.eslintignore b/.eslintignore index 3748c0979..673c6c821 100644 --- a/.eslintignore +++ b/.eslintignore @@ -2,6 +2,7 @@ .vscode coverage docs +build # In case Python virtualenv is present in the project directory, this ignores # its contents (some Python projects vendor JavaScript files, etc.) 
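The CI, .eslintignore, and .gitignore changes above all assume a new compile step: the ES-module sources in lib/ are transpiled into a build/ directory by `npm run build`, which is why every job now builds before testing and why `build` is ignored by ESLint and git. A minimal sketch of the consumer side, mirroring the bin/dredd change later in this diff (the transpiler itself, presumably Babel, is not shown in the patch and is only an assumption here):

// bin/dredd loads the compiled output rather than the ES-module sources.
// When `export default CLI` is transpiled to CommonJS, the class ends up on
// module.exports.default, hence the `.default` access.
const CLI = require('../build/CLI').default;

const dreddCLI = new CLI({
  custom: {
    cwd: process.cwd(),
    argv: process.argv.slice(2)
  }
});

dreddCLI.run();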
diff --git a/.eslintrc.js b/.eslintrc.js index 92d2d8b4e..28fd11d09 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,28 +1,20 @@ module.exports = { - extends: 'airbnb-base', + extends: ['airbnb-base', 'prettier'], env: { - 'node': true + node: true }, rules: { // Using 'console' is perfectly okay for a Node.js CLI tool and avoiding // it only brings unnecessary complexity 'no-console': 'off', - // Node 6 does not support dangling commas in function arguments - 'comma-dangle': [ - 'error', - { - 'arrays': 'always-multiline', - 'objects': 'always-multiline', - 'functions': 'never' - } - ], - // This is to allow a convention for exporting functions solely for // the purpose of the unit tests, see // https://github.com/apiaryio/dredd-transactions/pull/179#discussion_r206852270 'no-underscore-dangle': 'off', + 'import/prefer-default-export': 'off', + // Following rules were introduced to make the decaffeination // of the codebase possible and are to be removed in the future 'class-methods-use-this': 'off', @@ -38,6 +30,6 @@ module.exports = { 'no-plusplus': 'off', 'no-restricted-syntax': 'off', 'no-use-before-define': 'off', - 'prefer-destructuring': 'off', + 'prefer-destructuring': 'off' } -}; +} diff --git a/.gitignore b/.gitignore index 373a7c54c..7376d6952 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,8 @@ npm-shrinkwrap.json /coverage /docs/_build +build +typings node_modules .idea .vscode diff --git a/.npmignore b/.npmignore new file mode 100644 index 000000000..c42a223b6 --- /dev/null +++ b/.npmignore @@ -0,0 +1,2 @@ +# Prevent publishing source code to NPM +lib diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 000000000..a5607e385 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,6 @@ +{ + "semi": false, + "trailingComma": "all", + "singleQuote": true, + "arrowParens": "always" +} diff --git a/appveyor.yml b/appveyor.yml index c09d1192b..4d691946d 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,15 +1,16 @@ environment: - nodejs_version: "10" + nodejs_version: '10' install: - ps: Install-Product node 10 - - "npm -g install npm@6" + - 'npm -g install npm@6' - "set PATH=%APPDATA%\\npm;%PATH%" - - "npm install" + - 'npm install' cache: - - "node_modules -> package.json" + - 'node_modules -> package.json' - "%APPDATA%\\npm-cache -> package.json" build: off test_script: - - "node --version" - - "npm --version" - - "npm test" + - 'node --version' + - 'npm --version' + - 'npm run build' + - 'npm test' diff --git a/bin/dredd b/bin/dredd index fb0b7965f..68fc955e7 100755 --- a/bin/dredd +++ b/bin/dredd @@ -8,13 +8,13 @@ } }); -const CLI = require('../lib/CLI'); +const CLI = require('../build/CLI').default; -const dreddCli = new CLI({ +const dreddCLI = new CLI({ custom: { cwd: process.cwd(), - argv: process.argv.slice(2), - }, + argv: process.argv.slice(2) + } }); -dreddCli.run(); +dreddCLI.run(); diff --git a/docs/usage-cli.rst b/docs/usage-cli.rst index 641b53530..1c9c72a73 100644 --- a/docs/usage-cli.rst +++ b/docs/usage-cli.rst @@ -83,4 +83,4 @@ CLI Options Reference Remember you can always list all available arguments by ``dredd --help``. -.. cli-options:: ../lib/options.json +.. 
cli-options:: ../options.json diff --git a/lib/CLI.js b/lib/CLI.js index 4af39efa6..a926a1a2a 100644 --- a/lib/CLI.js +++ b/lib/CLI.js @@ -1,46 +1,47 @@ -const R = require('ramda'); -const console = require('console'); // Stubbed in tests by proxyquire -const fs = require('fs'); -const optimist = require('optimist'); -const os = require('os'); -const spawnArgs = require('spawn-args'); -const spawnSync = require('cross-spawn').sync; - -const configUtils = require('./configUtils'); -const Dredd = require('./Dredd'); -const ignorePipeErrors = require('./ignorePipeErrors'); -const interactiveConfig = require('./init'); -const logger = require('./logger'); -const { applyLoggingOptions } = require('./configuration'); -const { spawn } = require('./childProcess'); - -const dreddOptions = require('./options'); -const packageData = require('../package.json'); - +import R from 'ramda' +import console from 'console' // Stubbed in tests by proxyquire +import fs from 'fs' +import optimist from 'optimist' +import os from 'os' +import spawnArgs from 'spawn-args' +import { sync as spawnSync } from 'cross-spawn' + +import * as configUtils from './configUtils' +import Dredd from './Dredd' +import ignorePipeErrors from './ignorePipeErrors' +import interactiveConfig from './init' +import logger from './logger' +import { applyLoggingOptions } from './configuration' +import { spawn } from './childProcess' + +import dreddOptions from '../options.json' +import packageData from '../package.json' class CLI { constructor(options = {}, cb) { - this.cb = cb; - this.finished = false; - this.exit = options.exit; - this.custom = options.custom || {}; + this.cb = cb + this.finished = false + this.exit = options.exit + this.custom = options.custom || {} - this.setExitOrCallback(); + this.setExitOrCallback() if (!this.custom.cwd || typeof this.custom.cwd !== 'string') { - this.custom.cwd = process.cwd(); + this.custom.cwd = process.cwd() } if (!this.custom.argv || !Array.isArray(this.custom.argv)) { - this.custom.argv = []; + this.custom.argv = [] } } setOptimistArgv() { - this.optimist = optimist(this.custom.argv, this.custom.cwd); - this.cliArgv = this.optimist.argv; + this.optimist = optimist(this.custom.argv, this.custom.cwd) + this.cliArgv = this.optimist.argv - this.optimist.usage(`\ + this.optimist + .usage( + `\ Usage: $ dredd init @@ -49,27 +50,31 @@ Or: Example: $ dredd ./api-description.apib http://127.0.0.1:3000 --dry-run\ -`) +`, + ) .options(dreddOptions) - .wrap(80); + .wrap(80) - this.argv = this.optimist.argv; - applyLoggingOptions(this.argv); + this.argv = this.optimist.argv + applyLoggingOptions(this.argv) } // Gracefully terminate server stopServer(callback) { if (!this.serverProcess || !this.serverProcess.spawned) { - logger.debug('No backend server process to terminate.'); - return callback(); + logger.debug('No backend server process to terminate.') + return callback() } if (this.serverProcess.terminated) { - logger.debug('The backend server process has already terminated'); - return callback(); + logger.debug('The backend server process has already terminated') + return callback() } - logger.debug('Terminating backend server process, PID', this.serverProcess.pid); - this.serverProcess.terminate({ force: true }); - this.serverProcess.on('exit', () => callback()); + logger.debug( + 'Terminating backend server process, PID', + this.serverProcess.pid, + ) + this.serverProcess.terminate({ force: true }) + this.serverProcess.on('exit', () => callback()) } // This thing-a-ma-bob here is only for purpose of 
testing @@ -77,198 +82,241 @@ Example: setExitOrCallback() { if (!this.cb) { if (this.exit && this.exit === process.exit) { - this.sigIntEventAdd = true; + this.sigIntEventAdd = true } if (this.exit) { this._processExit = (exitStatus) => { - logger.debug(`Using configured custom exit() method to terminate the Dredd process with status '${exitStatus}'.`); - this.finished = true; + logger.debug( + `Using configured custom exit() method to terminate the Dredd process with status '${exitStatus}'.`, + ) + this.finished = true this.stopServer(() => { - this.exit(exitStatus); - }); - }; + this.exit(exitStatus) + }) + } } else { this._processExit = (exitStatus) => { - logger.debug(`Using native process.exit() method to terminate the Dredd process with status '${exitStatus}'.`); - this.stopServer(() => process.exit(exitStatus)); - }; + logger.debug( + `Using native process.exit() method to terminate the Dredd process with status '${exitStatus}'.`, + ) + this.stopServer(() => process.exit(exitStatus)) + } } } else { this._processExit = (exitStatus) => { - logger.debug(`Using configured custom callback to terminate the Dredd process with status '${exitStatus}'.`); - this.finished = true; + logger.debug( + `Using configured custom callback to terminate the Dredd process with status '${exitStatus}'.`, + ) + this.finished = true if (this.sigIntEventAdded) { if (this.serverProcess && !this.serverProcess.terminated) { - logger.debug('Killing backend server process before Dredd exits.'); - this.serverProcess.signalKill(); + logger.debug('Killing backend server process before Dredd exits.') + this.serverProcess.signalKill() } - process.removeEventListener('SIGINT', this.commandSigInt); + process.removeEventListener('SIGINT', this.commandSigInt) } - this.cb(exitStatus); - return this; - }; + this.cb(exitStatus) + return this + } } } moveBlueprintArgToPath() { // Transform path and p argument to array if it's not if (!Array.isArray(this.argv.path)) { - this.argv.path = this.argv.p = [this.argv.path]; + this.argv.path = this.argv.p = [this.argv.path] } } checkRequiredArgs() { - let argError = false; + let argError = false // If 'blueprint' is missing if (!this.argv._[0]) { - console.error('\nError: Must specify path to API description document.'); - argError = true; + console.error('\nError: Must specify path to API description document.') + argError = true } // If 'endpoint' is missing if (!this.argv._[1]) { - console.error('\nError: Must specify URL of the tested API instance.'); - argError = true; + console.error('\nError: Must specify URL of the tested API instance.') + argError = true } // Show help if argument is missing if (argError) { - console.error('\n'); - this.optimist.showHelp(console.error); - this._processExit(1); + console.error('\n') + this.optimist.showHelp(console.error) + this._processExit(1) } } runExitingActions() { // Run interactive config if (this.argv._[0] === 'init' || this.argv.init === true) { - logger.debug('Starting interactive configuration.'); - this.finished = true; - interactiveConfig(this.argv, (config) => { - configUtils.save(config); - }, (err) => { - if (err) { logger.error('Could not configure Dredd', err); } - this._processExit(0); - }); - - // Show help + logger.debug('Starting interactive configuration.') + this.finished = true + interactiveConfig( + this.argv, + (config) => { + configUtils.save(config) + }, + (err) => { + if (err) { + logger.error('Could not configure Dredd', err) + } + this._processExit(0) + }, + ) + + // Show help } else if (this.argv.help === 
true) { - this.optimist.showHelp(console.error); - this._processExit(0); + this.optimist.showHelp(console.error) + this._processExit(0) - // Show version + // Show version } else if (this.argv.version === true) { console.log(`\ ${packageData.name} v${packageData.version} \ (${os.type()} ${os.release()}; ${os.arch()})\ -`); - this._processExit(0); +`) + this._processExit(0) } } loadDreddFile() { - const configPath = this.argv.config; - logger.debug('Loading configuration file:', configPath); + const configPath = this.argv.config + logger.debug('Loading configuration file:', configPath) if (configPath && fs.existsSync(configPath)) { - logger.debug(`Configuration '${configPath}' found, ignoring other arguments.`); - this.argv = configUtils.load(configPath); + logger.debug( + `Configuration '${configPath}' found, ignoring other arguments.`, + ) + this.argv = configUtils.load(configPath) } // Overwrite saved config with cli arguments Object.keys(this.cliArgv).forEach((key) => { - const value = this.cliArgv[key]; + const value = this.cliArgv[key] if (key !== '_' && key !== '$0') { - this.argv[key] = value; + this.argv[key] = value } - }); + }) - applyLoggingOptions(this.argv); + applyLoggingOptions(this.argv) } parseCustomConfig() { - this.argv.custom = configUtils.parseCustom(this.argv.custom); + this.argv.custom = configUtils.parseCustom(this.argv.custom) } runServerAndThenDredd() { if (!this.argv.server) { - logger.debug('No backend server process specified, starting testing at once'); - this.runDredd(this.dreddInstance); + logger.debug( + 'No backend server process specified, starting testing at once', + ) + this.runDredd(this.dreddInstance) } else { - logger.debug('Backend server process specified, starting backend server and then testing'); - - const parsedArgs = spawnArgs(this.argv.server); - const command = parsedArgs.shift(); - - logger.debug(`Using '${command}' as a server command, ${JSON.stringify(parsedArgs)} as arguments`); - this.serverProcess = spawn(command, parsedArgs); - logger.debug(`Starting backend server process with command: ${this.argv.server}`); - - this.serverProcess.stdout.setEncoding('utf8'); - this.serverProcess.stdout.on('data', data => process.stdout.write(data.toString())); - - this.serverProcess.stderr.setEncoding('utf8'); - this.serverProcess.stderr.on('data', data => process.stdout.write(data.toString())); - - this.serverProcess.on('signalTerm', () => logger.debug('Gracefully terminating the backend server process')); - this.serverProcess.on('signalKill', () => logger.debug('Killing the backend server process')); + logger.debug( + 'Backend server process specified, starting backend server and then testing', + ) + + const parsedArgs = spawnArgs(this.argv.server) + const command = parsedArgs.shift() + + logger.debug( + `Using '${command}' as a server command, ${JSON.stringify( + parsedArgs, + )} as arguments`, + ) + this.serverProcess = spawn(command, parsedArgs) + logger.debug( + `Starting backend server process with command: ${this.argv.server}`, + ) + + this.serverProcess.stdout.setEncoding('utf8') + this.serverProcess.stdout.on('data', (data) => + process.stdout.write(data.toString()), + ) + + this.serverProcess.stderr.setEncoding('utf8') + this.serverProcess.stderr.on('data', (data) => + process.stdout.write(data.toString()), + ) + + this.serverProcess.on('signalTerm', () => + logger.debug('Gracefully terminating the backend server process'), + ) + this.serverProcess.on('signalKill', () => + logger.debug('Killing the backend server process'), + ) 
this.serverProcess.on('crash', (exitStatus, killed) => { - if (killed) { logger.debug('Backend server process was killed'); } - }); + if (killed) { + logger.debug('Backend server process was killed') + } + }) this.serverProcess.on('exit', () => { - logger.debug('Backend server process exited'); - }); + logger.debug('Backend server process exited') + }) this.serverProcess.on('error', (err) => { - logger.error('Command to start backend server process failed, exiting Dredd', err); - this._processExit(1); - }); + logger.error( + 'Command to start backend server process failed, exiting Dredd', + err, + ) + this._processExit(1) + }) // Ensure server is not running when dredd exits prematurely somewhere process.on('beforeExit', () => { if (this.serverProcess && !this.serverProcess.terminated) { - logger.debug('Killing backend server process before Dredd exits'); - this.serverProcess.signalKill(); + logger.debug('Killing backend server process before Dredd exits') + this.serverProcess.signalKill() } - }); + }) // Ensure server is not running when dredd exits prematurely somewhere process.on('exit', () => { if (this.serverProcess && !this.serverProcess.terminated) { - logger.debug('Killing backend server process on Dredd\'s exit'); - this.serverProcess.signalKill(); + logger.debug("Killing backend server process on Dredd's exit") + this.serverProcess.signalKill() } - }); + }) - const waitSecs = parseInt(this.argv['server-wait'], 10); - const waitMilis = waitSecs * 1000; - logger.debug(`Waiting ${waitSecs} seconds for backend server process to start`); + const waitSecs = parseInt(this.argv['server-wait'], 10) + const waitMilis = waitSecs * 1000 + logger.debug( + `Waiting ${waitSecs} seconds for backend server process to start`, + ) this.wait = setTimeout(() => { - this.runDredd(this.dreddInstance); - }, - waitMilis); + this.runDredd(this.dreddInstance) + }, waitMilis) } } // This should be handled in a better way in the future: // https://github.com/apiaryio/dredd/issues/625 logDebuggingInfo(config) { - logger.debug('Dredd version:', packageData.version); - logger.debug('Node.js version:', process.version); - logger.debug('Node.js environment:', process.versions); - logger.debug('System version:', os.type(), os.release(), os.arch()); + logger.debug('Dredd version:', packageData.version) + logger.debug('Node.js version:', process.version) + logger.debug('Node.js environment:', process.versions) + logger.debug('System version:', os.type(), os.release(), os.arch()) try { - const npmVersion = spawnSync('npm', ['--version']).stdout.toString().trim(); - logger.debug('npm version:', npmVersion || 'unable to determine npm version'); + const npmVersion = spawnSync('npm', ['--version']) + .stdout.toString() + .trim() + logger.debug( + 'npm version:', + npmVersion || 'unable to determine npm version', + ) } catch (err) { - logger.debug('npm version: unable to determine npm version:', err); + logger.debug('npm version: unable to determine npm version:', err) } - logger.debug('Configuration:', JSON.stringify(config)); + logger.debug('Configuration:', JSON.stringify(config)) } run() { @@ -281,27 +329,29 @@ ${packageData.name} v${packageData.version} \ this.checkRequiredArgs, this.moveBlueprintArgToPath, ]) { - task.call(this); - if (this.finished) { return; } + task.call(this) + if (this.finished) { + return + } } - const configurationForDredd = this.initConfig(); - this.logDebuggingInfo(configurationForDredd); + const configurationForDredd = this.initConfig() + this.logDebuggingInfo(configurationForDredd) - 
this.dreddInstance = this.initDredd(configurationForDredd); + this.dreddInstance = this.initDredd(configurationForDredd) } catch (e) { - this.exitWithStatus(e); + this.exitWithStatus(e) } - ignorePipeErrors(process); + ignorePipeErrors(process) try { - this.runServerAndThenDredd(); + this.runServerAndThenDredd() } catch (e) { - logger.error(e.message, e.stack); + logger.error(e.message, e.stack) this.stopServer(() => { - this._processExit(2); - }); + this._processExit(2) + }) } } @@ -309,74 +359,78 @@ ${packageData.name} v${packageData.version} \ // When API description path is a glob, some shells are automatically expanding globs and concating // result as arguments so I'm taking last argument as API endpoint server URL and removing it // from optimist's args - this.server = this.argv._[this.argv._.length - 1]; - this.argv._.splice(this.argv._.length - 1, 1); - return this; + this.server = this.argv._[this.argv._.length - 1] + this.argv._.splice(this.argv._.length - 1, 1) + return this } takeRestOfParamsAsPath() { // And rest of arguments concating to 'path' and 'p' opts, duplicates are filtered out later - this.argv.p = this.argv.path = this.argv.path.concat(this.argv._); - return this; + this.argv.p = this.argv.path = this.argv.path.concat(this.argv._) + return this } initConfig() { - this.lastArgvIsApiEndpoint().takeRestOfParamsAsPath(); + this.lastArgvIsApiEndpoint().takeRestOfParamsAsPath() const cliConfig = R.mergeDeepRight(this.argv, { server: this.server, - }); + }) // Push first argument (without some known configuration --key) into paths - if (!cliConfig.path) { cliConfig.path = []; } - cliConfig.path.push(this.argv._[0]); + if (!cliConfig.path) { + cliConfig.path = [] + } + cliConfig.path.push(this.argv._[0]) // Merge "this.custom" which is an input of CLI constructor // (used for internal testing), and "cliConfig" which is a result // of merge upon "argv". Otherwise "custom" key from "dredd.yml" // is always overridden by "this.custom". 
- cliConfig.custom = R.mergeDeepRight(this.custom, cliConfig.custom || {}); + cliConfig.custom = R.mergeDeepRight(this.custom, cliConfig.custom || {}) - return cliConfig; + return cliConfig } initDredd(configuration) { - return new Dredd(configuration); + return new Dredd(configuration) } commandSigInt() { - logger.error('\nShutting down from keyboard interruption (Ctrl+C)'); - this.dreddInstance.transactionsComplete(() => this._processExit(0)); + logger.error('\nShutting down from keyboard interruption (Ctrl+C)') + this.dreddInstance.transactionsComplete(() => this._processExit(0)) } runDredd(dreddInstance) { if (this.sigIntEventAdd) { // Handle SIGINT from user - this.sigIntEventAdded = !(this.sigIntEventAdd = false); - process.on('SIGINT', this.commandSigInt); + this.sigIntEventAdded = !(this.sigIntEventAdd = false) + process.on('SIGINT', this.commandSigInt) } - logger.debug('Running Dredd instance.'); + logger.debug('Running Dredd instance.') dreddInstance.run((error, stats) => { - logger.debug('Dredd instance run finished.'); - this.exitWithStatus(error, stats); - }); + logger.debug('Dredd instance run finished.') + this.exitWithStatus(error, stats) + }) - return this; + return this } exitWithStatus(error, stats) { if (error) { - if (error.message) { logger.error(error.message); } - this._processExit(1); + if (error.message) { + logger.error(error.message) + } + this._processExit(1) } - if ((stats.failures + stats.errors) > 0) { - this._processExit(1); + if (stats.failures + stats.errors > 0) { + this._processExit(1) } else { - this._processExit(0); + this._processExit(0) } } } -module.exports = CLI; +export default CLI diff --git a/lib/Dredd.js b/lib/Dredd.js index 02081adab..093ce3028 100644 --- a/lib/Dredd.js +++ b/lib/Dredd.js @@ -1,105 +1,141 @@ -const async = require('async'); -const parse = require('dredd-transactions/parse'); -const compile = require('dredd-transactions/compile'); - -const configureReporters = require('./configureReporters'); -const resolveLocations = require('./resolveLocations'); -const readLocation = require('./readLocation'); -const resolveModule = require('./resolveModule'); -const logger = require('./logger'); -const TransactionRunner = require('./TransactionRunner'); -const { applyConfiguration } = require('./configuration'); -const annotationToLoggerInfo = require('./annotationToLoggerInfo'); +import async from 'async' +import parse from 'dredd-transactions/parse' +import compile from 'dredd-transactions/compile' +import configureReporters from './configureReporters' +import resolveLocations from './resolveLocations' +import readLocation from './readLocation' +import resolveModule from './resolveModule' +import logger from './logger' +import TransactionRunner from './TransactionRunner' +import { applyConfiguration } from './configuration' +import annotationToLoggerInfo from './annotationToLoggerInfo' function prefixError(error, prefix) { - error.message = `${prefix}: ${error.message}`; - return error; + error.message = `${prefix}: ${error.message}` + return error } - function prefixErrors(decoratedCallback, prefix) { return (error, ...args) => { - if (error) { prefixError(error, prefix); } - decoratedCallback(error, ...args); - }; + if (error) { + prefixError(error, prefix) + } + decoratedCallback(error, ...args) + } } - function readLocations(locations, options, callback) { - if (typeof options === 'function') { [options, callback] = [{}, options]; } - - async.map(locations, (location, next) => { - const decoratedNext = prefixErrors(next, `Unable to 
load API description document from '${location}'`); - readLocation(location, options, decoratedNext); - }, (error, contents) => { - if (error) { callback(error); return; } - - const apiDescriptions = locations - .map((location, i) => ({ location, content: contents[i] })); - callback(null, apiDescriptions); - }); -} + if (typeof options === 'function') { + ;[options, callback] = [{}, options] + } + async.map( + locations, + (location, next) => { + const decoratedNext = prefixErrors( + next, + `Unable to load API description document from '${location}'`, + ) + readLocation(location, options, decoratedNext) + }, + (error, contents) => { + if (error) { + callback(error) + return + } -function parseContent(apiDescriptions, callback) { - async.map(apiDescriptions, ({ location, content }, next) => { - const decoratedNext = prefixErrors(next, `Unable to parse API description document '${location}'`); - parse(content, decoratedNext); - }, (error, parseResults) => { - if (error) { callback(error); return; } - - const parsedAPIdescriptions = apiDescriptions - .map((apiDescription, i) => Object.assign({}, parseResults[i], apiDescription)); - callback(null, parsedAPIdescriptions); - }); + const apiDescriptions = locations.map((location, i) => ({ + location, + content: contents[i], + })) + callback(null, apiDescriptions) + }, + ) } +function parseContent(apiDescriptions, callback) { + async.map( + apiDescriptions, + ({ location, content }, next) => { + const decoratedNext = prefixErrors( + next, + `Unable to parse API description document '${location}'`, + ) + parse(content, decoratedNext) + }, + (error, parseResults) => { + if (error) { + callback(error) + return + } + + const parsedAPIdescriptions = apiDescriptions.map((apiDescription, i) => + Object.assign({}, parseResults[i], apiDescription), + ) + callback(null, parsedAPIdescriptions) + }, + ) +} function compileTransactions(apiDescriptions) { return apiDescriptions .map(({ mediaType, apiElements, location }) => { try { - return compile(mediaType, apiElements, location); + return compile(mediaType, apiElements, location) } catch (error) { - throw prefixError(error, 'Unable to compile HTTP transactions from ' - + `API description document '${location}': ${error.message}`); + throw prefixError( + error, + 'Unable to compile HTTP transactions from ' + + `API description document '${location}': ${error.message}`, + ) } }) - .map((compileResult, i) => Object.assign({}, compileResult, apiDescriptions[i])); + .map((compileResult, i) => + Object.assign({}, compileResult, apiDescriptions[i]), + ) } - function toTransactions(apiDescriptions) { - return apiDescriptions - // produce an array of transactions for each API description, - // where each transaction object gets an extra 'apiDescription' - // property with details about the API description it comes from - .map(apiDescription => ( - apiDescription.transactions - .map(transaction => Object.assign({ - apiDescription: { - location: apiDescription.location, - mediaType: apiDescription.mediaType, - }, - }, transaction)) - )) - // flatten array of arrays - .reduce((flatArray, array) => flatArray.concat(array), []); + return ( + apiDescriptions + // produce an array of transactions for each API description, + // where each transaction object gets an extra 'apiDescription' + // property with details about the API description it comes from + .map((apiDescription) => + apiDescription.transactions.map((transaction) => + Object.assign( + { + apiDescription: { + location: apiDescription.location, + mediaType: 
apiDescription.mediaType, + }, + }, + transaction, + ), + ), + ) + // flatten array of arrays + .reduce((flatArray, array) => flatArray.concat(array), []) + ) } - function toLoggerInfos(apiDescriptions) { return apiDescriptions - .map(apiDescription => apiDescription.annotations - .map(annotation => annotationToLoggerInfo(apiDescription.location, annotation))) - .reduce((flatAnnotations, annotations) => flatAnnotations.concat(annotations), []); + .map((apiDescription) => + apiDescription.annotations.map((annotation) => + annotationToLoggerInfo(apiDescription.location, annotation), + ), + ) + .reduce( + (flatAnnotations, annotations) => flatAnnotations.concat(annotations), + [], + ) } - class Dredd { constructor(config) { - this.configuration = applyConfiguration(config); + this.configuration = applyConfiguration(config) this.stats = { tests: 0, failures: 0, @@ -109,93 +145,116 @@ class Dredd { start: 0, end: 0, duration: 0, - }; - this.transactionRunner = new TransactionRunner(this.configuration); - this.logger = logger; + } + this.transactionRunner = new TransactionRunner(this.configuration) + this.logger = logger } prepareAPIdescriptions(callback) { - this.logger.debug('Resolving locations of API description documents'); - let locations; + this.logger.debug('Resolving locations of API description documents') + let locations try { - locations = resolveLocations(this.configuration.custom.cwd, this.configuration.path); + locations = resolveLocations( + this.configuration.custom.cwd, + this.configuration.path, + ) } catch (error) { - process.nextTick(() => callback(error)); - return; + process.nextTick(() => callback(error)) + return } - async.waterfall([ - (next) => { - this.logger.debug('Reading API description documents'); - readLocations(locations, { http: this.configuration.http }, next); - }, - (apiDescriptions, next) => { - const allAPIdescriptions = this.configuration.apiDescriptions.concat(apiDescriptions); - this.logger.debug('Parsing API description documents'); - parseContent(allAPIdescriptions, next); - }, - ], (error, apiDescriptions) => { - if (error) { callback(error); return; } + async.waterfall( + [ + (next) => { + this.logger.debug('Reading API description documents') + readLocations(locations, { http: this.configuration.http }, next) + }, + (apiDescriptions, next) => { + const allAPIdescriptions = this.configuration.apiDescriptions.concat( + apiDescriptions, + ) + this.logger.debug('Parsing API description documents') + parseContent(allAPIdescriptions, next) + }, + ], + (error, apiDescriptions) => { + if (error) { + callback(error) + return + } - this.logger.debug('Compiling HTTP transactions from API description documents'); - let apiDescriptionsWithTransactions; - try { - apiDescriptionsWithTransactions = compileTransactions(apiDescriptions); - } catch (compileErr) { - callback(compileErr); - return; - } + this.logger.debug( + 'Compiling HTTP transactions from API description documents', + ) + let apiDescriptionsWithTransactions + try { + apiDescriptionsWithTransactions = compileTransactions(apiDescriptions) + } catch (compileErr) { + callback(compileErr) + return + } - callback(null, apiDescriptionsWithTransactions); - }); + callback(null, apiDescriptionsWithTransactions) + }, + ) } run(callback) { - this.logger.debug('Resolving --require'); + this.logger.debug('Resolving --require') + if (this.configuration.require) { - const requirePath = resolveModule(this.configuration.custom.cwd, this.configuration.require); + const requirePath = resolveModule( + 
this.configuration.custom.cwd, + this.configuration.require, + ) try { - require(requirePath); // eslint-disable-line global-require, import/no-dynamic-require + require(requirePath) // eslint-disable-line global-require, import/no-dynamic-require } catch (error) { - callback(error, this.stats); - return; + callback(error, this.stats) + return } } - this.logger.debug('Configuring reporters'); - configureReporters(this.configuration, this.stats, this.transactionRunner); + this.logger.debug('Configuring reporters') + configureReporters(this.configuration, this.stats, this.transactionRunner) // FIXME: 'configureReporters()' pollutes the 'stats' object with // this property. Which is unfortunate, as the 'stats' object is // a part of Dredd's public interface. This line cleans it up for now, but // ideally the property wouldn't be needed at all. - delete this.stats.fileBasedReporters; + delete this.stats.fileBasedReporters + + this.logger.debug('Preparing API description documents') - this.logger.debug('Preparing API description documents'); this.prepareAPIdescriptions((error, apiDescriptions) => { - if (error) { callback(error, this.stats); return; } + if (error) { + callback(error, this.stats) + return + } - const loggerInfos = toLoggerInfos(apiDescriptions); + const loggerInfos = toLoggerInfos(apiDescriptions) // FIXME: Winston 3.x supports calling .log() directly with the loggerInfo // object as it's sole argument, but that's not the case with Winston 2.x // Once we upgrade Winston, the line below can be simplified to .log(loggerInfo) // // Watch https://github.com/apiaryio/dredd/issues/1225 for updates - loggerInfos.forEach(({ level, message }) => this.logger.log(level, message)); - if (loggerInfos.find(loggerInfo => loggerInfo.level === 'error')) { - callback(new Error('API description processing error'), this.stats); - return; + loggerInfos.forEach(({ level, message }) => + this.logger.log(level, message), + ) + if (loggerInfos.find((loggerInfo) => loggerInfo.level === 'error')) { + callback(new Error('API description processing error'), this.stats) + return } - this.logger.debug('Starting the transaction runner'); - this.configuration.apiDescriptions = apiDescriptions; - this.transactionRunner.config(this.configuration); - const transactions = toTransactions(apiDescriptions); + this.logger.debug('Starting the transaction runner') + this.configuration.apiDescriptions = apiDescriptions + this.transactionRunner.config(this.configuration) + const transactions = toTransactions(apiDescriptions) + this.transactionRunner.run(transactions, (runError) => { - callback(runError, this.stats); - }); - }); + callback(runError, this.stats) + }) + }) } } - -module.exports = Dredd; +export default Dredd diff --git a/lib/Hooks.js b/lib/Hooks.js index f79d7f3a2..c039ade9d 100644 --- a/lib/Hooks.js +++ b/lib/Hooks.js @@ -1,4 +1,4 @@ -const hooksLog = require('./hooksLog'); +import hooksLog from './hooksLog' // READ THIS! Disclaimer: // Do not add any functionality to this class unless you want to expose it to the Hooks API. 
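Everything public on the Hooks class below is exactly what hook files get to call. As a reminder of how that surface is consumed, here is a hypothetical Node.js hook file exercising the methods registered in the following hunk; the `require('hooks')` convention, the transaction name, and the header value are illustrative placeholders, not part of this diff:

// hooks.js (hypothetical hook file)
const hooks = require('hooks')

hooks.beforeAll((transactions, done) => {
  hooks.log(`About to run ${transactions.length} transactions`)
  done()
})

hooks.before('Articles > List articles > GET', (transaction, done) => {
  // placeholder transaction name and header value
  transaction.request.headers.Authorization = 'Bearer dummy-token'
  done()
})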
@@ -6,71 +6,71 @@ const hooksLog = require('./hooksLog'); class Hooks { constructor(options = {}) { - this.before = this.before.bind(this); - this.beforeValidation = this.beforeValidation.bind(this); - this.after = this.after.bind(this); - this.beforeAll = this.beforeAll.bind(this); - this.afterAll = this.afterAll.bind(this); - this.beforeEach = this.beforeEach.bind(this); - this.beforeEachValidation = this.beforeEachValidation.bind(this); - this.afterEach = this.afterEach.bind(this); - this.log = this.log.bind(this); - ({ logs: this.logs, logger: this.logger } = options); - this.beforeHooks = {}; - this.beforeValidationHooks = {}; - this.afterHooks = {}; - this.beforeAllHooks = []; - this.afterAllHooks = []; - this.beforeEachHooks = []; - this.beforeEachValidationHooks = []; - this.afterEachHooks = []; + this.before = this.before.bind(this) + this.beforeValidation = this.beforeValidation.bind(this) + this.after = this.after.bind(this) + this.beforeAll = this.beforeAll.bind(this) + this.afterAll = this.afterAll.bind(this) + this.beforeEach = this.beforeEach.bind(this) + this.beforeEachValidation = this.beforeEachValidation.bind(this) + this.afterEach = this.afterEach.bind(this) + this.log = this.log.bind(this) + ;({ logs: this.logs, logger: this.logger } = options) + this.beforeHooks = {} + this.beforeValidationHooks = {} + this.afterHooks = {} + this.beforeAllHooks = [] + this.afterAllHooks = [] + this.beforeEachHooks = [] + this.beforeEachValidationHooks = [] + this.afterEachHooks = [] } before(name, hook) { - this.addHook(this.beforeHooks, name, hook); + this.addHook(this.beforeHooks, name, hook) } beforeValidation(name, hook) { - this.addHook(this.beforeValidationHooks, name, hook); + this.addHook(this.beforeValidationHooks, name, hook) } after(name, hook) { - this.addHook(this.afterHooks, name, hook); + this.addHook(this.afterHooks, name, hook) } beforeAll(hook) { - this.beforeAllHooks.push(hook); + this.beforeAllHooks.push(hook) } afterAll(hook) { - this.afterAllHooks.push(hook); + this.afterAllHooks.push(hook) } beforeEach(hook) { - this.beforeEachHooks.push(hook); + this.beforeEachHooks.push(hook) } beforeEachValidation(hook) { - this.beforeEachValidationHooks.push(hook); + this.beforeEachValidationHooks.push(hook) } afterEach(hook) { - this.afterEachHooks.push(hook); + this.afterEachHooks.push(hook) } addHook(hooks, name, hook) { if (hooks[name]) { - hooks[name].push(hook); + hooks[name].push(hook) } else { - hooks[name] = [hook]; + hooks[name] = [hook] } } // log(logVariant, content) // log(content) log(...args) { - this.logs = hooksLog(this.logs, this.logger, ...Array.from(args)); + this.logs = hooksLog(this.logs, this.logger, ...Array.from(args)) } } -module.exports = Hooks; +export default Hooks diff --git a/lib/HooksWorkerClient.js b/lib/HooksWorkerClient.js index c3fd8a9d2..0f2ff0089 100644 --- a/lib/HooksWorkerClient.js +++ b/lib/HooksWorkerClient.js @@ -1,299 +1,354 @@ -const generateUuid = require('uuid/v4'); -const net = require('net'); -const path = require('path'); -const spawnArgs = require('spawn-args'); -const { EventEmitter } = require('events'); +import generateUuid from 'uuid/v4' +import net from 'net' +import path from 'path' +import spawnArgs from 'spawn-args' +import { EventEmitter } from 'events' -const getGoBinary = require('./getGoBinary'); -const logger = require('./logger'); -const which = require('./which'); -const { spawn } = require('./childProcess'); +import getGoBinary from './getGoBinary' +import logger from './logger' +import which from './which' 
+import { spawn } from './childProcess' class HooksWorkerClient { constructor(runner) { - this.runner = runner; - const options = this.runner.hooks.configuration; - - this.language = options.language; - this.timeout = options['hooks-worker-timeout'] || 5000; - this.connectTimeout = options['hooks-worker-connect-timeout'] || 1500; - this.connectRetry = options['hooks-worker-connect-retry'] || 500; - this.afterConnectWait = options['hooks-worker-after-connect-wait'] || 100; - this.termTimeout = options['hooks-worker-term-timeout'] || 5000; - this.termRetry = options['hooks-worker-term-retry'] || 500; - this.handlerHost = options['hooks-worker-handler-host'] || '127.0.0.1'; - this.handlerPort = options['hooks-worker-handler-port'] || 61321; - this.handlerMessageDelimiter = '\n'; - this.clientConnected = false; - this.connectError = false; - this.emitter = new EventEmitter(); + this.runner = runner + const options = this.runner.hooks.configuration + + this.language = options.language + this.timeout = options['hooks-worker-timeout'] || 5000 + this.connectTimeout = options['hooks-worker-connect-timeout'] || 1500 + this.connectRetry = options['hooks-worker-connect-retry'] || 500 + this.afterConnectWait = options['hooks-worker-after-connect-wait'] || 100 + this.termTimeout = options['hooks-worker-term-timeout'] || 5000 + this.termRetry = options['hooks-worker-term-retry'] || 500 + this.handlerHost = options['hooks-worker-handler-host'] || '127.0.0.1' + this.handlerPort = options['hooks-worker-handler-port'] || 61321 + this.handlerMessageDelimiter = '\n' + this.clientConnected = false + this.connectError = false + this.emitter = new EventEmitter() } start(callback) { - logger.debug('Looking up hooks handler implementation:', this.language); + logger.debug('Looking up hooks handler implementation:', this.language) this.setCommandAndCheckForExecutables((executablesError) => { - if (executablesError) { return callback(executablesError); } + if (executablesError) { + return callback(executablesError) + } - logger.debug('Starting hooks handler.'); + logger.debug('Starting hooks handler.') this.spawnHandler((spawnHandlerError) => { - if (spawnHandlerError) { return callback(spawnHandlerError); } + if (spawnHandlerError) { + return callback(spawnHandlerError) + } - logger.debug('Connecting to hooks handler.'); + logger.debug('Connecting to hooks handler.') this.connectToHandler((connectHandlerError) => { if (connectHandlerError) { - this.terminateHandler(terminateError => callback(connectHandlerError || terminateError)); - return; + this.terminateHandler((terminateError) => + callback(connectHandlerError || terminateError), + ) + return } - logger.debug('Registering hooks.'); + logger.debug('Registering hooks.') this.registerHooks((registerHooksError) => { - if (registerHooksError) { return callback(registerHooksError); } - callback(); - }); - }); - }); - }); + if (registerHooksError) { + return callback(registerHooksError) + } + callback() + }) + }) + }) + }) } stop(callback) { - this.disconnectFromHandler(); - this.terminateHandler(callback); + this.disconnectFromHandler() + this.terminateHandler(callback) } terminateHandler(callback) { - logger.debug('Terminating hooks handler process, PID', this.handler.pid); + logger.debug('Terminating hooks handler process, PID', this.handler.pid) if (this.handler.terminated) { - logger.debug('The hooks handler process has already terminated'); - return callback(); + logger.debug('The hooks handler process has already terminated') + return callback() } - 
this.handler.terminate({ force: true, timeout: this.termTimeout, retryDelay: this.termRetry }); - this.handler.on('close', () => callback()); + this.handler.terminate({ + force: true, + timeout: this.termTimeout, + retryDelay: this.termRetry, + }) + this.handler.on('close', () => callback()) } disconnectFromHandler() { - this.handlerClient.destroy(); + this.handlerClient.destroy() } setCommandAndCheckForExecutables(callback) { // Select handler based on option, use option string as command if not match anything - let msg; + let msg if (this.language === 'ruby') { - this.handlerCommand = 'dredd-hooks-ruby'; - this.handlerCommandArgs = []; + this.handlerCommand = 'dredd-hooks-ruby' + this.handlerCommandArgs = [] if (!which.which(this.handlerCommand)) { msg = ` Ruby hooks handler command not found: ${this.handlerCommand} Install ruby hooks handler by running: $ gem install dredd_hooks -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else { - callback(); + callback() } } else if (this.language === 'rust') { - this.handlerCommand = 'dredd-hooks-rust'; - this.handlerCommandArgs = []; + this.handlerCommand = 'dredd-hooks-rust' + this.handlerCommandArgs = [] if (!which.which(this.handlerCommand)) { msg = ` Rust hooks handler command not found: ${this.handlerCommand} Install rust hooks handler by running: $ cargo install dredd-hooks -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else { - callback(); + callback() } } else if (this.language === 'python') { - this.handlerCommand = 'dredd-hooks-python'; - this.handlerCommandArgs = []; + this.handlerCommand = 'dredd-hooks-python' + this.handlerCommandArgs = [] if (!which.which(this.handlerCommand)) { msg = ` Python hooks handler command not found: ${this.handlerCommand} Install python hooks handler by running: $ pip install dredd_hooks -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else { - callback(); + callback() } } else if (this.language === 'php') { - this.handlerCommand = 'dredd-hooks-php'; - this.handlerCommandArgs = []; + this.handlerCommand = 'dredd-hooks-php' + this.handlerCommandArgs = [] if (!which.which(this.handlerCommand)) { msg = ` PHP hooks handler command not found: ${this.handlerCommand} Install php hooks handler by running: $ composer require ddelnano/dredd-hooks-php --dev -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else { - callback(); + callback() } } else if (this.language === 'perl') { - this.handlerCommand = 'dredd-hooks-perl'; - this.handlerCommandArgs = []; + this.handlerCommand = 'dredd-hooks-perl' + this.handlerCommandArgs = [] if (!which.which(this.handlerCommand)) { msg = ` Perl hooks handler command not found: ${this.handlerCommand} Install perl hooks handler by running: $ cpanm Dredd::Hooks -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else { - callback(); + callback() } } else if (this.language === 'nodejs') { msg = ` Hooks handler should not be used for Node.js. Use Dredd's native Node.js hooks instead. 
-`; - callback(new Error(msg)); +` + callback(new Error(msg)) } else if (this.language === 'go') { getGoBinary((err, goBin) => { if (err) { - callback(new Error(`Go doesn't seem to be installed: ${err.message}`)); + callback(new Error(`Go doesn't seem to be installed: ${err.message}`)) } else { - this.handlerCommand = path.join(goBin, 'goodman'); - this.handlerCommandArgs = []; + this.handlerCommand = path.join(goBin, 'goodman') + this.handlerCommandArgs = [] if (which.which(this.handlerCommand)) { - callback(); + callback() } else { msg = ` Go hooks handler command not found: ${this.handlerCommand} Install go hooks handler by running: $ go get github.com/snikch/goodman/cmd/goodman -`; - callback(new Error(msg)); +` + callback(new Error(msg)) } } - }); + }) } else { - const parsedArgs = spawnArgs(this.language); - this.handlerCommand = parsedArgs.shift(); - this.handlerCommandArgs = parsedArgs; - - logger.debug(`Using '${this.handlerCommand}' as a hooks handler command, '${this.handlerCommandArgs.join(' ')}' as arguments`); + const parsedArgs = spawnArgs(this.language) + this.handlerCommand = parsedArgs.shift() + this.handlerCommandArgs = parsedArgs + + logger.debug( + `Using '${ + this.handlerCommand + }' as a hooks handler command, '${this.handlerCommandArgs.join( + ' ', + )}' as arguments`, + ) if (!which.which(this.handlerCommand)) { - msg = `Hooks handler command not found: ${this.handlerCommand}`; - callback(new Error(msg)); + msg = `Hooks handler command not found: ${this.handlerCommand}` + callback(new Error(msg)) } else { - callback(); + callback() } } } spawnHandler(callback) { - const pathGlobs = this.runner.hooks.configuration.hookfiles; - const handlerCommandArgs = this.handlerCommandArgs.concat(pathGlobs); - - logger.debug(`Spawning '${this.language}' hooks handler process.`); - this.handler = spawn(this.handlerCommand, handlerCommandArgs); - - this.handler.stdout.on('data', data => logger.debug('Hooks handler stdout:', data.toString())); - this.handler.stderr.on('data', data => logger.debug('Hooks handler stderr:', data.toString())); - - this.handler.on('signalTerm', () => logger.debug('Gracefully terminating the hooks handler process')); - this.handler.on('signalKill', () => logger.debug('Killing the hooks handler process')); + const pathGlobs = this.runner.hooks.configuration.hookfiles + const handlerCommandArgs = this.handlerCommandArgs.concat(pathGlobs) + + logger.debug(`Spawning '${this.language}' hooks handler process.`) + this.handler = spawn(this.handlerCommand, handlerCommandArgs) + + this.handler.stdout.on('data', (data) => + logger.debug('Hooks handler stdout:', data.toString()), + ) + this.handler.stderr.on('data', (data) => + logger.debug('Hooks handler stderr:', data.toString()), + ) + + this.handler.on('signalTerm', () => + logger.debug('Gracefully terminating the hooks handler process'), + ) + this.handler.on('signalKill', () => + logger.debug('Killing the hooks handler process'), + ) this.handler.on('crash', (exitStatus, killed) => { - let msg; + let msg if (killed) { - msg = `Hooks handler process '${this.handlerCommand} ${handlerCommandArgs.join(' ')}' was killed.`; + msg = `Hooks handler process '${ + this.handlerCommand + } ${handlerCommandArgs.join(' ')}' was killed.` } else { - msg = `Hooks handler process '${this.handlerCommand} ${handlerCommandArgs.join(' ')}' exited with status: ${exitStatus}`; + msg = `Hooks handler process '${ + this.handlerCommand + } ${handlerCommandArgs.join(' ')}' exited with status: ${exitStatus}` } - logger.error(msg); - 
this.runner.hookHandlerError = new Error(msg); - }); + logger.error(msg) + this.runner.hookHandlerError = new Error(msg) + }) this.handler.on('error', (err) => { - this.runner.hookHandlerError = err; - }); - callback(); + this.runner.hookHandlerError = err + }) + callback() } connectToHandler(callback) { - let timeout; - const start = Date.now(); + let timeout + const start = Date.now() const waitForConnect = () => { - if ((Date.now() - start) < this.connectTimeout) { - clearTimeout(timeout); + if (Date.now() - start < this.connectTimeout) { + clearTimeout(timeout) if (this.connectError !== false) { - logger.warn('Error connecting to the hooks handler process. Is the handler running? Retrying.'); - this.connectError = false; + logger.warn( + 'Error connecting to the hooks handler process. Is the handler running? Retrying.', + ) + this.connectError = false } if (this.clientConnected !== true) { - connectAndSetupClient(); - timeout = setTimeout(waitForConnect, this.connectRetry); + connectAndSetupClient() + timeout = setTimeout(waitForConnect, this.connectRetry) } } else { - clearTimeout(timeout); + clearTimeout(timeout) if (!this.clientConnected) { - if (this.handlerClient) { this.handlerClient.destroy(); } - const msg = `Connection timeout ${this.connectTimeout / 1000}s to hooks handler ` - + `on ${this.handlerHost}:${this.handlerPort} exceeded. Try increasing the limit.`; - callback(new Error(msg)); + if (this.handlerClient) { + this.handlerClient.destroy() + } + const msg = + `Connection timeout ${this.connectTimeout / + 1000}s to hooks handler ` + + `on ${this.handlerHost}:${this.handlerPort} exceeded. Try increasing the limit.` + callback(new Error(msg)) } } - }; + } const connectAndSetupClient = () => { - logger.debug('Starting TCP connection with hooks handler process.'); + logger.debug('Starting TCP connection with hooks handler process.') if (this.runner.hookHandlerError) { - callback(this.runner.hookHandlerError); + callback(this.runner.hookHandlerError) } - this.handlerClient = net.connect({ port: this.handlerPort, host: this.handlerHost }); + this.handlerClient = net.connect({ + port: this.handlerPort, + host: this.handlerHost, + }) this.handlerClient.on('connect', () => { - logger.debug(`Successfully connected to hooks handler. Waiting ${this.afterConnectWait / 1000}s to start testing.`); - this.clientConnected = true; - clearTimeout(timeout); - setTimeout(callback, this.afterConnectWait); - }); - - this.handlerClient.on('close', () => logger.debug('TCP communication with hooks handler closed.')); + logger.debug( + `Successfully connected to hooks handler. 
Waiting ${this + .afterConnectWait / 1000}s to start testing.`, + ) + this.clientConnected = true + clearTimeout(timeout) + setTimeout(callback, this.afterConnectWait) + }) + + this.handlerClient.on('close', () => + logger.debug('TCP communication with hooks handler closed.'), + ) this.handlerClient.on('error', (connectError) => { - logger.debug('TCP communication with hooks handler errored.', connectError); - this.connectError = connectError; - }); + logger.debug( + 'TCP communication with hooks handler errored.', + connectError, + ) + this.connectError = connectError + }) - let handlerBuffer = ''; + let handlerBuffer = '' this.handlerClient.on('data', (data) => { - logger.debug('Dredd received some data from hooks handler.'); + logger.debug('Dredd received some data from hooks handler.') - handlerBuffer += data.toString(); + handlerBuffer += data.toString() if (data.toString().indexOf(this.handlerMessageDelimiter) > -1) { - const splittedData = handlerBuffer.split(this.handlerMessageDelimiter); + const splittedData = handlerBuffer.split(this.handlerMessageDelimiter) // Add last chunk to the buffer - handlerBuffer = splittedData.pop(); + handlerBuffer = splittedData.pop() - const messages = []; + const messages = [] for (const message of splittedData) { - messages.push(JSON.parse(message)); + messages.push(JSON.parse(message)) } - const result = []; + const result = [] for (const message of messages) { if (message.uuid) { - logger.debug('Dredd received a valid message from hooks handler:', message.uuid); - result.push(this.emitter.emit(message.uuid, message)); + logger.debug( + 'Dredd received a valid message from hooks handler:', + message.uuid, + ) + result.push(this.emitter.emit(message.uuid, message)) } else { - result.push(logger.debug('UUID not present in hooks handler message, ignoring:', JSON.stringify(message, null, 2))); + result.push( + logger.debug( + 'UUID not present in hooks handler message, ignoring:', + JSON.stringify(message, null, 2), + ), + ) } } - return result; + return result } - }); - }; + }) + } - timeout = setTimeout(waitForConnect, this.connectRetry); + timeout = setTimeout(waitForConnect, this.connectRetry) } registerHooks(callback) { @@ -303,28 +358,28 @@ $ go get github.com/snikch/goodman/cmd/goodman 'afterEach', 'beforeAll', 'afterAll', - ]; + ] for (const eventName of eachHookNames) { this.runner.hooks[eventName]((data, hookCallback) => { - const uuid = generateUuid(); + const uuid = generateUuid() // Send transaction to the handler const message = { event: eventName, uuid, data, - }; + } - logger.debug('Sending HTTP transaction data to hooks handler:', uuid); - this.handlerClient.write(JSON.stringify(message)); - this.handlerClient.write(this.handlerMessageDelimiter); + logger.debug('Sending HTTP transaction data to hooks handler:', uuid) + this.handlerClient.write(JSON.stringify(message)) + this.handlerClient.write(this.handlerMessageDelimiter) // Register event for the sent transaction function messageHandler(receivedMessage) { - let value; - logger.debug('Handling hook:', uuid); - clearTimeout(timeout); + let value + logger.debug('Handling hook:', uuid) + clearTimeout(timeout) // We are directly modifying the `data` argument here. Neither direct // assignment (`data = receivedMessage.data`) nor `clone()` will work... 
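The comment above is the crux of the message handler that follows: reassigning the `data` parameter would only rebind the local variable, so the handler copies keys onto the existing object instead, keeping the runner's reference in sync. A stripped-down illustration (the function and variable names are illustrative only):

function applyHandlerData(data, received) {
  // data = received            // would NOT propagate to the caller
  Object.keys(received || {}).forEach((key) => {
    data[key] = received[key]   // in-place mutation is visible to the caller
  })
}

const transaction = { fail: false }
applyHandlerData(transaction, { fail: 'Hook timed out.' })
// transaction.fail === 'Hook timed out.'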
@@ -332,59 +387,60 @@ $ go get github.com/snikch/goodman/cmd/goodman // *All hooks receive array of transactions if (eventName.indexOf('All') > -1) { for (let index = 0; index < receivedMessage.data.length; index++) { - value = receivedMessage.data[index]; - data[index] = value; + value = receivedMessage.data[index] + data[index] = value } - // *Each hook receives single transaction + // *Each hook receives single transaction } else { for (const key of Object.keys(receivedMessage.data || {})) { - value = receivedMessage.data[key]; - data[key] = value; + value = receivedMessage.data[key] + data[key] = value } } - hookCallback(); + hookCallback() } const handleTimeout = () => { - logger.warn('Hook handling timed out.'); + logger.warn('Hook handling timed out.') if (eventName.indexOf('All') === -1) { - data.fail = 'Hook timed out.'; + data.fail = 'Hook timed out.' } - this.emitter.removeListener(uuid, messageHandler); + this.emitter.removeListener(uuid, messageHandler) - hookCallback(); - }; + hookCallback() + } // Set timeout for the hook - let timeout = setTimeout(handleTimeout, this.timeout); + let timeout = setTimeout(handleTimeout, this.timeout) - this.emitter.on(uuid, messageHandler); - }); + this.emitter.on(uuid, messageHandler) + }) } this.runner.hooks.afterAll((transactions, hookCallback) => { // This is needed for transaction modification integration tests: // https://github.com/apiaryio/dredd-hooks-template/blob/master/features/execution_order.feature if (process.env.TEST_DREDD_HOOKS_HANDLER_ORDER === 'true') { - console.error('FOR TESTING ONLY'); - const modifications = (transactions[0] && transactions[0].hooks_modifications) || []; + console.error('FOR TESTING ONLY') + const modifications = + (transactions[0] && transactions[0].hooks_modifications) || [] if (!modifications.length) { - throw new Error('Hooks must modify transaction.hooks_modifications'); + throw new Error('Hooks must modify transaction.hooks_modifications') } for (let index = 0; index < modifications.length; index++) { - const modification = modifications[index]; - console.error(`${index} ${modification}`); + const modification = modifications[index] + console.error(`${index} ${modification}`) } - console.error('FOR TESTING ONLY'); + console.error('FOR TESTING ONLY') } - this.stop(hookCallback); - }); + this.stop(hookCallback) + }) - callback(); + callback() } } -module.exports = HooksWorkerClient; +export default HooksWorkerClient diff --git a/lib/TransactionRunner.js b/lib/TransactionRunner.js index 4a8c8e500..f2329fb43 100644 --- a/lib/TransactionRunner.js +++ b/lib/TransactionRunner.js @@ -1,16 +1,15 @@ -const async = require('async'); -const chai = require('chai'); -const gavel = require('gavel'); -const os = require('os'); -const url = require('url'); - -const addHooks = require('./addHooks'); -const logger = require('./logger'); -const reporterOutputLogger = require('./reporters/reporterOutputLogger'); -const packageData = require('../package.json'); -const sortTransactions = require('./sortTransactions'); -const performRequest = require('./performRequest'); - +import async from 'async'; +import chai from 'chai'; +import gavel from 'gavel'; +import os from 'os'; +import url from 'url'; + +import addHooks from './addHooks'; +import logger from './logger'; +import reporterOutputLogger from './reporters/reporterOutputLogger'; +import packageData from '../package.json'; +import sortTransactions from './sortTransactions'; +import performRequest from './performRequest'; function headersArrayToObject(arr) { return 
Array.from(arr).reduce((result, currentItem) => { @@ -20,10 +19,11 @@ function headersArrayToObject(arr) { } function eventCallback(reporterError) { - if (reporterError) { logger.error(reporterError.message); } + if (reporterError) { + logger.error(reporterError.message); + } } - class TransactionRunner { constructor(configuration) { this.configureTransaction = this.configureTransaction.bind(this); @@ -43,25 +43,39 @@ class TransactionRunner { run(transactions, callback) { logger.debug('Starting reporters and waiting until all of them are ready'); this.emitStart((emitStartErr) => { - if (emitStartErr) { return callback(emitStartErr); } + if (emitStartErr) { + return callback(emitStartErr); + } logger.debug('Sorting HTTP transactions'); - transactions = this.configuration.sorted ? sortTransactions(transactions) : transactions; + transactions = this.configuration.sorted + ? sortTransactions(transactions) + : transactions; logger.debug('Configuring HTTP transactions'); transactions = transactions.map(this.configureTransaction.bind(this)); logger.debug('Reading hook files and registering hooks'); addHooks(this, transactions, (addHooksError) => { - if (addHooksError) { return callback(addHooksError); } + if (addHooksError) { + return callback(addHooksError); + } logger.debug('Executing HTTP transactions'); - this.executeAllTransactions(transactions, this.hooks, (execAllTransErr) => { - if (execAllTransErr) { return callback(execAllTransErr); } + this.executeAllTransactions( + transactions, + this.hooks, + (execAllTransErr) => { + if (execAllTransErr) { + return callback(execAllTransErr); + } - logger.debug('Wrapping up testing and waiting until all reporters are done'); - this.emitEnd(callback); - }); + logger.debug( + 'Wrapping up testing and waiting until all reporters are done' + ); + this.emitEnd(callback); + } + ); }); }); } @@ -72,13 +86,21 @@ class TransactionRunner { // When event 'start' is emitted, function in callback is executed for each // reporter registered by listeners - this.configuration.emitter.emit('start', this.configuration.apiDescriptions, (reporterError) => { - if (reporterError) { logger.error(reporterError.message); } + this.configuration.emitter.emit( + 'start', + this.configuration.apiDescriptions, + (reporterError) => { + if (reporterError) { + logger.error(reporterError.message); + } - // Last called reporter callback function starts the runner - reporterCount--; - if (reporterCount === 0) { callback(); } - }); + // Last called reporter callback function starts the runner + reporterCount--; + if (reporterCount === 0) { + callback(); + } + } + ); } executeAllTransactions(transactions, hooks, callback) { @@ -96,63 +118,103 @@ class TransactionRunner { } // End of warning - if (this.hookHandlerError) { return callback(this.hookHandlerError); } + if (this.hookHandlerError) { + return callback(this.hookHandlerError); + } - logger.debug('Running \'beforeAll\' hooks'); + logger.debug("Running 'beforeAll' hooks"); this.runHooksForData(hooks.beforeAllHooks, transactions, () => { - if (this.hookHandlerError) { return callback(this.hookHandlerError); } + if (this.hookHandlerError) { + return callback(this.hookHandlerError); + } // Iterate over transactions' transaction // Because async changes the way referencing of properties work, // we need to work with indexes (keys) here, no other way of access. 
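// Editor's note (illustrative sketch, not part of the patch): the call being
// reformatted below keeps the async.timesSeries(n, iteratee, done) shape from the
// 'async' library. The iteratee receives the current index plus a per-iteration
// callback, which is why the transactions are looked up by index, as the comment
// above explains. Standalone example; the transaction names are hypothetical:
import async from 'async'
const sampleTransactions = [{ name: 'Tasks > List tasks' }, { name: 'Tasks > Create a task' }]
async.timesSeries(
  sampleTransactions.length,
  (index, next) => next(null, sampleTransactions[index].name),
  (error, names) => {
    // called once, after all iterations have completed in order
  }
)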
- return async.timesSeries(transactions.length, (transactionIndex, iterationCallback) => { - transaction = transactions[transactionIndex]; - logger.debug(`Processing transaction #${transactionIndex + 1}:`, transaction.name); - - logger.debug('Running \'beforeEach\' hooks'); - this.runHooksForData(hooks.beforeEachHooks, transaction, () => { - if (this.hookHandlerError) { return iterationCallback(this.hookHandlerError); } - - logger.debug('Running \'before\' hooks'); - this.runHooksForData(hooks.beforeHooks[transaction.name], transaction, () => { - if (this.hookHandlerError) { return iterationCallback(this.hookHandlerError); } - - // This method: - // - skips and fails based on hooks or options - // - executes a request - // - recieves a response - // - runs beforeEachValidation hooks - // - runs beforeValidation hooks - // - runs Gavel validation - this.executeTransaction(transaction, hooks, () => { - if (this.hookHandlerError) { return iterationCallback(this.hookHandlerError); } - - logger.debug('Running \'afterEach\' hooks'); - this.runHooksForData(hooks.afterEachHooks, transaction, () => { - if (this.hookHandlerError) { return iterationCallback(this.hookHandlerError); } - - logger.debug('Running \'after\' hooks'); - this.runHooksForData(hooks.afterHooks[transaction.name], transaction, () => { - if (this.hookHandlerError) { return iterationCallback(this.hookHandlerError); } - - logger.debug(`Evaluating results of transaction execution #${transactionIndex + 1}:`, transaction.name); - this.emitResult(transaction, iterationCallback); + return async.timesSeries( + transactions.length, + (transactionIndex, iterationCallback) => { + transaction = transactions[transactionIndex]; + logger.debug( + `Processing transaction #${transactionIndex + 1}:`, + transaction.name + ); + + logger.debug("Running 'beforeEach' hooks"); + this.runHooksForData(hooks.beforeEachHooks, transaction, () => { + if (this.hookHandlerError) { + return iterationCallback(this.hookHandlerError); + } + + logger.debug("Running 'before' hooks"); + this.runHooksForData( + hooks.beforeHooks[transaction.name], + transaction, + () => { + if (this.hookHandlerError) { + return iterationCallback(this.hookHandlerError); + } + + // This method: + // - skips and fails based on hooks or options + // - executes a request + // - recieves a response + // - runs beforeEachValidation hooks + // - runs beforeValidation hooks + // - runs Gavel validation + this.executeTransaction(transaction, hooks, () => { + if (this.hookHandlerError) { + return iterationCallback(this.hookHandlerError); + } + + logger.debug("Running 'afterEach' hooks"); + this.runHooksForData( + hooks.afterEachHooks, + transaction, + () => { + if (this.hookHandlerError) { + return iterationCallback(this.hookHandlerError); + } + + logger.debug("Running 'after' hooks"); + this.runHooksForData( + hooks.afterHooks[transaction.name], + transaction, + () => { + if (this.hookHandlerError) { + return iterationCallback(this.hookHandlerError); + } + + logger.debug( + `Evaluating results of transaction execution #${transactionIndex + + 1}:`, + transaction.name + ); + this.emitResult(transaction, iterationCallback); + } + ); + } + ); }); - }); - }); + } + ); }); - }); - }, - (iterationError) => { - if (iterationError) { return callback(iterationError); } - - logger.debug('Running \'afterAll\' hooks'); - this.runHooksForData(hooks.afterAllHooks, transactions, () => { - if (this.hookHandlerError) { return callback(this.hookHandlerError); } - callback(); - }); - }); + }, + (iterationError) => { 
+ if (iterationError) { + return callback(iterationError); + } + + logger.debug("Running 'afterAll' hooks"); + this.runHooksForData(hooks.afterAllHooks, transactions, () => { + if (this.hookHandlerError) { + return callback(this.hookHandlerError); + } + callback(); + }); + } + ); }); } @@ -179,7 +241,12 @@ class TransactionRunner { // all the flow can be executed twice. We need to reimplement this. if (error instanceof chai.AssertionError) { const transactions = Array.isArray(data) ? data : [data]; - for (const transaction of transactions) { this.failTransaction(transaction, `Failed assertion in hooks: ${error.message}`); } + for (const transaction of transactions) { + this.failTransaction( + transaction, + `Failed assertion in hooks: ${error.message}` + ); + } } else { logger.debug('Hook errored:', error); this.emitHookError(error, data); @@ -201,7 +268,9 @@ class TransactionRunner { // function. That probably isn't correct and should be fixed eventually // (beware, tests count with the current behavior). emitHookError(error, data) { - if (!(error instanceof Error)) { error = new Error(error); } + if (!(error instanceof Error)) { + error = new Error(error); + } const test = this.createTest(data); test.request = data.request; this.emitError(error, test); @@ -223,14 +292,16 @@ class TransactionRunner { const { origin, request, response } = transaction; // Parse the server URL (just once, caching it in @parsedUrl) - if (!this.parsedUrl) { this.parsedUrl = this.parseServerUrl(configuration.endpoint); } + if (!this.parsedUrl) { + this.parsedUrl = this.parseServerUrl(configuration.endpoint); + } const fullPath = this.getFullPath(this.parsedUrl.path, request.uri); const headers = headersArrayToObject(request.headers); // Add Dredd User-Agent (if no User-Agent is already present) const hasUserAgent = Object.keys(headers) - .map(name => name.toLowerCase()) + .map((name) => name.toLowerCase()) .includes('user-agent'); if (!hasUserAgent) { const system = `${os.type()} ${os.release()}; ${os.arch()}`; @@ -251,22 +322,34 @@ class TransactionRunner { // The data models as used here must conform to Gavel.js // as defined in `http-response.coffee` const expected = { headers: headersArrayToObject(response.headers) }; - if (response.body) { expected.body = response.body; } - if (response.status) { expected.statusCode = response.status; } - if (response.schema) { expected.bodySchema = response.schema; } + if (response.body) { + expected.body = response.body; + } + if (response.status) { + expected.statusCode = response.status; + } + if (response.schema) { + expected.bodySchema = response.schema; + } // Backward compatible transaction name hack. Transaction names will be // replaced by Canonical Transaction Paths: https://github.com/apiaryio/dredd/issues/227 if (!this.multiBlueprint) { - transaction.name = transaction.name.replace(`${transaction.origin.apiName} > `, ''); + transaction.name = transaction.name.replace( + `${transaction.origin.apiName} > `, + '' + ); } // Transaction skipping (can be modified in hooks). If the input format // is OpenAPI 2, non-2xx transactions should be skipped by default. 
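// Editor's note (illustrative sketch, not part of the patch): "can be modified in
// hooks" refers to the standard Dredd JavaScript hooks API, where a hook file can
// flip the flag back, e.g. to test a non-2xx response that OpenAPI 2 input skips
// by default. The transaction name below is hypothetical.
const hooks = require('hooks')
hooks.before('Tasks > Create a task > 400', (transaction) => {
  transaction.skip = false // run this transaction even though it is skipped by default
})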
let skip = false; - if (transaction.apiDescription && transaction.apiDescription.mediaType.includes('swagger')) { + if ( + transaction.apiDescription && + transaction.apiDescription.mediaType.includes('swagger') + ) { const status = parseInt(response.status, 10); - if ((status < 200) || (status >= 300)) { + if (status < 200 || status >= 300) { skip = true; } } @@ -282,7 +365,7 @@ class TransactionRunner { origin, fullPath, protocol: this.parsedUrl.protocol, - skip, + skip }; return configuredTransaction; @@ -298,8 +381,12 @@ class TransactionRunner { } getFullPath(serverPath, requestPath) { - if (serverPath === '/') { return requestPath; } - if (!requestPath) { return serverPath; } + if (serverPath === '/') { + return requestPath; + } + if (!requestPath) { + return serverPath; + } // Join two paths // @@ -313,10 +400,13 @@ class TransactionRunner { // undesirable behavior depending on slashes. // See also https://github.com/joyent/node/issues/2216 let segments = [serverPath, requestPath]; - segments = (Array.from(segments).map(segment => segment.replace(/^\/|\/$/g, ''))); + segments = Array.from(segments).map((segment) => + segment.replace(/^\/|\/$/g, '') + ); // Keep trailing slash at the end if specified in requestPath // and if requestPath isn't only '/' - const trailingSlash = (requestPath !== '/') && (requestPath.slice(-1) === '/') ? '/' : ''; + const trailingSlash = + requestPath !== '/' && requestPath.slice(-1) === '/' ? '/' : ''; return `/${segments.join('/')}${trailingSlash}`; } @@ -328,7 +418,7 @@ class TransactionRunner { message: transaction.name, origin: transaction.origin, startedAt: transaction.startedAt, - errors: transaction.errors, + errors: transaction.errors }; } @@ -351,11 +441,17 @@ class TransactionRunner { transaction.fail = true; this.ensureTransactionErrors(transaction); - if (reason) { transaction.errors.push({ severity: 'error', message: reason }); } + if (reason) { + transaction.errors.push({ severity: 'error', message: reason }); + } - if (!transaction.test) { transaction.test = this.createTest(transaction); } + if (!transaction.test) { + transaction.test = this.createTest(transaction); + } transaction.test.status = 'fail'; - if (reason) { transaction.test.message = reason; } + if (reason) { + transaction.test.message = reason; + } this.ensureTestStructure(transaction); } @@ -366,13 +462,17 @@ class TransactionRunner { transaction.skip = true; this.ensureTransactionErrors(transaction); - if (reason) { transaction.errors.push({ severity: 'warning', message: reason }); } + if (reason) { + transaction.errors.push({ severity: 'warning', message: reason }); + } if (!transaction.test) { transaction.test = this.createTest(transaction); } transaction.test.status = 'skip'; - if (reason) { transaction.test.message = reason; } + if (reason) { + transaction.test.message = reason; + } this.ensureTestStructure(transaction); } @@ -380,8 +480,12 @@ class TransactionRunner { // Ensures that given transaction object has the "errors" key // where custom test run errors (not validation errors) are stored. 
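// Editor's note (illustrative sketch, not part of the patch): once
// ensureTransactionErrors() below has run, failTransaction() and skipTransaction()
// above push entries of the following shape into transaction.errors. The messages
// here are examples modeled on reasons used later in this file:
const exampleTransactionErrors = [
  { severity: 'error', message: 'Failed in before hook: expected 1 to equal 2' },
  { severity: 'warning', message: 'Skipped in before hook' }
]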
ensureTransactionErrors(transaction) { - if (!transaction.results) { transaction.results = {}; } - if (!transaction.errors) { transaction.errors = []; } + if (!transaction.results) { + transaction.results = {}; + } + if (!transaction.errors) { + transaction.errors = []; + } return transaction.errors; } @@ -390,31 +494,54 @@ class TransactionRunner { // according to the test's status emitResult(transaction, callback) { if (this.error || !transaction.test) { - logger.debug('No emission of test data to reporters', this.error, transaction.test); + logger.debug( + 'No emission of test data to reporters', + this.error, + transaction.test + ); this.error = null; // Reset the error indicator return callback(); } if (transaction.skip) { logger.debug('Emitting to reporters: test skip'); - this.configuration.emitter.emit('test skip', transaction.test, eventCallback); + this.configuration.emitter.emit( + 'test skip', + transaction.test, + eventCallback + ); return callback(); } if (transaction.test.valid) { if (transaction.fail) { - this.failTransaction(transaction, `Failed in after hook: ${transaction.fail}`); + this.failTransaction( + transaction, + `Failed in after hook: ${transaction.fail}` + ); logger.debug('Emitting to reporters: test fail'); - this.configuration.emitter.emit('test fail', transaction.test, eventCallback); + this.configuration.emitter.emit( + 'test fail', + transaction.test, + eventCallback + ); } else { logger.debug('Emitting to reporters: test pass'); - this.configuration.emitter.emit('test pass', transaction.test, eventCallback); + this.configuration.emitter.emit( + 'test pass', + transaction.test, + eventCallback + ); } return callback(); } logger.debug('Emitting to reporters: test fail'); - this.configuration.emitter.emit('test fail', transaction.test, eventCallback); + this.configuration.emitter.emit( + 'test fail', + transaction.test, + eventCallback + ); callback(); } @@ -431,7 +558,9 @@ class TransactionRunner { // This is actually doing more some pre-flight and conditional skipping of // the transcation based on the configuration or hooks. TODO rename executeTransaction(transaction, hooks, callback) { - if (!callback) { [callback, hooks] = Array.from([hooks, undefined]); } + if (!callback) { + [callback, hooks] = Array.from([hooks, undefined]); + } // Number in miliseconds (UNIX-like timestamp * 1000 precision) transaction.startedAt = Date.now(); @@ -443,35 +572,57 @@ class TransactionRunner { this.ensureTransactionErrors(transaction); if (transaction.skip) { - logger.debug('HTTP transaction was marked in hooks as to be skipped. Skipping'); + logger.debug( + 'HTTP transaction was marked in hooks as to be skipped. Skipping' + ); transaction.test = test; this.skipTransaction(transaction, 'Skipped in before hook'); return callback(); - } if (transaction.fail) { - logger.debug('HTTP transaction was marked in hooks as to be failed. Reporting as failed'); + } + if (transaction.fail) { + logger.debug( + 'HTTP transaction was marked in hooks as to be failed. Reporting as failed' + ); transaction.test = test; - this.failTransaction(transaction, `Failed in before hook: ${transaction.fail}`); + this.failTransaction( + transaction, + `Failed in before hook: ${transaction.fail}` + ); return callback(); - } if (this.configuration['dry-run']) { + } + if (this.configuration['dry-run']) { reporterOutputLogger.info('Dry run. 
Not performing HTTP request'); transaction.test = test; this.skipTransaction(transaction); return callback(); - } if (this.configuration.names) { + } + if (this.configuration.names) { reporterOutputLogger.info(transaction.name); transaction.test = test; this.skipTransaction(transaction); return callback(); - } if ((this.configuration.method.length > 0) && !(Array.from(this.configuration.method).includes(transaction.request.method))) { + } + if ( + this.configuration.method.length > 0 && + !Array.from(this.configuration.method).includes( + transaction.request.method + ) + ) { logger.debug(`\ -Only ${(Array.from(this.configuration.method).map(m => m.toUpperCase())).join(', ')}\ +Only ${Array.from(this.configuration.method) + .map((m) => m.toUpperCase()) + .join(', ')}\ requests are set to be executed. \ Not performing HTTP ${transaction.request.method.toUpperCase()} request.\ `); transaction.test = test; this.skipTransaction(transaction); return callback(); - } if ((this.configuration.only.length > 0) && !(Array.from(this.configuration.only).includes(transaction.name))) { + } + if ( + this.configuration.only.length > 0 && + !Array.from(this.configuration.only).includes(transaction.name) + ) { logger.debug(`\ Only '${this.configuration.only}' transaction is set to be executed. \ Not performing HTTP request for '${transaction.name}'.\ @@ -486,11 +637,12 @@ Not performing HTTP request for '${transaction.name}'.\ // An actual HTTP request, before validation hooks triggering // and the response validation is invoked here performRequestAndValidate(test, transaction, hooks, callback) { - const uri = url.format({ - protocol: transaction.protocol, - hostname: transaction.host, - port: transaction.port, - }) + transaction.fullPath; + const uri = + url.format({ + protocol: transaction.protocol, + hostname: transaction.host, + port: transaction.port + }) + transaction.fullPath; const options = { http: this.configuration.http }; performRequest(uri, transaction.request, options, (error, real) => { @@ -504,21 +656,32 @@ Not performing HTTP request for '${transaction.name}'.\ } transaction.real = real; - logger.debug('Running \'beforeEachValidation\' hooks'); - this.runHooksForData(hooks && hooks.beforeEachValidationHooks, transaction, () => { - if (this.hookHandlerError) { return callback(this.hookHandlerError); } + logger.debug("Running 'beforeEachValidation' hooks"); + this.runHooksForData( + hooks && hooks.beforeEachValidationHooks, + transaction, + () => { + if (this.hookHandlerError) { + return callback(this.hookHandlerError); + } - logger.debug('Running \'beforeValidation\' hooks'); - this.runHooksForData(hooks && hooks.beforeValidationHooks[transaction.name], transaction, () => { - if (this.hookHandlerError) { return callback(this.hookHandlerError); } + logger.debug("Running 'beforeValidation' hooks"); + this.runHooksForData( + hooks && hooks.beforeValidationHooks[transaction.name], + transaction, + () => { + if (this.hookHandlerError) { + return callback(this.hookHandlerError); + } - this.validateTransaction(test, transaction, callback); - }); - }); + this.validateTransaction(test, transaction, callback); + } + ); + } + ); }); } - // TODO Rewrite this entire method. // Motivations: // 1. Mutations at place. @@ -560,8 +723,11 @@ Not performing HTTP request for '${transaction.name}'.\ const isActualResponseStatusCodeEmpty = ['204', '205'].includes( test.actual.statusCode ? 
test.actual.statusCode.toString() : undefined ); - const hasBody = (test.expected.body || test.actual.body); - if ((isExpectedResponseStatusCodeEmpty || isActualResponseStatusCodeEmpty) && hasBody) { + const hasBody = test.expected.body || test.actual.body; + if ( + (isExpectedResponseStatusCodeEmpty || isActualResponseStatusCodeEmpty) && + hasBody + ) { logger.warn(`\ ${test.title} HTTP 204 and 205 responses must not \ include a message body: https://tools.ietf.org/html/rfc7231#section-6.3\ @@ -573,8 +739,9 @@ include a message body: https://tools.ietf.org/html/rfc7231#section-6.3\ // Order-sensitive list of Gavel validation fields to output in the log // Note that Dredd asserts EXACTLY this order. Make sure to adjust tests upon change. - const loggedFields = ['headers', 'body', 'statusCode'] - .filter(fieldName => Object.prototype.hasOwnProperty.call(gavelResult.fields, fieldName)); + const loggedFields = ['headers', 'body', 'statusCode'].filter((fieldName) => + Object.prototype.hasOwnProperty.call(gavelResult.fields, fieldName) + ); loggedFields.forEach((fieldName) => { const fieldResult = gavelResult.fields[fieldName]; @@ -601,9 +768,11 @@ include a message body: https://tools.ietf.org/html/rfc7231#section-6.3\ let reporterCount = this.configuration.emitter.listeners('end').length; this.configuration.emitter.emit('end', () => { reporterCount--; - if (reporterCount === 0) { callback(); } + if (reporterCount === 0) { + callback(); + } }); } } -module.exports = TransactionRunner; +export default TransactionRunner; diff --git a/lib/__general.ts b/lib/__general.ts new file mode 100644 index 000000000..e64663f34 --- /dev/null +++ b/lib/__general.ts @@ -0,0 +1,82 @@ +export enum RESTMethod { + CONNECT = 'CONNECT', + OPTIONS = 'OPTIONS', + POST = 'POST', + GET = 'GET', + HEAD = 'HEAD', + PUT = 'PUT', + PATCH = 'PATCH', + DELETE = 'DELETE', + TRACE = 'TRACE' +} + +export enum BodyEncoding { + 'utf-8', + 'base64' +} + +export enum TransactionTestStatus { + 'pass', + 'fail', + 'skip' +} + +export interface Transaction { + id: string + name: string + origin: TransactionOrigin + host: string + port: number + protocol: 'http:' | 'https:' + fullPath: string + request: TransactionRequest + expected: { + statusCode: number + headers: Record + body: string + bodySchema: Record + } + real: { + statusCode: string + headers: Record + body: string + bodyEncoding: BodyEncoding + } + skip: boolean + fail: boolean + + test: TransactionTest +} + +export interface TransactionRequest { + method: RESTMethod + url: string + body?: string + bodyEncoding?: BodyEncoding + headers?: Record +} + +export interface TransactionOrigin { + filename: string + apiName: string + resourceGroupName: string + resourceName: string + actionName: string + exampleName: string +} + +export interface TransactionTest { + start: Date + end: Date + duration: number + startedAt: number + title: string + request: TransactionRequest + actual: any + expected: any + status: TransactionTestStatus + message: string + results: any + valid: boolean + origin: TransactionOrigin +} diff --git a/lib/addHooks.js b/lib/addHooks.js deleted file mode 100644 index 3b6069c72..000000000 --- a/lib/addHooks.js +++ /dev/null @@ -1,79 +0,0 @@ -const clone = require('clone'); -const proxyquire = require('proxyquire').noCallThru(); - -const Hooks = require('./Hooks'); -const HooksWorkerClient = require('./HooksWorkerClient'); -const logger = require('./logger'); -const reporterOutputLogger = require('./reporters/reporterOutputLogger'); -const resolvePaths = 
require('./resolvePaths'); - - -// The 'addHooks()' function is a strange glue code responsible for various -// side effects needed as a preparation for loading Node.js hooks. It is -// asynchronous only because as the last thing, it spawns the hooks handler -// process if it figures out the hooks are not JavaScript hooks. -// -// In the future we should get rid of this code. Hooks should get a nice, -// separate logical component, which takes care of their loading and running -// regardless the language used, and either delegates to the hooks handler -// or not. Side effects should get eliminated as much as possible in favor -// of decoupling. - - -function loadHookFile(hookfile, hooks) { - try { - proxyquire(hookfile, { hooks }); - } catch (error) { - logger.warn(`Skipping hook loading. Error reading hook file '${hookfile}'. ` - + 'This probably means one or more of your hook files are invalid.\n' - + `Message: ${error.message}\n` - + `Stack: \n${error.stack}\n`); - } -} - - -module.exports = function addHooks(runner, transactions, callback) { - if (!runner.logs) { runner.logs = []; } - runner.hooks = new Hooks({ logs: runner.logs, logger: reporterOutputLogger }); - - if (!runner.hooks.transactions) { runner.hooks.transactions = {}; } - - Array.from(transactions).forEach((transaction) => { - runner.hooks.transactions[transaction.name] = transaction; - }); - - // No hooks - if (!runner.configuration.hookfiles || !runner.configuration.hookfiles.length) { - return callback(); - } - - // Loading hookfiles from fs - let hookfiles; - try { - hookfiles = resolvePaths(runner.configuration.custom.cwd, runner.configuration.hookfiles); - } catch (err) { - return callback(err); - } - logger.debug('Found Hookfiles:', hookfiles); - - // Override hookfiles option in configuration object with - // sorted and resolved files - runner.configuration.hookfiles = hookfiles; - - // Clone the configuration object to hooks.configuration to make it - // accessible in the node.js hooks API - runner.hooks.configuration = clone(runner.configuration); - - // If the language is empty or it is nodejs - if ( - !runner.configuration.language - || runner.configuration.language === 'nodejs' - ) { - hookfiles.forEach(hookfile => loadHookFile(hookfile, runner.hooks)); - return callback(); - } - - // If other language than nodejs, run hooks worker client - // Worker client will start the worker server and pass the "hookfiles" options as CLI arguments to it - return (new HooksWorkerClient(runner)).start(callback); -}; diff --git a/lib/addHooks.ts b/lib/addHooks.ts new file mode 100644 index 000000000..4bc018399 --- /dev/null +++ b/lib/addHooks.ts @@ -0,0 +1,96 @@ +import clone from 'clone' +import { noCallThru } from 'proxyquire' + +import Hooks from './Hooks' +import HooksWorkerClient from './HooksWorkerClient' +import logger from './logger' +import reporterOutputLogger from './reporters/reporterOutputLogger' +import resolvePaths from './resolvePaths' + +import { Transaction } from './__general' + +const proxyquire = noCallThru() + +// The 'addHooks()' function is a strange glue code responsible for various +// side effects needed as a preparation for loading Node.js hooks. It is +// asynchronous only because as the last thing, it spawns the hooks handler +// process if it figures out the hooks are not JavaScript hooks. +// +// In the future we should get rid of this code. 
Hooks should get a nice, +// separate logical component, which takes care of their loading and running +// regardless the language used, and either delegates to the hooks handler +// or not. Side effects should get eliminated as much as possible in favor +// of decoupling. + +function loadHookFile(hookfile: string, hooks: any) { + try { + proxyquire(hookfile, { hooks }) + } catch (error) { + logger.warn( + `Skipping hook loading. Error reading hook file '${hookfile}'. ` + + 'This probably means one or more of your hook files are invalid.\n' + + `Message: ${error.message}\n` + + `Stack: \n${error.stack}\n`, + ) + } +} + +export default function addHooks( + runner: any, + transactions: Transaction[], + callback: (error?: Error) => void, +) { + if (!runner.logs) { + runner.logs = [] + } + runner.hooks = new Hooks({ logs: runner.logs, logger: reporterOutputLogger }) + + if (!runner.hooks.transactions) { + runner.hooks.transactions = {} + } + + Array.from(transactions).forEach((transaction) => { + runner.hooks.transactions[transaction.name] = transaction + }) + + // No hooks + if ( + !runner.configuration.hookfiles || + !runner.configuration.hookfiles.length + ) { + return callback() + } + + // Loading hookfiles from fs + let hookfiles + try { + hookfiles = resolvePaths( + runner.configuration.custom.cwd, + runner.configuration.hookfiles, + ) + } catch (err) { + return callback(err) + } + logger.debug('Found Hookfiles:', hookfiles) + + // Override hookfiles option in configuration object with + // sorted and resolved files + runner.configuration.hookfiles = hookfiles + + // Clone the configuration object to hooks.configuration to make it + // accessible in the node.js hooks API + runner.hooks.configuration = clone(runner.configuration) + + // If the language is empty or it is nodejs + if ( + !runner.configuration.language || + runner.configuration.language === 'nodejs' + ) { + hookfiles.forEach((hookfile) => loadHookFile(hookfile, runner.hooks)) + return callback() + } + + // If other language than nodejs, run hooks worker client + // Worker client will start the worker server and pass the "hookfiles" options as CLI arguments to it + return new HooksWorkerClient(runner).start(callback) +} diff --git a/lib/annotationToLoggerInfo.js b/lib/annotationToLoggerInfo.js index 048ecf02f..e5cebf0b1 100644 --- a/lib/annotationToLoggerInfo.js +++ b/lib/annotationToLoggerInfo.js @@ -1,18 +1,16 @@ -const compileTransactionName = require('./compileTransactionName'); - +import compileTransactionName from './compileTransactionName' /** * Turns annotation type into a log level */ function typeToLogLevel(annotationType) { - const level = { error: 'error', warning: 'warn' }[annotationType]; + const level = { error: 'error', warning: 'warn' }[annotationType] if (!level) { - throw new Error(`Invalid annotation type: '${annotationType}'`); + throw new Error(`Invalid annotation type: '${annotationType}'`) } - return level; + return level } - /** * Takes a component identifier and turns it into something user can understand * @@ -21,17 +19,16 @@ function typeToLogLevel(annotationType) { function formatComponent(component) { switch (component) { case 'apiDescriptionParser': - return 'API description parser'; + return 'API description parser' case 'parametersValidation': - return 'API description URI parameters validation'; + return 'API description URI parameters validation' case 'uriTemplateExpansion': - return 'API description URI template expansion'; + return 'API description URI template expansion' default: - return 
'API description'; + return 'API description' } } - /** * Formats given location data as something user can understand * @@ -40,24 +37,24 @@ function formatComponent(component) { */ function formatLocation(apiDescriptionLocation, annotationLocation) { if (!annotationLocation) { - return apiDescriptionLocation; + return apiDescriptionLocation } - const [[startLine, startColumn], [endLine, endColumn]] = annotationLocation; - const editorLink = `${apiDescriptionLocation}:${startLine}`; - const from = `line ${startLine} column ${startColumn}`; + const [[startLine, startColumn], [endLine, endColumn]] = annotationLocation + const editorLink = `${apiDescriptionLocation}:${startLine}` + const from = `line ${startLine} column ${startColumn}` if (startLine === endLine && startColumn === endColumn) { - return `${editorLink} (${from})`; + return `${editorLink} (${from})` } - const to = startLine === endLine - ? `column ${endColumn}` - : `line ${endLine} column ${endColumn}`; - return `${editorLink} (from ${from} to ${to})`; + const to = + startLine === endLine + ? `column ${endColumn}` + : `line ${endLine} column ${endColumn}` + return `${editorLink} (from ${from} to ${to})` } - /** * @typedef {Object} LoggerInfo A plain object winston.log() accepts as input * @property {string} level @@ -73,24 +70,27 @@ function formatLocation(apiDescriptionLocation, annotationLocation) { * @param {Object} annotation the annotation object from Dredd Transactions * @return {LoggerInfo} */ -module.exports = function annotationToLoggerInfo(apiDescriptionLocation, annotation) { - const level = typeToLogLevel(annotation.type); +export default function annotationToLoggerInfo( + apiDescriptionLocation, + annotation +) { + const level = typeToLogLevel(annotation.type) if (annotation.component === 'apiDescriptionParser') { - const message = ( - `${formatComponent(annotation.component)} ${annotation.type}` - + ` in ${formatLocation(apiDescriptionLocation, annotation.location)}:` - + ` ${annotation.message}` - ); - return { level, message }; + const message = + `${formatComponent(annotation.component)} ${annotation.type}` + + ` in ${formatLocation(apiDescriptionLocation, annotation.location)}:` + + ` ${annotation.message}` + return { level, message } } // See https://github.com/apiaryio/dredd-transactions/issues/275 why this // is handled in a different way than parser annotations - const message = ( - `${formatComponent(annotation.component)} ${annotation.type}` - + ` in ${apiDescriptionLocation} (${compileTransactionName(annotation.origin)}):` - + ` ${annotation.message}` - ); - return { level, message }; -}; + const message = + `${formatComponent(annotation.component)} ${annotation.type}` + + ` in ${apiDescriptionLocation} (${compileTransactionName( + annotation.origin + )}):` + + ` ${annotation.message}` + return { level, message } +} diff --git a/lib/childProcess.js b/lib/childProcess.js index b423cfea2..7b841a690 100644 --- a/lib/childProcess.js +++ b/lib/childProcess.js @@ -1,38 +1,37 @@ -const crossSpawn = require('cross-spawn'); +import crossSpawn from 'cross-spawn' -const ignorePipeErrors = require('./ignorePipeErrors'); - - -const ASCII_CTRL_C = 3; -const IS_WINDOWS = process.platform === 'win32'; -const TERM_FIRST_CHECK_TIMEOUT_MS = 1; -const TERM_DEFAULT_TIMEOUT_MS = 1000; -const TERM_DEFAULT_RETRY_MS = 300; +import ignorePipeErrors from './ignorePipeErrors' +const ASCII_CTRL_C = 3 +const IS_WINDOWS = process.platform === 'win32' +const TERM_FIRST_CHECK_TIMEOUT_MS = 1 +const TERM_DEFAULT_TIMEOUT_MS = 1000 +const 
TERM_DEFAULT_RETRY_MS = 300 // Signals the child process to forcefully terminate -function signalKill(childProcess, callback) { - childProcess.emit('signalKill'); +export function signalKill(childProcess, callback) { + childProcess.emit('signalKill') if (IS_WINDOWS) { - const taskkill = spawn('taskkill', ['/F', '/T', '/PID', childProcess.pid]); + const taskkill = spawn('taskkill', ['/F', '/T', '/PID', childProcess.pid]) taskkill.on('exit', (exitStatus) => { if (exitStatus) { return callback( - new Error(`Unable to forcefully terminate process ${childProcess.pid}`) - ); + new Error( + `Unable to forcefully terminate process ${childProcess.pid}`, + ), + ) } - callback(); - }); + callback() + }) } else { - childProcess.kill('SIGKILL'); - process.nextTick(callback); + childProcess.kill('SIGKILL') + process.nextTick(callback) } } - // Signals the child process to gracefully terminate -function signalTerm(childProcess, callback) { - childProcess.emit('signalTerm'); +export function signalTerm(childProcess, callback) { + childProcess.emit('signalTerm') if (IS_WINDOWS) { // On Windows, there is no such way as SIGTERM or SIGINT. The closest // thing is to interrupt the process with Ctrl+C. Under the hood, that @@ -51,14 +50,13 @@ function signalTerm(childProcess, callback) { // it sends the '\u0003' to stdin of the child. It's up to the child // to implement reading from stdin in such way it works both for // programmatic and manual Ctrl+C. - childProcess.stdin.write(String.fromCharCode(ASCII_CTRL_C)); + childProcess.stdin.write(String.fromCharCode(ASCII_CTRL_C)) } else { - childProcess.kill('SIGTERM'); + childProcess.kill('SIGTERM') } - process.nextTick(callback); + process.nextTick(callback) } - // Gracefully terminates a child process // // Sends a signal to the process as a heads up it should terminate. @@ -74,102 +72,121 @@ function signalTerm(childProcess, callback) { // attempts will be done // - retryDelay (number) - Delay in ms between termination attempts // - force (boolean) - Kills the process forcefully after the timeout -function terminate(childProcess, options = {}, callback) { - if (typeof options === 'function') { [callback, options] = Array.from([options, {}]); } - const force = options.force || false; +export function terminate(childProcess, options = {}, callback) { + if (typeof options === 'function') { + ;[callback, options] = Array.from([options, {}]) + } + const force = options.force || false // If the timeout is zero or less then the delay for waiting between // retries, there will be just one termination attempt - const timeout = options.timeout ? options.timeout : TERM_DEFAULT_TIMEOUT_MS; - const retryDelay = options.retryDelay ? options.retryDelay : TERM_DEFAULT_RETRY_MS; + const timeout = options.timeout ? options.timeout : TERM_DEFAULT_TIMEOUT_MS + const retryDelay = options.retryDelay + ? options.retryDelay + : TERM_DEFAULT_RETRY_MS - let terminated = false; + let terminated = false const onExit = () => { - terminated = true; - childProcess.removeListener('exit', onExit); - }; - childProcess.on('exit', onExit); + terminated = true + childProcess.removeListener('exit', onExit) + } + childProcess.on('exit', onExit) - const start = Date.now(); - let t; + const start = Date.now() + let t // A function representing one check, whether the process already // ended or not. It is repeatedly called until the timeout has passed. 
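// Editor's note (illustrative usage sketch, not part of the patch): putting the
// options documented above together (timeout, retryDelay, force; the defaults are
// 1000 ms and 300 ms), a caller of the exported helpers looks roughly like this.
// The spawned command is hypothetical:
import { spawn, terminate } from './childProcess'
const child = spawn('node', ['hooks-worker.js'])
terminate(child, { timeout: 1000, retryDelay: 300, force: true }, (error) => {
  if (error) console.error('Could not terminate the child process:', error.message)
})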
function check() { if (terminated) { // Successfully terminated - clearTimeout(t); - return callback(); + clearTimeout(t) + return callback() } - if ((Date.now() - start) < timeout) { + if (Date.now() - start < timeout) { // Still not terminated, try again signalTerm(childProcess, (err) => { - if (err) { return callback(err); } - t = setTimeout(check, retryDelay); - }); + if (err) { + return callback(err) + } + t = setTimeout(check, retryDelay) + }) } else { // Still not terminated and the timeout has passed, either // kill the process (force) or provide an error - clearTimeout(t); + clearTimeout(t) if (force) { - signalKill(childProcess, callback); + signalKill(childProcess, callback) } else { callback( - new Error(`Unable to gracefully terminate process ${childProcess.pid}`) - ); + new Error( + `Unable to gracefully terminate process ${childProcess.pid}`, + ), + ) } } } // Fire the first termination attempt and check the result signalTerm(childProcess, (err) => { - if (err) { return callback(err); } - t = setTimeout(check, TERM_FIRST_CHECK_TIMEOUT_MS); - }); + if (err) { + return callback(err) + } + t = setTimeout(check, TERM_FIRST_CHECK_TIMEOUT_MS) + }) } +export function spawn(...args) { + const childProcess = crossSpawn.spawn.apply(null, args) -function spawn(...args) { - const childProcess = crossSpawn.spawn.apply(null, args); - - ignorePipeErrors(childProcess); + ignorePipeErrors(childProcess) - childProcess.spawned = true; - childProcess.terminated = false; - let killedIntentionally = false; - let terminatedIntentionally = false; + childProcess.spawned = true + childProcess.terminated = false + let killedIntentionally = false + let terminatedIntentionally = false - childProcess.on('signalKill', () => { killedIntentionally = true; }); - childProcess.on('signalTerm', () => { terminatedIntentionally = true; }); + childProcess.on('signalKill', () => { + killedIntentionally = true + }) + childProcess.on('signalTerm', () => { + terminatedIntentionally = true + }) childProcess.signalKill = () => { signalKill(childProcess, (err) => { - if (err) { childProcess.emit('error', err); } - }); - }; + if (err) { + childProcess.emit('error', err) + } + }) + } childProcess.signalTerm = () => { signalTerm(childProcess, (err) => { - if (err) { childProcess.emit('error', err); } - }); - }; + if (err) { + childProcess.emit('error', err) + } + }) + } childProcess.terminate = (options) => { terminate(childProcess, options, (err) => { - if (err) { childProcess.emit('error', err); } - }); - }; + if (err) { + childProcess.emit('error', err) + } + }) + } childProcess.on('error', (err) => { - if (err.syscall && (err.syscall.indexOf('spawn') >= 0)) { - childProcess.spawned = false; + if (err.syscall && err.syscall.indexOf('spawn') >= 0) { + childProcess.spawned = false } - }); + }) childProcess.on('exit', (exitStatus, signal) => { - childProcess.terminated = true; - childProcess.killedIntentionally = killedIntentionally; - childProcess.terminatedIntentionally = terminatedIntentionally; + childProcess.terminated = true + childProcess.killedIntentionally = killedIntentionally + childProcess.terminatedIntentionally = terminatedIntentionally // Crash detection. Emits a 'crash' event in case the process // unintentionally terminated with non-zero status code. @@ -192,20 +209,12 @@ function spawn(...args) { // a process was forcefully killed... 
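// Editor's note (illustrative sketch, not part of the patch): consumers can listen
// for the 'crash' event emitted below; the listener receives the exit status and a
// flag saying whether the process was forcefully killed. The spawned command is
// hypothetical:
const worker = spawn('python', ['./hooks-worker.py'])
worker.on('crash', (exitStatus, killedForcefully) => {
  console.error(killedForcefully ? 'Worker was killed' : `Worker crashed with status ${exitStatus}`)
})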
if (!killedIntentionally && !terminatedIntentionally) { if (signal === 'SIGKILL') { - childProcess.emit('crash', null, true); + childProcess.emit('crash', null, true) } else if (exitStatus !== 0) { - childProcess.emit('crash', exitStatus, false); + childProcess.emit('crash', exitStatus, false) } } - }); + }) - return childProcess; + return childProcess } - - -module.exports = { - signalKill, - signalTerm, - terminate, - spawn, -}; diff --git a/lib/compileTransactionName.js b/lib/compileTransactionName.js deleted file mode 100644 index 591be727c..000000000 --- a/lib/compileTransactionName.js +++ /dev/null @@ -1,14 +0,0 @@ -// This file is copy-pasted "as is" from the Dredd Transactions library, where -// it's also tested. This is a temporary solution, -// see https://github.com/apiaryio/dredd-transactions/issues/276 - - -module.exports = function compileTransactionName(origin) { - const segments = []; - if (origin.apiName) { segments.push(origin.apiName); } - if (origin.resourceGroupName) { segments.push(origin.resourceGroupName); } - if (origin.resourceName) { segments.push(origin.resourceName); } - if (origin.actionName) { segments.push(origin.actionName); } - if (origin.exampleName) { segments.push(origin.exampleName); } - return segments.join(' > '); -}; diff --git a/lib/compileTransactionName.ts b/lib/compileTransactionName.ts new file mode 100644 index 000000000..68f8f23d2 --- /dev/null +++ b/lib/compileTransactionName.ts @@ -0,0 +1,27 @@ +import { TransactionOrigin } from './__general' + +// This file is copy-pasted "as is" from the Dredd Transactions library, where +// it's also tested. This is a temporary solution, +// see https://github.com/apiaryio/dredd-transactions/issues/276 + +export default function compileTransactionName( + origin: TransactionOrigin, +): string { + const segments = [] + if (origin.apiName) { + segments.push(origin.apiName) + } + if (origin.resourceGroupName) { + segments.push(origin.resourceGroupName) + } + if (origin.resourceName) { + segments.push(origin.resourceName) + } + if (origin.actionName) { + segments.push(origin.actionName) + } + if (origin.exampleName) { + segments.push(origin.exampleName) + } + return segments.join(' > ') +} diff --git a/lib/configUtils.js b/lib/configUtils.js index 91f563bb4..573422c01 100644 --- a/lib/configUtils.js +++ b/lib/configUtils.js @@ -1,56 +1,52 @@ -const clone = require('clone'); -const fs = require('fs'); -const yaml = require('js-yaml'); +import clone from 'clone' +import fs from 'fs' +import yaml from 'js-yaml' +export const save = (argsOrigin, path) => { + if (!path) { + path = './dredd.yml' + } -function save(argsOrigin, path) { - if (!path) { path = './dredd.yml'; } - - const args = clone(argsOrigin); + const args = clone(argsOrigin) - args.blueprint = args._[0]; - args.endpoint = args._[1]; + args.blueprint = args._[0] + args.endpoint = args._[1] Object.keys(args).forEach((key) => { - if (key.length === 1) { delete args[key]; } - }); + if (key.length === 1) { + delete args[key] + } + }) - delete args.$0; - delete args._; + delete args.$0 + delete args._ - fs.writeFileSync(path, yaml.dump(args)); + fs.writeFileSync(path, yaml.dump(args)) } +export const load = (path) => { + if (!path) { + path = './dredd.yml' + } -function load(path) { - if (!path) { path = './dredd.yml'; } - - const yamlData = fs.readFileSync(path); - const data = yaml.safeLoad(yamlData); + const yamlData = fs.readFileSync(path) + const data = yaml.safeLoad(yamlData) - data._ = [data.blueprint, data.endpoint]; + data._ = [data.blueprint, 
data.endpoint] - delete data.blueprint; - delete data.endpoint; + delete data.blueprint + delete data.endpoint - return data; + return data } - -function parseCustom(customArray) { - const output = {}; +export const parseCustom = (customArray) => { + const output = {} if (Array.isArray(customArray)) { for (const string of customArray) { - const splitted = string.split(/:(.+)?/); - output[splitted[0]] = splitted[1]; + const splitted = string.split(/:(.+)?/) + output[splitted[0]] = splitted[1] } } - return output; + return output } - - -module.exports = { - save, - load, - parseCustom, -}; diff --git a/lib/configuration/applyConfiguration.js b/lib/configuration/applyConfiguration.js index cde208c63..777ca5eb9 100644 --- a/lib/configuration/applyConfiguration.js +++ b/lib/configuration/applyConfiguration.js @@ -1,13 +1,13 @@ -const R = require('ramda'); -const { EventEmitter } = require('events'); +import * as R from 'ramda' +import { EventEmitter } from 'events' -const logger = require('../logger'); -const getProxySettings = require('../getProxySettings'); -const applyLoggingOptions = require('./applyLoggingOptions'); -const validateConfig = require('./validateConfig'); -const normalizeConfig = require('./normalizeConfig'); +import logger from '../logger' +import getProxySettings from '../getProxySettings' +import applyLoggingOptions from './applyLoggingOptions' +import validateConfig from './validateConfig' +import normalizeConfig from './normalizeConfig' -const DEFAULT_CONFIG = { +export const DEFAULT_CONFIG = { http: {}, endpoint: null, // TODO https://github.com/apiaryio/dredd/issues/1345 @@ -16,7 +16,7 @@ const DEFAULT_CONFIG = { // not being copied. This breaks event emitter. // emitter: new EventEmitter(), custom: { - cwd: process.cwd(), + cwd: process.cwd() }, path: [], apiDescriptions: [], @@ -42,8 +42,8 @@ const DEFAULT_CONFIG = { 'hooks-worker-term-timeout': 5000, 'hooks-worker-term-retry': 500, 'hooks-worker-handler-host': '127.0.0.1', - 'hooks-worker-handler-port': 61321, -}; + 'hooks-worker-handler-port': 61321 +} // Flattens given configuration Object, removing nested "options" key. // This makes it possible to use nested "options" key without introducing @@ -66,63 +66,62 @@ function flattenConfig(config) { R.dissoc('server'), R.assoc('endpoint', R.prop('server', config)) ) - )(config); + )(config) - const rootOptions = R.omit(['options'], aliasedConfig); - const nestedOptions = R.prop('options', aliasedConfig); + const rootOptions = R.omit(['options'], aliasedConfig) + const nestedOptions = R.prop('options', aliasedConfig) if (nestedOptions) { - logger.warn('Deprecated usage of `options` in Dredd configuration.'); + logger.warn('Deprecated usage of `options` in Dredd configuration.') } - return R.mergeDeepLeft(nestedOptions || {}, rootOptions); + return R.mergeDeepLeft(nestedOptions || {}, rootOptions) } -function resolveConfig(config) { +export const resolveConfig = (config) => { const inConfig = R.compose( // Set "emitter" property explicitly to preserve its prototype. // During deep merge Ramda omits prototypes, breaking emitter. 
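// Editor's note (illustrative sketch, not part of the patch): unrolled, the
// composition continuing below is roughly equivalent to the following, where the
// local names exist only in this sketch. The caller's EventEmitter instance is
// attached after the plain-object deep merge so its prototype and listeners survive:
const flattened = flattenConfig(config)
const merged = R.mergeDeepRight(DEFAULT_CONFIG, flattened)
const resolved = R.assoc('emitter', R.propOr(new EventEmitter(), 'emitter', config), merged)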
R.assoc('emitter', R.propOr(new EventEmitter(), 'emitter', config)), R.mergeDeepRight(DEFAULT_CONFIG), flattenConfig - )(config); + )(config) // Validate Dredd configuration - const { warnings, errors } = validateConfig(inConfig); - warnings.forEach(message => logger.warn(message)); - errors.forEach(message => logger.error(message)); + const { warnings, errors } = validateConfig(inConfig) + warnings.forEach((message) => logger.warn(message)) + errors.forEach((message) => logger.error(message)) // Fail fast upon any Dredd configuration errors if (errors.length > 0) { - throw new Error('Could not configure Dredd'); + throw new Error('Could not configure Dredd') } return { config: normalizeConfig(inConfig), warnings, - errors, - }; + errors + } } function applyConfiguration(config) { - const { config: resolvedConfig } = resolveConfig(config); + const { config: resolvedConfig } = resolveConfig(config) - applyLoggingOptions(resolvedConfig); + applyLoggingOptions(resolvedConfig) // Log information about the HTTP proxy settings - const proxySettings = getProxySettings(process.env); + const proxySettings = getProxySettings(process.env) if (proxySettings.length) { logger.warn( - `HTTP(S) proxy specified by environment variables: ${proxySettings.join(', ')}. ` - + 'Please read documentation on how Dredd works with proxies: ' - + 'https://dredd.org/en/latest/how-it-works/#using-https-proxy' - ); + `HTTP(S) proxy specified by environment variables: ${proxySettings.join( + ', ' + )}. ` + + 'Please read documentation on how Dredd works with proxies: ' + + 'https://dredd.org/en/latest/how-it-works/#using-https-proxy' + ) } - return resolvedConfig; + return resolvedConfig } -applyConfiguration.resolveConfig = resolveConfig; -applyConfiguration.DEFAULT_CONFIG = DEFAULT_CONFIG; - -module.exports = applyConfiguration; +export default applyConfiguration diff --git a/lib/configuration/applyLoggingOptions.js b/lib/configuration/applyLoggingOptions.js index 82e22f588..983e156f4 100644 --- a/lib/configuration/applyLoggingOptions.js +++ b/lib/configuration/applyLoggingOptions.js @@ -1,5 +1,5 @@ -const logger = require('../logger'); -const reporterOutputLogger = require('../reporters/reporterOutputLogger'); +import logger from '../logger' +import reporterOutputLogger from '../reporters/reporterOutputLogger' /** * Applies logging options from the given configuration. 
@@ -7,30 +7,32 @@ const reporterOutputLogger = require('../reporters/reporterOutputLogger'); */ function applyLoggingOptions(config) { if (config.color === false) { - logger.transports.console.colorize = false; - reporterOutputLogger.transports.console.colorize = false; + logger.transports.console.colorize = false + reporterOutputLogger.transports.console.colorize = false } // TODO https://github.com/apiaryio/dredd/issues/1346 if (config.loglevel) { - const loglevel = config.loglevel.toLowerCase(); + const loglevel = config.loglevel.toLowerCase() if (loglevel === 'silent') { - logger.transports.console.silent = true; + logger.transports.console.silent = true } else if (loglevel === 'warning') { - logger.transports.console.level = 'warn'; + logger.transports.console.level = 'warn' } else if (loglevel === 'debug') { - logger.transports.console.level = 'debug'; - logger.transports.console.timestamp = true; + logger.transports.console.level = 'debug' + logger.transports.console.timestamp = true } else if (['warn', 'error'].includes(loglevel)) { - logger.transports.console.level = loglevel; + logger.transports.console.level = loglevel } else { - logger.transports.console.level = 'warn'; - throw new Error(`The logging level '${loglevel}' is unsupported, ` - + 'supported are: silent, error, warning, debug'); + logger.transports.console.level = 'warn' + throw new Error( + `The logging level '${loglevel}' is unsupported, ` + + 'supported are: silent, error, warning, debug' + ) } } else { - logger.transports.console.level = 'warn'; + logger.transports.console.level = 'warn' } } -module.exports = applyLoggingOptions; +export default applyLoggingOptions diff --git a/lib/configuration/index.js b/lib/configuration/index.js index 3ef0fbb5c..9aeda28b5 100644 --- a/lib/configuration/index.js +++ b/lib/configuration/index.js @@ -1,7 +1,2 @@ -const applyConfiguration = require('./applyConfiguration'); -const applyLoggingOptions = require('./applyLoggingOptions'); - -module.exports = { - applyConfiguration, - applyLoggingOptions, -}; +export { default as applyConfiguration } from './applyConfiguration' +export { default as applyLoggingOptions } from './applyLoggingOptions' diff --git a/lib/configuration/normalizeConfig.js b/lib/configuration/normalizeConfig.js index e5a735129..7bc2c0f94 100644 --- a/lib/configuration/normalizeConfig.js +++ b/lib/configuration/normalizeConfig.js @@ -1,10 +1,10 @@ -const R = require('ramda'); +import * as R from 'ramda' /** * Removes options that are no longer supported by Dredd. * Any coercion will not be performed, as they are removed prior to coercion. 
*/ -const removeUnsupportedOptions = R.compose( +export const removeUnsupportedOptions = R.compose( R.dissoc('q'), R.dissoc('silent'), R.dissoc('t'), @@ -12,85 +12,76 @@ const removeUnsupportedOptions = R.compose( R.dissoc('blueprintPath'), R.dissoc('b'), R.dissoc('sandbox') -); +) const getUserHeader = R.compose( - token => `Authorization: Basic ${token}`, - user => Buffer.from(user).toString('base64') -); + (token) => `Authorization: Basic ${token}`, + (user) => Buffer.from(user).toString('base64') +) const updateHeaderWithUser = R.compose( R.unnest, R.adjust(0, getUserHeader), R.values, R.pick(['user', 'header']) -); +) -const coerceToArray = R.cond([ - [R.is(String), v => [v]], +export const coerceToArray = R.cond([ + [R.is(String), (v) => [v]], [R.isNil, R.always([])], - [R.T, R.identity], -]); - -function coerceToBoolean(value) { - if (value === 'true') return true; - if (value === 'false') return false; - if (value) return true; - return false; + [R.T, R.identity] +]) + +export const coerceToBoolean = (value) => { + if (value === 'true') return true + if (value === 'false') return false + if (value) return true + return false } /** * Appends authorization header when supplied with "user" option. */ -const coerceUserOption = R.when( +export const coerceUserOption = R.when( R.propSatisfies(R.complement(R.isNil), 'user'), R.compose( R.dissoc('user'), - R.over( - R.lens(updateHeaderWithUser, R.assoc('header')), - R.identity - ) + R.over(R.lens(updateHeaderWithUser, R.assoc('header')), R.identity) ) -); +) -const mapIndexed = R.addIndex(R.map); +const mapIndexed = R.addIndex(R.map) -const coerceApiDescriptions = R.compose( +export const coerceApiDescriptions = R.compose( mapIndexed((content, index) => ({ location: `configuration.apiDescriptions[${index}]`, - content: R.when(R.has('content'), R.prop('content'), content), + content: R.when(R.has('content'), R.prop('content'), content) })), coerceToArray -); +) const coerceLevel = R.compose( R.cond([ - [ - R.includes(R.__, ['silly', 'debug', 'verbose']), - R.always('debug'), - ], + [R.includes(R.__, ['silly', 'debug', 'verbose']), R.always('debug')], [R.equals('error'), R.always('error')], [R.equals('silent'), R.always('silent')], - [R.T, R.always('warn')], + [R.T, R.always('warn')] ]), R.either(R.prop('l'), R.prop('level')) -); +) /** * Coerces the given deprecated value of the "level" option * and returns the supported value for "loglevel" option. */ -const coerceDeprecatedLevelOption = R.when( +export const coerceDeprecatedLevelOption = R.when( R.either(R.has('l'), R.has('level')), R.compose( R.dissoc('l'), R.dissoc('level'), - R.over( - R.lens(coerceLevel, R.assoc('loglevel')), - R.identity - ) + R.over(R.lens(coerceLevel, R.assoc('loglevel')), R.identity) ) -); +) const coerceDataToApiDescriptions = R.compose( R.unnest, @@ -98,51 +89,46 @@ const coerceDataToApiDescriptions = R.compose( R.evolve({ data: R.compose( R.map(([location, content]) => { - const apiDescription = (typeof content === 'string') - ? { location, content } - : { - location: content.filename, - content: content.raw, - }; - - return apiDescription; + const apiDescription = + typeof content === 'string' + ? 
{ location, content } + : { + location: content.filename, + content: content.raw + } + + return apiDescription }), R.toPairs - ), + ) }), R.pick(['apiDescriptions', 'data']) -); +) -const coerceDeprecatedDataOption = R.when( +export const coerceDeprecatedDataOption = R.when( R.propSatisfies(R.complement(R.isNil), 'data'), R.compose( R.dissoc('data'), R.over( - R.lens( - coerceDataToApiDescriptions, - R.assoc('apiDescriptions') - ), + R.lens(coerceDataToApiDescriptions, R.assoc('apiDescriptions')), R.identity ) ) -); +) -const coerceColorOption = R.when( +export const coerceColorOption = R.when( R.has('c'), R.compose( R.dissoc('c'), - R.over( - R.lens(R.prop('c'), R.assoc('color')), - coerceToBoolean - ) + R.over(R.lens(R.prop('c'), R.assoc('color')), coerceToBoolean) ) -); +) const coerceDeprecatedOptions = R.compose( coerceColorOption, coerceDeprecatedDataOption, coerceDeprecatedLevelOption -); +) const coerceOptions = R.compose( coerceDeprecatedOptions, @@ -153,25 +139,19 @@ const coerceOptions = R.compose( reporter: coerceToArray, output: coerceToArray, header: coerceToArray, - method: R.compose(R.map(R.toUpper), coerceToArray), + method: R.compose( + R.map(R.toUpper), + coerceToArray + ), only: coerceToArray, path: coerceToArray, - hookfiles: coerceToArray, + hookfiles: coerceToArray }) -); +) const normalizeConfig = R.compose( coerceOptions, removeUnsupportedOptions -); - -normalizeConfig.removeUnsupportedOptions = removeUnsupportedOptions; -normalizeConfig.coerceToArray = coerceToArray; -normalizeConfig.coerceToBoolean = coerceToBoolean; -normalizeConfig.coerceUserOption = coerceUserOption; -normalizeConfig.coerceApiDescriptions = coerceApiDescriptions; -normalizeConfig.coerceColorOption = coerceColorOption; -normalizeConfig.coerceDeprecatedLevelOption = coerceDeprecatedLevelOption; -normalizeConfig.coerceDeprecatedDataOption = coerceDeprecatedDataOption; - -module.exports = normalizeConfig; +) + +export default normalizeConfig diff --git a/lib/configuration/validateConfig.js b/lib/configuration/validateConfig.js index 5b872d638..fd578878e 100644 --- a/lib/configuration/validateConfig.js +++ b/lib/configuration/validateConfig.js @@ -1,56 +1,64 @@ const deprecatedOptions = [ { options: ['c'], - message: 'DEPRECATED: The -c configuration option is deprecated. Plese use --color instead.', + message: + 'DEPRECATED: The -c configuration option is deprecated. Plese use --color instead.' }, { options: ['data'], - message: 'DEPRECATED: The --data configuration option is deprecated ' - + 'in favor of `apiDescriptions`, please see https://dredd.org', + message: + 'DEPRECATED: The --data configuration option is deprecated ' + + 'in favor of `apiDescriptions`, please see https://dredd.org' }, { options: ['blueprintPath'], - message: 'DEPRECATED: The --blueprintPath configuration option is deprecated, ' - + 'please use --path instead.', + message: + 'DEPRECATED: The --blueprintPath configuration option is deprecated, ' + + 'please use --path instead.' }, { options: ['level'], - message: 'DEPRECATED: The --level configuration option is deprecated. Please use --loglevel instead.', - }, -]; + message: + 'DEPRECATED: The --level configuration option is deprecated. Please use --loglevel instead.' + } +] const unsupportedOptions = [ { options: ['timestamp', 't'], - message: 'REMOVED: The --timestamp/-t configuration option is no longer supported. Please use --loglevel=debug instead.', + message: + 'REMOVED: The --timestamp/-t configuration option is no longer supported. 
Please use --loglevel=debug instead.' }, { options: ['silent', 'q'], - message: 'REMOVED: The --silent/-q configuration option is no longer supported. Please use --loglevel=silent instead.', + message: + 'REMOVED: The --silent/-q configuration option is no longer supported. Please use --loglevel=silent instead.' }, { options: ['sandbox', 'b'], - message: 'REMOVED: Dredd does not support sandboxed JS hooks anymore, use standard JS hooks instead.', + message: + 'REMOVED: Dredd does not support sandboxed JS hooks anymore, use standard JS hooks instead.' }, { options: ['hooksData'], - message: 'REMOVED: Dredd does not support sandboxed JS hooks anymore, use standard JS hooks instead.', - }, -]; + message: + 'REMOVED: Dredd does not support sandboxed JS hooks anymore, use standard JS hooks instead.' + } +] function flushMessages(rules, config) { return Object.keys(config).reduce((messages, configKey) => { - const warning = rules.find(rule => rule.options.includes(configKey)); - return warning ? messages.concat(warning.message) : messages; - }, []); + const warning = rules.find((rule) => rule.options.includes(configKey)) + return warning ? messages.concat(warning.message) : messages + }, []) } /** * Returns the errors and warnings relative to the given config. */ -const validateConfig = config => ({ +const validateConfig = (config) => ({ warnings: flushMessages(deprecatedOptions, config), - errors: flushMessages(unsupportedOptions, config), -}); + errors: flushMessages(unsupportedOptions, config) +}) -module.exports = validateConfig; +export default validateConfig diff --git a/lib/configureReporters.js b/lib/configureReporters.js index 08bd1c23e..fa97b728a 100644 --- a/lib/configureReporters.js +++ b/lib/configureReporters.js @@ -1,96 +1,98 @@ -const ApiaryReporter = require('./reporters/ApiaryReporter'); -const BaseReporter = require('./reporters/BaseReporter'); -const CLIReporter = require('./reporters/CLIReporter'); -const DotReporter = require('./reporters/DotReporter'); -const HTMLReporter = require('./reporters/HTMLReporter'); -const MarkdownReporter = require('./reporters/MarkdownReporter'); -const NyanCatReporter = require('./reporters/NyanReporter'); -const XUnitReporter = require('./reporters/XUnitReporter'); +import ApiaryReporter from './reporters/ApiaryReporter' +import BaseReporter from './reporters/BaseReporter' +import CLIReporter from './reporters/CLIReporter' +import DotReporter from './reporters/DotReporter' +import MarkdownReporter from './reporters/MarkdownReporter' +import NyanCatReporter from './reporters/NyanReporter' +import XUnitReporter from './reporters/XUnitReporter' +import HTMLReporter from './reporters/HTMLReporter' +import logger from './logger' -const logger = require('./logger'); +const fileReporters = ['xunit', 'html', 'markdown', 'apiary'] -const fileReporters = [ - 'xunit', - 'html', - 'markdown', - 'apiary', -]; - -const cliReporters = ['dot', 'nyan']; +const cliReporters = ['dot', 'nyan'] function intersection(a, b) { - if (a.length > b.length) { [a, b] = Array.from([b, a]); } - return Array.from(a).filter(value => Array.from(b).includes(value)); + if (a.length > b.length) { + ;[a, b] = Array.from([b, a]) + } + return Array.from(a).filter((value) => Array.from(b).includes(value)) } function configureReporters(config, stats, runner) { - addReporter('base', config.emitter, stats); + addReporter('base', config.emitter, stats) - const reporters = config.reporter; - const outputs = config.output; + const reporters = config.reporter + const outputs = 
config.output - logger.debug('Configuring reporters:', reporters, outputs); + logger.debug('Configuring reporters:', reporters, outputs) function addCli(reportersArr) { if (reportersArr.length > 0) { - const usedCliReporters = intersection(reportersArr, cliReporters); + const usedCliReporters = intersection(reportersArr, cliReporters) if (usedCliReporters.length === 0) { return new CLIReporter( - config.emitter, stats, config['inline-errors'], config.details - ); + config.emitter, + stats, + config['inline-errors'], + config.details, + ) } - return addReporter(usedCliReporters[0], config.emitter, stats); + return addReporter(usedCliReporters[0], config.emitter, stats) } return new CLIReporter( - config.emitter, stats, config['inline-errors'], config.details - ); + config.emitter, + stats, + config['inline-errors'], + config.details, + ) } function addReporter(reporter, emitter, statistics, path) { switch (reporter) { case 'xunit': - return new XUnitReporter(emitter, statistics, path, config.details); + return new XUnitReporter(emitter, statistics, path, config.details) case 'dot': - return new DotReporter(emitter, statistics); + return new DotReporter(emitter, statistics) case 'nyan': - return new NyanCatReporter(emitter, statistics); + return new NyanCatReporter(emitter, statistics) case 'html': - return new HTMLReporter(emitter, statistics, path, config.details); + return new HTMLReporter(emitter, statistics, path, config.details) case 'markdown': - return new MarkdownReporter(emitter, statistics, path, config.details); + return new MarkdownReporter(emitter, statistics, path, config.details) case 'apiary': - return new ApiaryReporter(emitter, statistics, config, runner); + return new ApiaryReporter(emitter, statistics, config, runner) default: // I don't even know where to begin... // TODO: DESIGN / REFACTOR WHOLE REPORTER(S) API FROM SCRATCH, THIS IS MADNESS!!1 - (new BaseReporter(emitter, statistics)); + new BaseReporter(emitter, statistics) } } - addCli(reporters); + addCli(reporters) - const usedFileReporters = intersection(reporters, fileReporters); + const usedFileReporters = intersection(reporters, fileReporters) - stats.fileBasedReporters = usedFileReporters.length; + stats.fileBasedReporters = usedFileReporters.length if (usedFileReporters.length > 0) { - let usedFileReportersLength = usedFileReporters.length; + let usedFileReportersLength = usedFileReporters.length if (reporters.indexOf('apiary') > -1) { - usedFileReportersLength -= 1; + usedFileReportersLength -= 1 } if (usedFileReportersLength > outputs.length) { logger.warn(` There are more reporters requiring output paths than there are output paths provided. Using default paths for additional file-based reporters. -`); +`) } return usedFileReporters.map((usedFileReporter, index) => { - const path = outputs[index] ? outputs[index] : undefined; - return addReporter(usedFileReporter, config.emitter, stats, path); - }); + const path = outputs[index] ? 
outputs[index] : undefined + return addReporter(usedFileReporter, config.emitter, stats, path) + }) } } -module.exports = configureReporters; +export default configureReporters diff --git a/lib/getGoBinary.js b/lib/getGoBinary.js index 231da860f..b83b0b02c 100644 --- a/lib/getGoBinary.js +++ b/lib/getGoBinary.js @@ -1,19 +1,21 @@ -const childProcess = require('child_process'); -const path = require('path'); +import childProcess from 'child_process' +import * as path from 'path' // Docs: // - https://golang.org/doc/code.html#GOPATH // - https://golang.org/cmd/go/#hdr-GOPATH_environment_variable -module.exports = function getGoBinary(callback) { - const goBin = process.env.GOBIN; +export default function getGoBinary(callback) { + const goBin = process.env.GOBIN if (goBin) { - process.nextTick(() => callback(null, goBin)); + process.nextTick(() => callback(null, goBin)) } else if (process.env.GOPATH) { - process.nextTick(() => callback(null, path.join(process.env.GOPATH, 'bin'))); + process.nextTick(() => callback(null, path.join(process.env.GOPATH, 'bin'))) } else { childProcess.exec('go env GOPATH', (err, stdout) => { - if (err) { return callback(err); } - callback(null, path.join(stdout.trim(), 'bin')); - }); + if (err) { + return callback(err) + } + callback(null, path.join(stdout.trim(), 'bin')) + }) } -}; +} diff --git a/lib/getProxySettings.js b/lib/getProxySettings.js index a5642fe19..77606b94c 100644 --- a/lib/getProxySettings.js +++ b/lib/getProxySettings.js @@ -1,5 +1,4 @@ -const PROXY_ENV_VARIABLES = ['HTTP_PROXY', 'HTTPS_PROXY', 'NO_PROXY']; - +const PROXY_ENV_VARIABLES = ['HTTP_PROXY', 'HTTPS_PROXY', 'NO_PROXY'] /** * Expects an environment variables object (typically process.env) @@ -14,9 +13,9 @@ const PROXY_ENV_VARIABLES = ['HTTP_PROXY', 'HTTPS_PROXY', 'NO_PROXY']; * is handled directly by the 'request' library, see * https://github.com/request/request#user-content-proxies */ -module.exports = function getProxySettings(env) { +export default function getProxySettings(env) { return Object.entries(env) - .filter(entry => PROXY_ENV_VARIABLES.includes(entry[0].toUpperCase())) - .filter(entry => entry[1] !== '') - .map(entry => `${entry[0]}=${entry[1]}`); -}; + .filter((entry) => PROXY_ENV_VARIABLES.includes(entry[0].toUpperCase())) + .filter((entry) => entry[1] !== '') + .map((entry) => `${entry[0]}=${entry[1]}`) +} diff --git a/lib/hooksLog.js b/lib/hooksLog.js index 9ee0b7d31..595e805f1 100644 --- a/lib/hooksLog.js +++ b/lib/hooksLog.js @@ -1,14 +1,16 @@ -const util = require('util'); +import util from 'util' -module.exports = function hooksLog(logs = [], logger, content) { +export default function hooksLog(logs = [], logger, content) { // Log to logger - if (logger && typeof logger.hook === 'function') { logger.hook(content); } + if (logger && typeof logger.hook === 'function') { + logger.hook(content) + } // Append to array of logs to allow further operations, e.g. send all hooks logs to Apiary logs.push({ timestamp: Date.now(), content: typeof content === 'object' ? util.format(content) : `${content}`, - }); + }) - return logs; -}; + return logs +} diff --git a/lib/ignorePipeErrors.js b/lib/ignorePipeErrors.js index 8be468c6a..f0f5e7207 100644 --- a/lib/ignorePipeErrors.js +++ b/lib/ignorePipeErrors.js @@ -2,8 +2,8 @@ // on either side can result `uncaughtException` causing // dredd main process exiting with exitCode 7 instead of 1. This _fix_ // remedies the issue. 
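
For illustration, a minimal sketch of how this helper is typically used once it becomes the default export shown below (the import path assumes the compiled build/ layout and the spawned command is only an example):

    import { spawn } from 'child_process'
    import ignorePipeErrors from '../build/ignorePipeErrors'

    // Attach no-op 'error' handlers to the child's stdio streams so a broken
    // pipe no longer surfaces as an uncaughtException in the main process.
    const server = spawn('npm', ['start'])
    ignorePipeErrors(server)
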
-module.exports = function ignorePipeErrors(proc) { - if (proc.stdout) proc.stdout.on('error', () => {}); - if (proc.stderr) proc.stderr.on('error', () => {}); - if (proc.stdin) proc.stdin.on('error', () => {}); -}; +export default function ignorePipeErrors(proc) { + if (proc.stdout) proc.stdout.on('error', () => {}) + if (proc.stderr) proc.stderr.on('error', () => {}) + if (proc.stdin) proc.stdin.on('error', () => {}) +} diff --git a/lib/index.ts b/lib/index.ts new file mode 100644 index 000000000..b9354388a --- /dev/null +++ b/lib/index.ts @@ -0,0 +1,3 @@ +import Dredd from './Dredd' + +module.exports = Dredd diff --git a/lib/init.js b/lib/init.js index 61864cbcf..b6c886330 100644 --- a/lib/init.js +++ b/lib/init.js @@ -1,389 +1,416 @@ /* eslint no-console: ["error", { allow: ["log"] }] */ -const fs = require('fs'); -const makeDir = require('make-dir'); -const path = require('path'); -const inquirer = require('inquirer'); -const yaml = require('js-yaml'); +import fs from 'fs' +import * as path from 'path' +import makeDir from 'make-dir' +import inquirer from 'inquirer' +import * as yaml from 'js-yaml' -const packageData = require('../package.json'); - - -const INSTALL_DREDD = `npm install dredd@${packageData.version} --global`; -const RUN_DREDD = 'dredd'; +import * as packageData from '../package.json' +const INSTALL_DREDD = `npm install dredd@${packageData.version} --global` +const RUN_DREDD = 'dredd' function init(config, save, callback) { - if (!config) { config = {}; } - if (!config._) { config._ = []; } - if (!config.custom) { config.custom = {}; } + if (!config) { + config = {} + } + if (!config._) { + config._ = [] + } + if (!config.custom) { + config.custom = {} + } - const files = fs.readdirSync('.'); - const detected = detect(files); + const files = fs.readdirSync('.') + const detected = detect(files) prompt(config, detected, (error, answers) => { - if (error) { callback(error); } + if (error) { + callback(error) + } - const updatedConfig = applyAnswers(config, answers); - save(updatedConfig); - printClosingMessage(updatedConfig); + const updatedConfig = applyAnswers(config, answers) + save(updatedConfig) + printClosingMessage(updatedConfig) - callback(); - }); + callback() + }) } - function detect(files) { return { ci: detectCI(files), apiDescription: detectApiDescription(files), server: detectServer(files), language: detectLanguage(files), - }; + } } - function prompt(config, detected, callback) { - inquirer.prompt([ - { - name: 'apiDescription', - message: 'Location of the API description document', - type: 'input', - default: config.blueprint || detected.apiDescription, - }, - { - name: 'server', - message: 'Command to start the API server under test', - type: 'input', - default: config.server || detected.server, - }, - { - name: 'apiHost', - message: 'Host of the API under test', - type: 'input', - default: config.endpoint || 'http://127.0.0.1:3000', - }, - { - name: 'hooks', - message: 'Do you want to use hooks to customize Dredd\'s behavior?', - type: 'confirm', - default: true, - when: () => config.language === 'nodejs', - }, - { - name: 'language', - message: 'Programming language of the hooks', - type: 'list', - default: detected.language, - choices: [ - { name: 'Go', value: 'go' }, - { name: 'JavaScript', value: 'nodejs' }, - { name: 'Perl', value: 'perl' }, - { name: 'PHP', value: 'php' }, - { name: 'Python', value: 'python' }, - { name: 'Ruby', value: 'ruby' }, - { name: 'Rust', value: 'rust' }, - ], - when: answers => answers.hooks, - }, - { - name: 'apiary', - 
message: 'Do you want to report your tests to the Apiary inspector?', - type: 'confirm', - default: true, - when: () => config.reporter !== 'apiary', - }, - { - name: 'apiaryApiKey', - message: 'Enter Apiary API key (leave empty for anonymous, disposable test reports)', - type: 'input', - default: config.custom ? config.custom.apiaryApiKey : undefined, - when: answers => ( - answers.apiary - && (!config.custom || !config.custom.apiaryApiKey) - ), - }, - { - name: 'apiaryApiName', - message: 'Enter Apiary API name', - type: 'input', - default: config.custom ? config.custom.apiaryApiName : undefined, - when: answers => ( - answers.apiary - && answers.apiaryApiKey - && (!config.custom || !config.custom.apiaryApiName) - ), - }, - { - name: 'appveyor', - message: 'Found AppVeyor configuration, do you want to add Dredd?', - type: 'confirm', - default: true, - when: () => detected.ci.includes('appveyor'), - }, - { - name: 'circleci', - message: 'Found CircleCI configuration, do you want to add Dredd?', - type: 'confirm', - default: true, - when: () => detected.ci.includes('circleci'), - }, - { - name: 'travisci', - message: 'Found Travis CI configuration, do you want to add Dredd?', - type: 'confirm', - default: true, - when: () => detected.ci.includes('travisci'), - }, - { - name: 'wercker', - message: 'Found Wercker configuration, do you want to add Dredd?', - type: 'confirm', - default: true, - when: () => detected.ci.includes('wercker'), - }, - { - name: 'ci', - message: 'Dredd is best served with Continuous Integration. Do you want to create CI configuration?', - type: 'confirm', - default: true, - when: () => !detected.ci.length, - }, - { - name: 'createCI', - message: 'Which CI do you want to use?', - type: 'list', - default: 'travisci', - choices: [ - { name: 'AppVeyor', value: 'appveyor' }, - { name: 'CircleCI', value: 'circleci' }, - { name: 'Travis CI', value: 'travisci' }, - { name: 'Wercker (Oracle Container Pipelines)', value: 'wercker' }, - ], - when: answers => answers.ci, - }, - ]).then((answers) => { - callback(null, answers); - }); + inquirer + .prompt([ + { + name: 'apiDescription', + message: 'Location of the API description document', + type: 'input', + default: config.blueprint || detected.apiDescription, + }, + { + name: 'server', + message: 'Command to start the API server under test', + type: 'input', + default: config.server || detected.server, + }, + { + name: 'apiHost', + message: 'Host of the API under test', + type: 'input', + default: config.endpoint || 'http://127.0.0.1:3000', + }, + { + name: 'hooks', + message: "Do you want to use hooks to customize Dredd's behavior?", + type: 'confirm', + default: true, + when: () => config.language === 'nodejs', + }, + { + name: 'language', + message: 'Programming language of the hooks', + type: 'list', + default: detected.language, + choices: [ + { name: 'Go', value: 'go' }, + { name: 'JavaScript', value: 'nodejs' }, + { name: 'Perl', value: 'perl' }, + { name: 'PHP', value: 'php' }, + { name: 'Python', value: 'python' }, + { name: 'Ruby', value: 'ruby' }, + { name: 'Rust', value: 'rust' }, + ], + when: (answers) => answers.hooks, + }, + { + name: 'apiary', + message: 'Do you want to report your tests to the Apiary inspector?', + type: 'confirm', + default: true, + when: () => config.reporter !== 'apiary', + }, + { + name: 'apiaryApiKey', + message: + 'Enter Apiary API key (leave empty for anonymous, disposable test reports)', + type: 'input', + default: config.custom ? 
config.custom.apiaryApiKey : undefined, + when: (answers) => + answers.apiary && (!config.custom || !config.custom.apiaryApiKey), + }, + { + name: 'apiaryApiName', + message: 'Enter Apiary API name', + type: 'input', + default: config.custom ? config.custom.apiaryApiName : undefined, + when: (answers) => + answers.apiary && + answers.apiaryApiKey && + (!config.custom || !config.custom.apiaryApiName), + }, + { + name: 'appveyor', + message: 'Found AppVeyor configuration, do you want to add Dredd?', + type: 'confirm', + default: true, + when: () => detected.ci.includes('appveyor'), + }, + { + name: 'circleci', + message: 'Found CircleCI configuration, do you want to add Dredd?', + type: 'confirm', + default: true, + when: () => detected.ci.includes('circleci'), + }, + { + name: 'travisci', + message: 'Found Travis CI configuration, do you want to add Dredd?', + type: 'confirm', + default: true, + when: () => detected.ci.includes('travisci'), + }, + { + name: 'wercker', + message: 'Found Wercker configuration, do you want to add Dredd?', + type: 'confirm', + default: true, + when: () => detected.ci.includes('wercker'), + }, + { + name: 'ci', + message: + 'Dredd is best served with Continuous Integration. Do you want to create CI configuration?', + type: 'confirm', + default: true, + when: () => !detected.ci.length, + }, + { + name: 'createCI', + message: 'Which CI do you want to use?', + type: 'list', + default: 'travisci', + choices: [ + { name: 'AppVeyor', value: 'appveyor' }, + { name: 'CircleCI', value: 'circleci' }, + { name: 'Travis CI', value: 'travisci' }, + { name: 'Wercker (Oracle Container Pipelines)', value: 'wercker' }, + ], + when: (answers) => answers.ci, + }, + ]) + .then((answers) => { + callback(null, answers) + }) } - -function applyAnswers(config, answers, options = {}) { +export const applyAnswers = (config, answers, options = {}) => { const ci = options.ci || { appveyor: updateAppVeyor, circleci: updateCircleCI, travisci: updateTravisCI, wercker: updateWercker, - }; + } - config._[0] = answers.apiDescription; - config._[1] = answers.apiHost; + config._[0] = answers.apiDescription + config._[1] = answers.apiHost - config.server = answers.server || null; - config.language = answers.language || 'nodejs'; + config.server = answers.server || null + config.language = answers.language || 'nodejs' - if (answers.apiary) { config.reporter = 'apiary'; } - if (answers.apiaryApiKey) { config.custom.apiaryApiKey = answers.apiaryApiKey; } - if (answers.apiaryApiName) { config.custom.apiaryApiName = answers.apiaryApiName; } + if (answers.apiary) { + config.reporter = 'apiary' + } + if (answers.apiaryApiKey) { + config.custom.apiaryApiKey = answers.apiaryApiKey + } + if (answers.apiaryApiName) { + config.custom.apiaryApiName = answers.apiaryApiName + } if (answers.createCI) { - ci[answers.createCI](); + ci[answers.createCI]() } else { Object.keys(ci).forEach((name) => { if (answers[name]) { - ci[name](); + ci[name]() } - }); + }) } - return config; + return config } - -function printClosingMessage(config, print = console.log) { - print('\nConfiguration saved to dredd.yml\n'); +export const printClosingMessage = (config, print = console.log) => { + print('\nConfiguration saved to dredd.yml\n') if (config.language === 'nodejs') { - print('You can run tests now, with:\n'); + print('You can run tests now, with:\n') } else { - print('Install hooks and run Dredd test with:\n'); + print('Install hooks and run Dredd test with:\n') } switch (config.language) { case 'ruby': - print(' $ gem install 
dredd_hooks'); break; + print(' $ gem install dredd_hooks') + break case 'python': - print(' $ pip install dredd_hooks'); break; + print(' $ pip install dredd_hooks') + break case 'php': - print(' $ composer require ddelnano/dredd-hooks-php --dev'); break; + print(' $ composer require ddelnano/dredd-hooks-php --dev') + break case 'perl': - print(' $ cpanm Dredd::Hooks'); break; + print(' $ cpanm Dredd::Hooks') + break case 'go': - print(' $ go get github.com/snikch/goodman/cmd/goodman'); break; + print(' $ go get github.com/snikch/goodman/cmd/goodman') + break case 'rust': - print(' $ cargo install dredd-hooks'); break; - default: break; + print(' $ cargo install dredd-hooks') + break + default: + break } - print(' $ dredd\n'); + print(' $ dredd\n') } - function editYaml(file, update) { const contents = fs.existsSync(file) ? yaml.safeLoad(fs.readFileSync(file)) - : {}; + : {} - update(contents); + update(contents) - makeDir.sync(path.dirname(file)); - fs.writeFileSync(file, yaml.safeDump(contents)); + makeDir.sync(path.dirname(file)) + fs.writeFileSync(file, yaml.safeDump(contents)) } - -function updateAppVeyor(options = {}) { - const edit = options.editYaml || editYaml; +export const updateAppVeyor = (options = {}) => { + const edit = options.editYaml || editYaml edit('appveyor.yml', (contents) => { - if (!contents.install) { contents.install = []; } - - contents.install.push({ ps: 'Install-Product node' }); - contents.install.push('set PATH=%APPDATA%\\npm;%PATH%'); - contents.install.push(INSTALL_DREDD); - - if (!contents.build) { contents.build = false; } - - if (!contents.test_script) { contents.test_script = []; } - contents.test_script.push(RUN_DREDD); - }); + if (!contents.install) { + contents.install = [] + } + + contents.install.push({ ps: 'Install-Product node' }) + contents.install.push('set PATH=%APPDATA%\\npm;%PATH%') + contents.install.push(INSTALL_DREDD) + + if (!contents.build) { + contents.build = false + } + + if (!contents.test_script) { + contents.test_script = [] + } + contents.test_script.push(RUN_DREDD) + }) } - -function updateCircleCI(options = {}) { - const edit = options.editYaml || editYaml; +export const updateCircleCI = (options = {}) => { + const edit = options.editYaml || editYaml edit('.circleci/config.yml', (contents) => { - if (!contents.version) { contents.version = 2; } + if (!contents.version) { + contents.version = 2 + } - if (!contents.jobs) { contents.jobs = {}; } + if (!contents.jobs) { + contents.jobs = {} + } contents.jobs.dredd = { docker: [{ image: 'circleci/node:latest' }], steps: ['checkout', { run: INSTALL_DREDD }, { run: RUN_DREDD }], - }; - }); + } + }) } - -function updateTravisCI(options = {}) { - const edit = options.editYaml || editYaml; +export const updateTravisCI = (options = {}) => { + const edit = options.editYaml || editYaml edit('.travis.yml', (contents) => { - if (!contents.language) { contents.language = 'node_js'; } - - if (!contents.before_install) { contents.before_install = []; } - contents.before_install.push(INSTALL_DREDD); - - if (!contents.before_script) { contents.before_script = []; } - contents.before_script.push(RUN_DREDD); - }); + if (!contents.language) { + contents.language = 'node_js' + } + + if (!contents.before_install) { + contents.before_install = [] + } + contents.before_install.push(INSTALL_DREDD) + + if (!contents.before_script) { + contents.before_script = [] + } + contents.before_script.push(RUN_DREDD) + }) } - -function updateWercker(options = {}) { - const edit = options.editYaml || editYaml; 
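
A rough sketch of how these exported CI helpers can now be exercised directly in tests by injecting a fake editYaml, which is what the removed init._update* aliases further down used to make possible (the import path is an assumption about the compiled output):

    import { updateCircleCI } from '../build/init'

    // Capture what would be written to .circleci/config.yml without touching disk.
    const written = {}
    updateCircleCI({
      editYaml: (file, update) => {
        written[file] = {}
        update(written[file])
      },
    })
    // written['.circleci/config.yml'].jobs.dredd now holds the generated job
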
+export const updateWercker = (options = {}) => { + const edit = options.editYaml || editYaml edit('wercker.yml', (contents) => { - if (!contents.box) { contents.box = 'node'; } + if (!contents.box) { + contents.box = 'node' + } - if (!contents.build) { contents.build = {}; } + if (!contents.build) { + contents.build = {} + } contents.build.steps = [].concat( [{ script: { name: 'install-dredd', code: INSTALL_DREDD } }], contents.build.steps || [], - [{ script: { name: 'dredd', code: RUN_DREDD } }] - ); - }); + [{ script: { name: 'dredd', code: RUN_DREDD } }], + ) + }) } - -function detectLanguage(files) { - const lcFiles = files.map(f => f.toLowerCase()); +export const detectLanguage = (files) => { + const lcFiles = files.map((f) => f.toLowerCase()) if (lcFiles.includes('cargo.toml')) { - return 'rust'; + return 'rust' } - if (lcFiles.filter(f => f.match(/\.go$/)).length) { - return 'go'; + if (lcFiles.filter((f) => f.match(/\.go$/)).length) { + return 'go' } if (lcFiles.includes('composer.json')) { - return 'php'; + return 'php' } if ( - lcFiles.includes('minil.toml') || lcFiles.includes('cpanfile') - || lcFiles.includes('meta.json') || lcFiles.includes('build.pl') + lcFiles.includes('minil.toml') || + lcFiles.includes('cpanfile') || + lcFiles.includes('meta.json') || + lcFiles.includes('build.pl') ) { - return 'perl'; + return 'perl' } if ( - lcFiles.includes('setup.py') || lcFiles.includes('requirements.txt') - || lcFiles.includes('pipfile') || lcFiles.includes('pyproject.toml') - || lcFiles.includes('setup.cfg') || lcFiles.includes('manifest.in') + lcFiles.includes('setup.py') || + lcFiles.includes('requirements.txt') || + lcFiles.includes('pipfile') || + lcFiles.includes('pyproject.toml') || + lcFiles.includes('setup.cfg') || + lcFiles.includes('manifest.in') ) { - return 'python'; + return 'python' } if ( - lcFiles.includes('gemfile') || lcFiles.includes('gemfile.lock') - || lcFiles.filter(f => f.match(/\.gemspec$/)).length + lcFiles.includes('gemfile') || + lcFiles.includes('gemfile.lock') || + lcFiles.filter((f) => f.match(/\.gemspec$/)).length ) { - return 'ruby'; + return 'ruby' } - return 'nodejs'; + return 'nodejs' } - -function detectServer(files) { +export const detectServer = (files) => { const commands = { nodejs: 'npm start', ruby: 'bundle exec rails server', python: 'python manage.py runserver', - }; - const language = detectLanguage(files); - return commands[language] || commands.nodejs; + } + const language = detectLanguage(files) + return commands[language] || commands.nodejs } +export const detectApiDescription = (files) => { + const apib = files.filter((f) => f.match(/\.apib$/i)) + if (apib.length) { + return apib[0] + } -function detectApiDescription(files) { - const apib = files.filter(f => f.match(/\.apib$/i)); - if (apib.length) { return apib[0]; } - - const openapi2 = files.filter(f => f.match(/\.ya?ml$/i) && f.match(/swagger/)); - if (openapi2.length) { return openapi2[0]; } + const openapi2 = files.filter( + (f) => f.match(/\.ya?ml$/i) && f.match(/swagger/), + ) + if (openapi2.length) { + return openapi2[0] + } - const openapi = files.filter(f => f.match(/\.ya?ml$/i) && f.match(/api/)); - if (openapi.length) { return openapi[0]; } + const openapi = files.filter((f) => f.match(/\.ya?ml$/i) && f.match(/api/)) + if (openapi.length) { + return openapi[0] + } - return 'apiary.apib'; + return 'apiary.apib' } - -function detectCI(files) { +export const detectCI = (files) => { const ci = { 'wercker.yml': 'wercker', 'appveyor.yml': 'appveyor', '.travis.yml': 
'travisci', '.circleci': 'circleci', - }; - return files.map(f => ci[f]).filter(f => !!f); + } + return files.map((f) => ci[f]).filter((f) => !!f) } - -// only for the purpose of unit tests -init._applyAnswers = applyAnswers; -init._printClosingMessage = printClosingMessage; -init._detectLanguage = detectLanguage; -init._detectServer = detectServer; -init._detectApiDescription = detectApiDescription; -init._detectCI = detectCI; -init._updateAppVeyor = updateAppVeyor; -init._updateCircleCI = updateCircleCI; -init._updateTravisCI = updateTravisCI; -init._updateWercker = updateWercker; - - -module.exports = init; +export default init diff --git a/lib/isURL.js b/lib/isURL.js deleted file mode 100644 index 2d61015b5..000000000 --- a/lib/isURL.js +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Decides whether given string is a URL or not - * - * @param {string} location - * @returns {boolean} - */ -module.exports = function isURL(location) { - return /^http(s)?:\/\//.test(location); -}; diff --git a/lib/isURL.ts b/lib/isURL.ts new file mode 100644 index 000000000..f64461538 --- /dev/null +++ b/lib/isURL.ts @@ -0,0 +1,6 @@ +/** + * Decides whether given string is a URL or not. + */ +export default function isURL(location: string): boolean { + return /^http(s)?:\/\//.test(location) +} diff --git a/lib/logger.js b/lib/logger.js deleted file mode 100644 index 97e27005d..000000000 --- a/lib/logger.js +++ /dev/null @@ -1,17 +0,0 @@ -const winston = require('winston'); - -module.exports = new (winston.Logger)({ - transports: [ - new (winston.transports.Console)({ colorize: true }), - ], - levels: { - debug: 2, - warn: 1, - error: 0, - }, - colors: { - debug: 'cyan', - warn: 'yellow', - error: 'red', - }, -}); diff --git a/lib/logger.ts b/lib/logger.ts new file mode 100644 index 000000000..45f087f8b --- /dev/null +++ b/lib/logger.ts @@ -0,0 +1,17 @@ +const winston = require('winston') + +const logger = new winston.Logger({ + transports: [new winston.transports.Console({ colorize: true })], + levels: { + debug: 2, + warn: 1, + error: 0, + }, + colors: { + debug: 'cyan', + warn: 'yellow', + error: 'red', + }, +}) + +export default logger diff --git a/lib/performRequest.js b/lib/performRequest.js index bc78cc5f7..e147902d8 100644 --- a/lib/performRequest.js +++ b/lib/performRequest.js @@ -1,8 +1,7 @@ -const defaultRequest = require('request'); -const caseless = require('caseless'); - -const defaultLogger = require('./logger'); +import defaultRequest from 'request' +import caseless from 'caseless' +import defaultLogger from './logger' /** * Performs the HTTP request as described in the 'transaction.request' object @@ -19,74 +18,85 @@ const defaultLogger = require('./logger'); * @param {Function} callback */ function performRequest(uri, transactionReq, options, callback) { - if (typeof options === 'function') { [options, callback] = [{}, options]; } - const logger = options.logger || defaultLogger; - const request = options.request || defaultRequest; + if (typeof options === 'function') { + ;[options, callback] = [{}, options] + } + const logger = options.logger || defaultLogger + const request = options.request || defaultRequest - const httpOptions = Object.assign({}, options.http || {}); - httpOptions.proxy = false; - httpOptions.followRedirect = false; - httpOptions.encoding = null; - httpOptions.method = transactionReq.method; - httpOptions.uri = uri; + const httpOptions = Object.assign({}, options.http || {}) + httpOptions.proxy = false + httpOptions.followRedirect = false + httpOptions.encoding = null + 
httpOptions.method = transactionReq.method + httpOptions.uri = uri try { - httpOptions.body = getBodyAsBuffer(transactionReq.body, transactionReq.bodyEncoding); - httpOptions.headers = normalizeContentLengthHeader(transactionReq.headers, httpOptions.body); - - const protocol = httpOptions.uri.split(':')[0].toUpperCase(); - logger.debug(`Performing ${protocol} request to the server under test: ` - + `${httpOptions.method} ${httpOptions.uri}`); + httpOptions.body = getBodyAsBuffer( + transactionReq.body, + transactionReq.bodyEncoding + ) + httpOptions.headers = normalizeContentLengthHeader( + transactionReq.headers, + httpOptions.body + ) + + const protocol = httpOptions.uri.split(':')[0].toUpperCase() + logger.debug( + `Performing ${protocol} request to the server under test: ` + + `${httpOptions.method} ${httpOptions.uri}` + ) request(httpOptions, (error, response, responseBody) => { - logger.debug(`Handling ${protocol} response from the server under test`); + logger.debug(`Handling ${protocol} response from the server under test`) if (error) { - callback(error); + callback(error) } else { - callback(null, createTransactionResponse(response, responseBody)); + callback(null, createTransactionResponse(response, responseBody)) } - }); + }) } catch (error) { - process.nextTick(() => callback(error)); + process.nextTick(() => callback(error)) } } - /** * Coerces the HTTP request body to a Buffer * * @param {string|Buffer} body * @param {*} encoding */ -function getBodyAsBuffer(body, encoding) { +export const getBodyAsBuffer = (body, encoding) => { return body instanceof Buffer ? body - : Buffer.from(`${body || ''}`, normalizeBodyEncoding(encoding)); + : Buffer.from(`${body || ''}`, normalizeBodyEncoding(encoding)) } - /** * Returns the encoding as either 'utf-8' or 'base64'. Throws * an error in case any other encoding is provided. * * @param {string} encoding */ -function normalizeBodyEncoding(encoding) { - if (!encoding) { return 'utf-8'; } +export const normalizeBodyEncoding = (encoding) => { + if (!encoding) { + return 'utf-8' + } switch (encoding.toLowerCase()) { case 'utf-8': case 'utf8': - return 'utf-8'; + return 'utf-8' case 'base64': - return 'base64'; + return 'base64' default: - throw new Error(`Unsupported encoding: '${encoding}' (only UTF-8 and ` - + 'Base64 are supported)'); + throw new Error( + `Unsupported encoding: '${encoding}' (only UTF-8 and ` + + 'Base64 are supported)' + ) } } - /** * Detects an existing Content-Length header and overrides the user-provided * header value in case it's out of sync with the real length of the body. 
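
To make that behaviour concrete, a short usage sketch of the now-exported helper (the import path is an assumption about the compiled build/ output):

    import { normalizeContentLengthHeader } from '../build/performRequest'

    const body = Buffer.from('Hello')
    const headers = normalizeContentLengthHeader(
      { 'Content-Length': '42' }, // out of sync with the 5-byte body
      body,
    )
    // headers['Content-Length'] === '5'; a warning is logged and the
    // original headers object is left untouched
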
@@ -96,26 +106,27 @@ function normalizeBodyEncoding(encoding) { * @param {Object} [options] * @param {Object} [options.logger] Custom logger */ -function normalizeContentLengthHeader(headers, body, options = {}) { - const logger = options.logger || defaultLogger; +export const normalizeContentLengthHeader = (headers, body, options = {}) => { + const logger = options.logger || defaultLogger - const modifiedHeaders = Object.assign({}, headers); - const calculatedValue = Buffer.byteLength(body); - const name = caseless(modifiedHeaders).has('Content-Length'); + const modifiedHeaders = Object.assign({}, headers) + const calculatedValue = Buffer.byteLength(body) + const name = caseless(modifiedHeaders).has('Content-Length') if (name) { - const value = parseInt(modifiedHeaders[name], 10); + const value = parseInt(modifiedHeaders[name], 10) if (value !== calculatedValue) { - modifiedHeaders[name] = `${calculatedValue}`; - logger.warn(`Specified Content-Length header is ${value}, but the real ` - + `body length is ${calculatedValue}. Using ${calculatedValue} instead.`); + modifiedHeaders[name] = `${calculatedValue}` + logger.warn( + `Specified Content-Length header is ${value}, but the real ` + + `body length is ${calculatedValue}. Using ${calculatedValue} instead.` + ) } } else { - modifiedHeaders['Content-Length'] = `${calculatedValue}`; + modifiedHeaders['Content-Length'] = `${calculatedValue}` } - return modifiedHeaders; + return modifiedHeaders } - /** * Real transaction response object factory. Serializes binary responses * to string using Base64 encoding. @@ -123,38 +134,28 @@ function normalizeContentLengthHeader(headers, body, options = {}) { * @param {Object} response Node.js HTTP response * @param {Buffer} body HTTP response body as Buffer */ -function createTransactionResponse(response, body) { +export const createTransactionResponse = (response, body) => { const transactionRes = { statusCode: response.statusCode, - headers: Object.assign({}, response.headers), - }; + headers: Object.assign({}, response.headers) + } if (Buffer.byteLength(body || '')) { - transactionRes.bodyEncoding = detectBodyEncoding(body); - transactionRes.body = body.toString(transactionRes.bodyEncoding); + transactionRes.bodyEncoding = detectBodyEncoding(body) + transactionRes.body = body.toString(transactionRes.bodyEncoding) } - return transactionRes; + return transactionRes } - /** * @param {Buffer} body */ -function detectBodyEncoding(body) { +export const detectBodyEncoding = (body) => { // U+FFFD is a replacement character in UTF-8 and indicates there // are some bytes which could not been translated as UTF-8. Therefore // let's assume the body is in binary format. Dredd encodes binary as // Base64 to be able to transfer it wrapped in JSON over the TCP to non-JS // hooks implementations. - return body.toString().includes('\ufffd') ? 'base64' : 'utf-8'; + return body.toString().includes('\ufffd') ? 
'base64' : 'utf-8' } - -// only for the purpose of unit tests -performRequest._normalizeBodyEncoding = normalizeBodyEncoding; -performRequest._getBodyAsBuffer = getBodyAsBuffer; -performRequest._normalizeContentLengthHeader = normalizeContentLengthHeader; -performRequest._createTransactionResponse = createTransactionResponse; -performRequest._detectBodyEncoding = detectBodyEncoding; - - -module.exports = performRequest; +export default performRequest diff --git a/lib/prettifyResponse.js b/lib/prettifyResponse.js index df3723e32..e36bfed02 100644 --- a/lib/prettifyResponse.js +++ b/lib/prettifyResponse.js @@ -1,55 +1,56 @@ -const html = require('html'); +import * as html from 'html' -const logger = require('./logger'); +import logger from './logger' -module.exports = function prettifyResponse(response) { - let contentType; +export default function prettifyResponse(response) { + let contentType function stringify(obj) { try { if (typeof obj === 'string') { - obj = JSON.parse(obj); + obj = JSON.parse(obj) } - obj = JSON.stringify(obj, null, 2); + obj = JSON.stringify(obj, null, 2) } catch (e) { - logger.debug(`Could not stringify: ${obj}`); + logger.debug(`Could not stringify: ${obj}`) } - return obj; + return obj } function prettifyBody(body, contentKind) { switch (contentKind) { case 'text/html': - body = html.prettyPrint(body, { indent_size: 2 }); - break; + body = html.prettyPrint(body, { indent_size: 2 }) + break default: - body = stringify(body); + body = stringify(body) } - return body; + return body } if (response && response.headers) { - contentType = response.headers['content-type'] || response.headers['Content-Type']; + contentType = + response.headers['content-type'] || response.headers['Content-Type'] } - let stringRepresentation = ''; + let stringRepresentation = '' for (const key of Object.keys(response || {})) { - let value = response[key]; + let value = response[key] if (key === 'body') { - value = `\n${prettifyBody(value, contentType)}`; + value = `\n${prettifyBody(value, contentType)}` } else if (key === 'schema') { - value = `\n${stringify(value)}`; + value = `\n${stringify(value)}` } else if (key === 'headers') { - let header = '\n'; + let header = '\n' for (const hkey of Object.keys(value || {})) { - const hval = value[hkey]; - header += ` ${hkey}: ${hval}\n`; + const hval = value[hkey] + header += ` ${hkey}: ${hval}\n` } - value = header; + value = header } - stringRepresentation += `${key}: ${value}\n`; + stringRepresentation += `${key}: ${value}\n` } - return stringRepresentation; -}; + return stringRepresentation +} diff --git a/lib/readLocation.js b/lib/readLocation.js index dce2559b0..5ce386532 100644 --- a/lib/readLocation.js +++ b/lib/readLocation.js @@ -1,58 +1,71 @@ -const fs = require('fs'); -const defaultRequest = require('request'); - -const isURL = require('./isURL'); +import fs from 'fs' +import defaultRequest from 'request' +import isURL from './isURL' function getErrorFromResponse(response, hasBody) { - const contentType = response.headers['content-type']; + const contentType = response.headers['content-type'] if (hasBody) { - const bodyDescription = contentType ? `'${contentType}' body` : 'body without Content-Type'; - return new Error(`Dredd got HTTP ${response.statusCode} response with ${bodyDescription}`); + const bodyDescription = contentType + ? 
`'${contentType}' body` + : 'body without Content-Type' + return new Error( + `Dredd got HTTP ${response.statusCode} response with ${bodyDescription}`, + ) } - return new Error(`Dredd got HTTP ${response.statusCode} response without body`); + return new Error( + `Dredd got HTTP ${response.statusCode} response without body`, + ) } - function readRemoteFile(uri, options, callback) { - if (typeof options === 'function') { [options, callback] = [{}, options]; } - const request = options.request || defaultRequest; + if (typeof options === 'function') { + ;[options, callback] = [{}, options] + } + const request = options.request || defaultRequest - const httpOptions = Object.assign({}, options.http || {}); - httpOptions.uri = uri; - httpOptions.timeout = 5000; // ms, limits both connection time and server response time + const httpOptions = Object.assign({}, options.http || {}) + httpOptions.uri = uri + httpOptions.timeout = 5000 // ms, limits both connection time and server response time try { request(httpOptions, (error, response, responseBody) => { if (error) { - callback(error); + callback(error) } else if (!response) { - callback(new Error('Unexpected error')); - } else if (!responseBody || response.statusCode < 200 || response.statusCode >= 300) { - callback(getErrorFromResponse(response, !!responseBody)); + callback(new Error('Unexpected error')) + } else if ( + !responseBody || + response.statusCode < 200 || + response.statusCode >= 300 + ) { + callback(getErrorFromResponse(response, !!responseBody)) } else { - callback(null, responseBody); + callback(null, responseBody) } - }); + }) } catch (error) { - process.nextTick(() => callback(error)); + process.nextTick(() => callback(error)) } } - function readLocalFile(path, callback) { fs.readFile(path, 'utf8', (error, data) => { - if (error) { callback(error); return; } - callback(null, data); - }); + if (error) { + callback(error) + return + } + callback(null, data) + }) } - -module.exports = function readLocation(location, options, callback) { - if (typeof options === 'function') { [options, callback] = [{}, options]; } +export default function readLocation(location, options, callback) { + if (typeof options === 'function') { + ;[options, callback] = [{}, options] + } if (isURL(location)) { - readRemoteFile(location, options, callback); + readRemoteFile(location, options, callback) } else { - readLocalFile(location, callback); + readLocalFile(location, callback) } -}; +} diff --git a/lib/reporters/ApiaryReporter.js b/lib/reporters/ApiaryReporter.js index efa8bc6be..45f21b2cd 100644 --- a/lib/reporters/ApiaryReporter.js +++ b/lib/reporters/ApiaryReporter.js @@ -1,12 +1,11 @@ -const clone = require('clone'); -const generateUuid = require('uuid/v4'); -const os = require('os'); -const request = require('request'); - -const logger = require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const packageData = require('../../package.json'); +import clone from 'clone'; +import generateUuid from 'uuid/v4'; +import * as os from 'os'; +import request from 'request'; +import logger from '../logger'; +import reporterOutputLogger from './reporterOutputLogger'; +import * as packageData from '../../package.json'; const CONNECTION_ERRORS = [ 'ECONNRESET', @@ -15,10 +14,9 @@ const CONNECTION_ERRORS = [ 'ETIMEDOUT', 'ECONNREFUSED', 'EHOSTUNREACH', - 'EPIPE', + 'EPIPE' ]; - function ApiaryReporter(emitter, stats, config, runner) { this.type = 'apiary'; this.stats = stats; @@ -32,9 +30,13 @@ function ApiaryReporter(emitter, 
stats, config, runner) { this.errors = []; this.serverError = false; this.configuration = { - apiUrl: (this._get('apiaryApiUrl', 'APIARY_API_URL', 'https://api.apiary.io')).replace(/\/$/, ''), + apiUrl: this._get( + 'apiaryApiUrl', + 'APIARY_API_URL', + 'https://api.apiary.io' + ).replace(/\/$/, ''), apiToken: this._get('apiaryApiKey', 'APIARY_API_KEY', null), - apiSuite: this._get('apiaryApiName', 'APIARY_API_NAME', null), + apiSuite: this._get('apiaryApiName', 'APIARY_API_NAME', null) }; this.configureEmitter(emitter); @@ -48,31 +50,44 @@ Configure Dredd to be able to save test reports alongside your Apiary API projec https://dredd.org/en/latest/how-to-guides/#using-apiary-reporter-and-apiary-tests `); } - if (!this.configuration.apiSuite) { this.configuration.apiSuite = 'public'; } + if (!this.configuration.apiSuite) { + this.configuration.apiSuite = 'public'; + } } - // THIS IS HIIIIGHWAY TO HELL, HIIIIIGHWAY TO HELL. Everything should have one single interface -ApiaryReporter.prototype._get = function _get(customProperty, envProperty, defaultVal) { +ApiaryReporter.prototype._get = function _get( + customProperty, + envProperty, + defaultVal +) { let returnVal = defaultVal; // This will be deprecated if (this.config.custom && this.config.custom[customProperty]) { returnVal = this.config.custom[customProperty]; - // This will be the ONLY supported way how to configure this reporter + // This will be the ONLY supported way how to configure this reporter } else if (this.config.custom && this.config.custom[customProperty]) { returnVal = this.config.custom[customProperty]; - // This will be deprecated - } else if (this.config.custom && this.config.custom.apiaryReporterEnv && this.config.custom.apiaryReporterEnv[customProperty]) { + // This will be deprecated + } else if ( + this.config.custom && + this.config.custom.apiaryReporterEnv && + this.config.custom.apiaryReporterEnv[customProperty] + ) { returnVal = this.config.custom.apiaryReporterEnv[customProperty]; - // This will be deprecated - } else if (this.config.custom && this.config.custom.apiaryReporterEnv && this.config.custom.apiaryReporterEnv[envProperty]) { + // This will be deprecated + } else if ( + this.config.custom && + this.config.custom.apiaryReporterEnv && + this.config.custom.apiaryReporterEnv[envProperty] + ) { returnVal = this.config.custom.apiaryReporterEnv[envProperty]; - // This will be supported for backward compatibility, but can be removed in future. + // This will be supported for backward compatibility, but can be removed in future. 
} else if (process.env[envProperty]) { returnVal = process.env[envProperty]; } @@ -80,17 +95,21 @@ ApiaryReporter.prototype._get = function _get(customProperty, envProperty, defau return returnVal; }; - ApiaryReporter.prototype._getKeys = function _getKeys() { let returnKeys = []; - returnKeys = returnKeys.concat(Object.keys((this.config.custom && this.config.custom.apiaryReporterEnv) || {})); + returnKeys = returnKeys.concat( + Object.keys( + (this.config.custom && this.config.custom.apiaryReporterEnv) || {} + ) + ); return returnKeys.concat(Object.keys(process.env)); }; - ApiaryReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('start', (apiDescriptions, callback) => { - if (this.serverError === true) { return callback(); } + if (this.serverError === true) { + return callback(); + } this.uuid = generateUuid(); this.startedAt = Math.round(new Date().getTime() / 1000); @@ -108,19 +127,20 @@ ApiaryReporter.prototype.configureEmitter = function configureEmitter(emitter) { // Transform blueprints data to array const data = { - blueprints: apiDescriptions.map(apiDescription => ({ + blueprints: apiDescriptions.map((apiDescription) => ({ filename: apiDescription.location, raw: apiDescription.content, - annotations: apiDescription.annotations, + annotations: apiDescription.annotations })), endpoint: this.config.server, - agent: this._get('dreddAgent', 'DREDD_AGENT') || this._get('user', 'USER'), + agent: + this._get('dreddAgent', 'DREDD_AGENT') || this._get('user', 'USER'), agentRunUuid: this.uuid, hostname: this._get('dreddHostname', 'DREDD_HOSTNAME') || os.hostname(), startedAt: this.startedAt, public: true, status: 'running', - agentEnvironment: ciEnvVars, + agentEnvironment: ciEnvVars }; if (this.configuration.apiToken && this.configuration.apiSuite) { @@ -129,15 +149,22 @@ ApiaryReporter.prototype.configureEmitter = function configureEmitter(emitter) { const path = `/apis/${this.configuration.apiSuite}/tests/runs`; - this._performRequestAsync(path, 'POST', data, (error, response, parsedBody) => { - if (error) { - callback(error); - } else { - this.remoteId = parsedBody._id; - if (parsedBody.reportUrl) { this.reportUrl = parsedBody.reportUrl; } - callback(); + this._performRequestAsync( + path, + 'POST', + data, + (error, response, parsedBody) => { + if (error) { + callback(error); + } else { + this.remoteId = parsedBody._id; + if (parsedBody.reportUrl) { + this.reportUrl = parsedBody.reportUrl; + } + callback(); + } } - }); + ); }); emitter.on('test pass', this._createStep.bind(this)); @@ -147,60 +174,83 @@ ApiaryReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('test skip', this._createStep.bind(this)); emitter.on('test error', (error, test, callback) => { - if (this.serverError === true) { return callback(); } + if (this.serverError === true) { + return callback(); + } const data = this._transformTestToReporter(test); if (Array.from(CONNECTION_ERRORS).includes(error.code)) { data.results.errors.push({ - severity: 'error', message: 'Error connecting to server under test!', + severity: 'error', + message: 'Error connecting to server under test!' }); } else { data.results.errors.push({ - severity: 'error', message: 'Unhandled error occured when executing the transaction.', + severity: 'error', + message: 'Unhandled error occured when executing the transaction.' 
}); } const path = `/apis/${this.configuration.apiSuite}/tests/steps?testRunId=${this.remoteId}`; this._performRequestAsync(path, 'POST', data, (err) => { - if (err) { return callback(err); } + if (err) { + return callback(err); + } callback(); }); }); emitter.on('end', (callback) => { - if (this.serverError === true) { return callback(); } + if (this.serverError === true) { + return callback(); + } const data = { endedAt: Math.round(new Date().getTime() / 1000), result: this.stats, - status: (this.stats.failures > 0 || this.stats.errors > 0) ? 'failed' : 'passed', - logs: (this.runner && this.runner.logs && this.runner.logs.length) ? this.runner.logs : undefined, + status: + this.stats.failures > 0 || this.stats.errors > 0 ? 'failed' : 'passed', + logs: + this.runner && this.runner.logs && this.runner.logs.length + ? this.runner.logs + : undefined }; const path = `/apis/${this.configuration.apiSuite}/tests/run/${this.remoteId}`; this._performRequestAsync(path, 'PATCH', data, (error) => { - if (error) { return callback(error); } - const reportUrl = this.reportUrl || `https://app.apiary.io/${this.configuration.apiSuite}/tests/run/${this.remoteId}`; + if (error) { + return callback(error); + } + const reportUrl = + this.reportUrl || + `https://app.apiary.io/${this.configuration.apiSuite}/tests/run/${this.remoteId}`; reporterOutputLogger.complete(`See results in Apiary at: ${reportUrl}`); callback(); }); }); }; - ApiaryReporter.prototype._createStep = function _createStep(test, callback) { - if (this.serverError === true) { return callback(); } + if (this.serverError === true) { + return callback(); + } const data = this._transformTestToReporter(test); const path = `/apis/${this.configuration.apiSuite}/tests/steps?testRunId=${this.remoteId}`; this._performRequestAsync(path, 'POST', data, (error) => { - if (error) { return callback(error); } + if (error) { + return callback(error); + } callback(); }); }; - -ApiaryReporter.prototype._performRequestAsync = function _performRequestAsync(path, method, reqBody, callback) { +ApiaryReporter.prototype._performRequestAsync = function _performRequestAsync( + path, + method, + reqBody, + callback +) { const handleRequest = (err, res, resBody) => { let parsedBody; if (err) { @@ -208,7 +258,9 @@ ApiaryReporter.prototype._performRequestAsync = function _performRequestAsync(pa logger.debug('Requesting Apiary API errored:', `${err}` || err.code); if (Array.from(CONNECTION_ERRORS).includes(err.code)) { - return callback(new Error('Apiary reporter could not connect to Apiary API')); + return callback( + new Error('Apiary reporter could not connect to Apiary API') + ); } return callback(err); } @@ -226,7 +278,11 @@ ${error.message}\n${resBody} return callback(err); } - const info = { headers: res.headers, statusCode: res.statusCode, body: parsedBody }; + const info = { + headers: res.headers, + statusCode: res.statusCode, + body: parsedBody + }; logger.debug('Apiary reporter response:', JSON.stringify(info, null, 2)); @@ -237,7 +293,7 @@ ${error.message}\n${resBody} const system = `${os.type()} ${os.release()}; ${os.arch()}`; const headers = { 'User-Agent': `Dredd Apiary Reporter/${packageData.version} (${system})`, - 'Content-Type': 'application/json', + 'Content-Type': 'application/json' }; const options = clone(this.config.http || {}); @@ -257,7 +313,10 @@ About to perform an ${protocol} request from Apiary reporter to Apiary API: ${options.method} ${options.uri} \ (${body ? 
'with' : 'without'} body) `); - logger.debug('Request details:', JSON.stringify({ options, body }, null, 2)); + logger.debug( + 'Request details:', + JSON.stringify({ options, body }, null, 2) + ); return request(options, handleRequest); } catch (error) { this.serverError = true; @@ -266,8 +325,9 @@ to Apiary API: ${options.method} ${options.uri} \ } }; - -ApiaryReporter.prototype._transformTestToReporter = function _transformTestToReporter(test) { +ApiaryReporter.prototype._transformTestToReporter = function _transformTestToReporter( + test +) { return { testRunId: this.remoteId, origin: test.origin, @@ -279,10 +339,9 @@ ApiaryReporter.prototype._transformTestToReporter = function _transformTestToRep realResponse: test.actual, expectedResponse: test.expected, validationResult: test.results || {}, - errors: test.errors || [], - }, + errors: test.errors || [] + } }; }; - -module.exports = ApiaryReporter; +export default ApiaryReporter; diff --git a/lib/reporters/BaseReporter.js b/lib/reporters/BaseReporter.js index 8f816f046..f1cefea36 100644 --- a/lib/reporters/BaseReporter.js +++ b/lib/reporters/BaseReporter.js @@ -1,59 +1,59 @@ -const logger = require('../logger'); +import logger from '../logger' function BaseReporter(emitter, stats) { - this.type = 'base'; - this.stats = stats; - this.configureEmitter(emitter); - logger.debug(`Using '${this.type}' reporter.`); + this.type = 'base' + this.stats = stats + this.configureEmitter(emitter) + logger.debug(`Using '${this.type}' reporter.`) } BaseReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('start', (apiDescriptions, callback) => { - this.stats.start = new Date(); - callback(); - }); + this.stats.start = new Date() + callback() + }) emitter.on('end', (callback) => { - this.stats.end = new Date(); - this.stats.duration = this.stats.end - this.stats.start; - callback(); - }); + this.stats.end = new Date() + this.stats.duration = this.stats.end - this.stats.start + callback() + }) emitter.on('test start', (test) => { - this.stats.tests += 1; - test.start = new Date(); - }); + this.stats.tests += 1 + test.start = new Date() + }) emitter.on('test pass', (test) => { - this.stats.passes += 1; - test.end = new Date(); + this.stats.passes += 1 + test.end = new Date() if (typeof test.start === 'string') { - test.start = new Date(test.start); + test.start = new Date(test.start) } - test.duration = test.end - test.start; - }); + test.duration = test.end - test.start + }) emitter.on('test skip', () => { - this.stats.skipped += 1; - }); + this.stats.skipped += 1 + }) emitter.on('test fail', (test) => { - this.stats.failures += 1; - test.end = new Date(); + this.stats.failures += 1 + test.end = new Date() if (typeof test.start === 'string') { - test.start = new Date(test.start); + test.start = new Date(test.start) } - test.duration = test.end - test.start; - }); + test.duration = test.end - test.start + }) emitter.on('test error', (error, test) => { - this.stats.errors += 1; - test.end = new Date(); + this.stats.errors += 1 + test.end = new Date() if (typeof test.start === 'string') { - test.start = new Date(test.start); + test.start = new Date(test.start) } - test.duration = test.end - test.start; - }); -}; + test.duration = test.end - test.start + }) +} -module.exports = BaseReporter; +export default BaseReporter diff --git a/lib/reporters/CLIReporter.js b/lib/reporters/CLIReporter.js index a6acc5d55..27f97d835 100644 --- a/lib/reporters/CLIReporter.js +++ b/lib/reporters/CLIReporter.js @@ -1,7 +1,6 @@ -const logger = 
require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const prettifyResponse = require('../prettifyResponse'); - +import logger from '../logger' +import reporterOutputLogger from './reporterOutputLogger' +import prettifyResponse from '../prettifyResponse' const CONNECTION_ERRORS = [ 'ECONNRESET', @@ -10,88 +9,102 @@ const CONNECTION_ERRORS = [ 'ETIMEDOUT', 'ECONNREFUSED', 'EHOSTUNREACH', - 'EPIPE', -]; - + 'EPIPE' +] function CLIReporter(emitter, stats, inlineErrors, details) { - this.type = 'cli'; - this.stats = stats; - this.inlineErrors = inlineErrors; - this.details = details; - this.errors = []; + this.type = 'cli' + this.stats = stats + this.inlineErrors = inlineErrors + this.details = details + this.errors = [] - this.configureEmitter(emitter); + this.configureEmitter(emitter) - logger.debug(`Using '${this.type}' reporter.`); + logger.debug(`Using '${this.type}' reporter.`) } CLIReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('start', (apiDescriptions, callback) => { - logger.debug('Beginning Dredd testing...'); - callback(); - }); + logger.debug('Beginning Dredd testing...') + callback() + }) emitter.on('end', (callback) => { if (!this.inlineErrors) { if (this.errors.length) { - reporterOutputLogger.info('Displaying failed tests...'); + reporterOutputLogger.info('Displaying failed tests...') } this.errors.forEach((test) => { - reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`); - reporterOutputLogger.fail(test.message); - if (test.request) reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`); - if (test.expected) reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`); - if (test.actual) reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`); - }); + reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`) + reporterOutputLogger.fail(test.message) + if (test.request) + reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`) + if (test.expected) + reporterOutputLogger.expected( + `\n${prettifyResponse(test.expected)}\n` + ) + if (test.actual) + reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`) + }) } if (this.stats.tests > 0) { - reporterOutputLogger.complete(`${this.stats.passes} passing, ` - + `${this.stats.failures} failing, ` - + `${this.stats.errors} errors, ` - + `${this.stats.skipped} skipped, ` - + `${this.stats.tests} total`); + reporterOutputLogger.complete( + `${this.stats.passes} passing, ` + + `${this.stats.failures} failing, ` + + `${this.stats.errors} errors, ` + + `${this.stats.skipped} skipped, ` + + `${this.stats.tests} total` + ) } - reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`); - callback(); - }); + reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`) + callback() + }) emitter.on('test pass', (test) => { - reporterOutputLogger.pass(`${test.title} duration: ${test.duration}ms`); + reporterOutputLogger.pass(`${test.title} duration: ${test.duration}ms`) if (this.details) { - reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`); - reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`); - reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`); + reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`) + reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`) + reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`) } - }); + }) - emitter.on('test 
skip', test => reporterOutputLogger.skip(test.title)); + emitter.on('test skip', (test) => reporterOutputLogger.skip(test.title)) emitter.on('test fail', (test) => { - reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`); + reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`) if (this.inlineErrors) { - reporterOutputLogger.fail(test.message); - if (test.request) { reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`); } - if (test.expected) { reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`); } - if (test.actual) { reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`); } + reporterOutputLogger.fail(test.message) + if (test.request) { + reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`) + } + if (test.expected) { + reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`) + } + if (test.actual) { + reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`) + } } else { - this.errors.push(test); + this.errors.push(test) } - }); + }) emitter.on('test error', (error, test) => { if (CONNECTION_ERRORS.includes(error.code)) { - test.message = 'Error connecting to server under test!'; - reporterOutputLogger.error(test.message); + test.message = 'Error connecting to server under test!' + reporterOutputLogger.error(test.message) } else { - reporterOutputLogger.error(error.stack); + reporterOutputLogger.error(error.stack) } - reporterOutputLogger.error(`${test.title} duration: ${test.duration}ms`); - if (!this.inlineErrors) { this.errors.push(test); } - }); -}; + reporterOutputLogger.error(`${test.title} duration: ${test.duration}ms`) + if (!this.inlineErrors) { + this.errors.push(test) + } + }) +} -module.exports = CLIReporter; +export default CLIReporter diff --git a/lib/reporters/DotReporter.js b/lib/reporters/DotReporter.js index efd28abd3..7d2a29ffb 100644 --- a/lib/reporters/DotReporter.js +++ b/lib/reporters/DotReporter.js @@ -1,70 +1,74 @@ -const logger = require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const prettifyResponse = require('../prettifyResponse'); +import reporterOutputLogger from './reporterOutputLogger' +import prettifyResponse from '../prettifyResponse' +import logger from '../logger' function DotReporter(emitter, stats) { - this.type = 'dot'; - this.stats = stats; - this.errors = []; + this.type = 'dot' + this.stats = stats + this.errors = [] - this.configureEmitter(emitter); + this.configureEmitter(emitter) - logger.debug(`Using '${this.type}' reporter.`); + logger.debug(`Using '${this.type}' reporter.`) } DotReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('start', (apiDescriptions, callback) => { - logger.debug('Beginning Dredd testing...'); - callback(); - }); + logger.debug('Beginning Dredd testing...') + callback() + }) emitter.on('end', (callback) => { if (this.stats.tests > 0) { if (this.errors.length > 0) { - this.write('\n'); - reporterOutputLogger.info('Displaying failed tests...'); + this.write('\n') + reporterOutputLogger.info('Displaying failed tests...') for (const test of this.errors) { - reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`); - reporterOutputLogger.fail(test.message); - reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`); - reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`); - reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`); + reporterOutputLogger.fail( + 
`${test.title} duration: ${test.duration}ms` + ) + reporterOutputLogger.fail(test.message) + reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`) + reporterOutputLogger.expected( + `\n${prettifyResponse(test.expected)}\n` + ) + reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`) } } - this.write('\n'); + this.write('\n') reporterOutputLogger.complete(`\ ${this.stats.passes} passing, ${this.stats.failures} failing, \ ${this.stats.errors} errors, ${this.stats.skipped} skipped\ -`); - reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`); +`) + reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`) - callback(); + callback() } - }); + }) emitter.on('test pass', () => { - this.write('.'); - }); + this.write('.') + }) emitter.on('test skip', () => { - this.write('-'); - }); + this.write('-') + }) emitter.on('test fail', (test) => { - this.write('F'); - this.errors.push(test); - }); + this.write('F') + this.errors.push(test) + }) emitter.on('test error', (error, test) => { - this.write('E'); - test.message = `\nError: \n${error}\nStacktrace: \n${error.stack}\n`; - this.errors.push(test); - }); -}; + this.write('E') + test.message = `\nError: \n${error}\nStacktrace: \n${error.stack}\n` + this.errors.push(test) + }) +} DotReporter.prototype.write = function write(str) { - process.stdout.write(str); -}; + process.stdout.write(str) +} -module.exports = DotReporter; +export default DotReporter diff --git a/lib/reporters/HTMLReporter.js b/lib/reporters/HTMLReporter.js index d29f1bb8a..c8e008f4c 100644 --- a/lib/reporters/HTMLReporter.js +++ b/lib/reporters/HTMLReporter.js @@ -1,118 +1,136 @@ -const { EventEmitter } = require('events'); -const fs = require('fs'); -const { inherits } = require('util'); +import { EventEmitter } from 'events' +import fs from 'fs' +import { inherits } from 'util' -const untildify = require('untildify'); -const makeDir = require('make-dir'); -const md = require('markdown-it')(); -const pathmodule = require('path'); +import untildify from 'untildify' +import makeDir from 'make-dir' +import createMarkdownIt from 'markdown-it' +import * as pathmodule from 'path' -const logger = require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const prettifyResponse = require('../prettifyResponse'); +import logger from '../logger' +import reporterOutputLogger from './reporterOutputLogger' +import prettifyResponse from '../prettifyResponse' + +const md = createMarkdownIt() function HTMLReporter(emitter, stats, path, details) { - EventEmitter.call(this); + EventEmitter.call(this) - this.type = 'html'; - this.stats = stats; - this.buf = ''; - this.level = 1; - this.details = details; - this.path = this.sanitizedPath(path); + this.type = 'html' + this.stats = stats + this.buf = '' + this.level = 1 + this.details = details + this.path = this.sanitizedPath(path) - this.configureEmitter(emitter); + this.configureEmitter(emitter) - logger.debug(`Using '${this.type}' reporter.`); + logger.debug(`Using '${this.type}' reporter.`) } -HTMLReporter.prototype.sanitizedPath = function sanitizedPath(path = './report.html') { - const filePath = pathmodule.resolve(untildify(path)); +HTMLReporter.prototype.sanitizedPath = function sanitizedPath( + path = './report.html' +) { + const filePath = pathmodule.resolve(untildify(path)) if (fs.existsSync(filePath)) { - logger.warn(`File exists at ${filePath}, will be overwritten...`); + logger.warn(`File exists at ${filePath}, will be overwritten...`) } - return 
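// Editorial note, not part of this diff: the `sanitizedPath` helper here uses
// 'untildify' to expand a leading tilde before resolving, so a value such as
// '~/report.html' becomes an absolute path like '/home/user/report.html'
// (the exact home directory is hypothetical), and an existing file at that
// path is overwritten after the warning is logged.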
filePath; -}; + return filePath +} HTMLReporter.prototype.configureEmitter = function configureEmitter(emitter) { - const title = str => `${Array(this.level).join('#')} ${str}`; + const title = (str) => `${Array(this.level).join('#')} ${str}` emitter.on('start', (apiDescriptions, callback) => { - this.level++; - this.buf += `${title('Dredd Tests')}\n`; - callback(); - }); + this.level++ + this.buf += `${title('Dredd Tests')}\n` + callback() + }) emitter.on('end', (callback) => { - this.buf += '\n---'; - this.buf += `\n${title('Summary')}`; + this.buf += '\n---' + this.buf += `\n${title('Summary')}` this.buf += `\n**Tests completed:** ${this.stats.passes} passing, ${this.stats.failures} failing, ${this.stats.errors} errors, ${this.stats.skipped} skipped, ${this.stats.tests} total. - `; - this.buf += `\n\n**Tests took:** ${this.stats.duration}ms.`; + ` + this.buf += `\n\n**Tests took:** ${this.stats.duration}ms.` - const html = md.render(this.buf); + const html = md.render(this.buf) makeDir(pathmodule.dirname(this.path)) .then(() => { fs.writeFile(this.path, html, (error) => { - if (error) { reporterOutputLogger.error(error); } - callback(); - }); + if (error) { + reporterOutputLogger.error(error) + } + callback() + }) }) .catch((err) => { - reporterOutputLogger.error(err); - callback(); - }); - }); + reporterOutputLogger.error(err) + callback() + }) + }) emitter.on('test start', () => { - this.level++; - }); + this.level++ + }) emitter.on('test pass', (test) => { - this.buf += `${title(`Pass: ${test.title}`)}\n`; + this.buf += `${title(`Pass: ${test.title}`)}\n` if (this.details) { - this.level++; - this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse(test.request)}\n\`\`\`\n\n`; - this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse(test.expected)}\n\`\`\`\n\n`; - this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse(test.actual)}\n\`\`\`\n\n`; - this.level--; + this.level++ + this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse( + test.request + )}\n\`\`\`\n\n` + this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse( + test.expected + )}\n\`\`\`\n\n` + this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse( + test.actual + )}\n\`\`\`\n\n` + this.level-- } - this.level--; - }); + this.level-- + }) emitter.on('test skip', (test) => { - this.buf += `${title(`Skip: ${test.title}`)}\n`; - this.level--; - }); + this.buf += `${title(`Skip: ${test.title}`)}\n` + this.level-- + }) emitter.on('test fail', (test) => { - this.buf += title(`Fail: ${test.title}\n`); - - this.level++; - this.buf += `${title('Message')}\n\`\`\`\n${test.message}\n\`\`\`\n\n`; - this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse(test.request)}\n\`\`\`\n\n`; - this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse(test.expected)}\n\`\`\`\n\n`; - this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse(test.actual)}\n\`\`\`\n\n`; - this.level--; - - this.level--; - }); + this.buf += title(`Fail: ${test.title}\n`) + + this.level++ + this.buf += `${title('Message')}\n\`\`\`\n${test.message}\n\`\`\`\n\n` + this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse( + test.request + )}\n\`\`\`\n\n` + this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse( + test.expected + )}\n\`\`\`\n\n` + this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse( + test.actual + )}\n\`\`\`\n\n` + this.level-- + + this.level-- + }) emitter.on('test error', (error, test) => { - this.buf += title(`Error: ${test.title}\n`); - this.buf += '\n```\n'; - this.buf += `\nError: 
\n${error}\nStacktrace: \n${error.stack}\n`; - this.buf += '```\n\n'; - this.level--; - }); -}; + this.buf += title(`Error: ${test.title}\n`) + this.buf += '\n```\n' + this.buf += `\nError: \n${error}\nStacktrace: \n${error.stack}\n` + this.buf += '```\n\n' + this.level-- + }) +} -inherits(HTMLReporter, EventEmitter); +inherits(HTMLReporter, EventEmitter) -module.exports = HTMLReporter; +export default HTMLReporter diff --git a/lib/reporters/MarkdownReporter.js b/lib/reporters/MarkdownReporter.js index c7bbe3b89..3ffb10aaf 100644 --- a/lib/reporters/MarkdownReporter.js +++ b/lib/reporters/MarkdownReporter.js @@ -1,106 +1,124 @@ -const { EventEmitter } = require('events'); -const fs = require('fs'); -const { inherits } = require('util'); +import { EventEmitter } from 'events' +import fs from 'fs' +import { inherits } from 'util' -const untildify = require('untildify'); -const makeDir = require('make-dir'); -const pathmodule = require('path'); +import untildify from 'untildify' +import makeDir from 'make-dir' +import * as pathmodule from 'path' -const logger = require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const prettifyResponse = require('../prettifyResponse'); +import logger from '../logger' +import reporterOutputLogger from './reporterOutputLogger' +import prettifyResponse from '../prettifyResponse' function MarkdownReporter(emitter, stats, path, details) { - EventEmitter.call(this); + EventEmitter.call(this) - this.type = 'markdown'; - this.stats = stats; - this.buf = ''; - this.level = 1; - this.details = details; - this.path = this.sanitizedPath(path); + this.type = 'markdown' + this.stats = stats + this.buf = '' + this.level = 1 + this.details = details + this.path = this.sanitizedPath(path) - this.configureEmitter(emitter); + this.configureEmitter(emitter) - logger.debug(`Using '${this.type}' reporter.`); + logger.debug(`Using '${this.type}' reporter.`) } -MarkdownReporter.prototype.sanitizedPath = function sanitizedPath(path = './report.md') { - const filePath = pathmodule.resolve(untildify(path)); +MarkdownReporter.prototype.sanitizedPath = function sanitizedPath( + path = './report.md' +) { + const filePath = pathmodule.resolve(untildify(path)) if (fs.existsSync(filePath)) { - logger.warn(`File exists at ${filePath}, will be overwritten...`); + logger.warn(`File exists at ${filePath}, will be overwritten...`) } - return filePath; -}; + return filePath +} -MarkdownReporter.prototype.configureEmitter = function configureEmitter(emitter) { - const title = str => `${Array(this.level).join('#')} ${str}`; +MarkdownReporter.prototype.configureEmitter = function configureEmitter( + emitter +) { + const title = (str) => `${Array(this.level).join('#')} ${str}` emitter.on('start', (apiDescriptions, callback) => { - this.level++; - this.buf += `${title('Dredd Tests')}\n`; - callback(); - }); + this.level++ + this.buf += `${title('Dredd Tests')}\n` + callback() + }) emitter.on('end', (callback) => { makeDir(pathmodule.dirname(this.path)) .then(() => { fs.writeFile(this.path, this.buf, (error) => { - if (error) { reporterOutputLogger.error(error); } - callback(); - }); + if (error) { + reporterOutputLogger.error(error) + } + callback() + }) }) .catch((err) => { - reporterOutputLogger.error(err); - callback(); - }); - }); + reporterOutputLogger.error(err) + callback() + }) + }) emitter.on('test start', () => { - this.level++; - }); + this.level++ + }) emitter.on('test pass', (test) => { - this.buf += `${title(`Pass: ${test.title}`)}\n`; + this.buf += 
`${title(`Pass: ${test.title}`)}\n` if (this.details) { - this.level++; - this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse(test.request)}\n\`\`\`\n\n`; - this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse(test.expected)}\n\`\`\`\n\n`; - this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse(test.actual)}\n\`\`\`\n\n`; - this.level--; + this.level++ + this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse( + test.request + )}\n\`\`\`\n\n` + this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse( + test.expected + )}\n\`\`\`\n\n` + this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse( + test.actual + )}\n\`\`\`\n\n` + this.level-- } - this.level--; - }); + this.level-- + }) emitter.on('test skip', (test) => { - this.buf += `${title(`Skip: ${test.title}`)}\n`; - this.level--; - }); + this.buf += `${title(`Skip: ${test.title}`)}\n` + this.level-- + }) emitter.on('test fail', (test) => { - this.buf += title(`Fail: ${test.title}\n`); - - this.level++; - this.buf += `${title('Message')}\n\`\`\`\n${test.message}\n\`\`\`\n\n`; - this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse(test.request)}\n\`\`\`\n\n`; - this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse(test.expected)}\n\`\`\`\n\n`; - this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse(test.actual)}\n\`\`\`\n\n`; - this.level--; - - this.level--; - }); + this.buf += title(`Fail: ${test.title}\n`) + + this.level++ + this.buf += `${title('Message')}\n\`\`\`\n${test.message}\n\`\`\`\n\n` + this.buf += `${title('Request')}\n\`\`\`\n${prettifyResponse( + test.request + )}\n\`\`\`\n\n` + this.buf += `${title('Expected')}\n\`\`\`\n${prettifyResponse( + test.expected + )}\n\`\`\`\n\n` + this.buf += `${title('Actual')}\n\`\`\`\n${prettifyResponse( + test.actual + )}\n\`\`\`\n\n` + this.level-- + + this.level-- + }) emitter.on('test error', (error, test) => { - this.buf += title(`Error: ${test.title}\n`); - this.buf += '\n```\n'; - this.buf += `\nError: \n${error}\nStacktrace: \n${error.stack}\n`; - this.buf += '```\n\n'; - this.level--; - }); -}; + this.buf += title(`Error: ${test.title}\n`) + this.buf += '\n```\n' + this.buf += `\nError: \n${error}\nStacktrace: \n${error.stack}\n` + this.buf += '```\n\n' + this.level-- + }) +} -inherits(MarkdownReporter, EventEmitter); +inherits(MarkdownReporter, EventEmitter) -module.exports = MarkdownReporter; +export default MarkdownReporter diff --git a/lib/reporters/NyanReporter.js b/lib/reporters/NyanReporter.js index 2dea8216a..984999502 100644 --- a/lib/reporters/NyanReporter.js +++ b/lib/reporters/NyanReporter.js @@ -1,224 +1,236 @@ -const tty = require('tty'); +import * as tty from 'tty' -const logger = require('../logger'); -const prettifyResponse = require('../prettifyResponse'); -const reporterOutputLogger = require('./reporterOutputLogger'); +import logger from '../logger' +import reporterOutputLogger from './reporterOutputLogger' +import prettifyResponse from '../prettifyResponse' function NyanCatReporter(emitter, stats) { - let windowWidth; + let windowWidth - this.type = 'nyan'; - this.stats = stats; - this.isatty = tty.isatty(1) && tty.isatty(2); + this.type = 'nyan' + this.stats = stats + this.isatty = tty.isatty(1) && tty.isatty(2) if (this.isatty) { if (process.stdout.getWindowSize) { - windowWidth = process.stdout.getWindowSize(1)[0]; + windowWidth = process.stdout.getWindowSize(1)[0] } else { - windowWidth = tty.getWindowSize()[1]; + windowWidth = tty.getWindowSize()[1] } } else { - windowWidth = 75; + windowWidth = 75 
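// Editorial note, not part of this diff: the `| 0` in the trajectoryWidthMax
// assignment below is a bitwise idiom that truncates the float to an integer
// for the positive values involved here (hence the eslint-disable for
// 'no-bitwise'); for example (75 * 0.75) | 0 === 56, the same result as
// Math.trunc(75 * 0.75).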
} - this.rainbowColors = this.generateColors(); - this.colorIndex = 0; - this.numberOfLines = 4; - this.trajectories = [[], [], [], []]; - this.nyanCatWidth = 11; - this.trajectoryWidthMax = (((windowWidth * 0.75) | 0) - this.nyanCatWidth); // eslint-disable-line no-bitwise - this.scoreboardWidth = 5; - this.tick = 0; - this.errors = []; + this.rainbowColors = this.generateColors() + this.colorIndex = 0 + this.numberOfLines = 4 + this.trajectories = [[], [], [], []] + this.nyanCatWidth = 11 + this.trajectoryWidthMax = ((windowWidth * 0.75) | 0) - this.nyanCatWidth // eslint-disable-line no-bitwise + this.scoreboardWidth = 5 + this.tick = 0 + this.errors = [] - this.configureEmitter(emitter); + this.configureEmitter(emitter) - logger.debug(`Using '${this.type}' reporter.`); + logger.debug(`Using '${this.type}' reporter.`) } -NyanCatReporter.prototype.configureEmitter = function configureEmitter(emitter) { +NyanCatReporter.prototype.configureEmitter = function configureEmitter( + emitter +) { emitter.on('start', (apiDescriptions, callback) => { - this.cursorHide(); - this.draw(); - callback(); - }); + this.cursorHide() + this.draw() + callback() + }) emitter.on('end', (callback) => { - this.cursorShow(); - let i = 0; + this.cursorShow() + let i = 0 while (i < this.numberOfLines) { - this.write('\n'); - i++; + this.write('\n') + i++ } if (this.errors.length > 0) { - this.write('\n'); - reporterOutputLogger.info('Displaying failed tests...'); + this.write('\n') + reporterOutputLogger.info('Displaying failed tests...') for (const test of this.errors) { - reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`); - reporterOutputLogger.fail(test.message); - reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`); - reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`); - reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`); + reporterOutputLogger.fail(`${test.title} duration: ${test.duration}ms`) + reporterOutputLogger.fail(test.message) + reporterOutputLogger.request(`\n${prettifyResponse(test.request)}\n`) + reporterOutputLogger.expected(`\n${prettifyResponse(test.expected)}\n`) + reporterOutputLogger.actual(`\n${prettifyResponse(test.actual)}\n\n`) } } - reporterOutputLogger.complete(`${this.stats.passes} passing, ${this.stats.failures} failing, ${this.stats.errors} errors, ${this.stats.skipped} skipped`); - reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`); - callback(); - }); + reporterOutputLogger.complete( + `${this.stats.passes} passing, ${this.stats.failures} failing, ${this.stats.errors} errors, ${this.stats.skipped} skipped` + ) + reporterOutputLogger.complete(`Tests took ${this.stats.duration}ms`) + callback() + }) emitter.on('test pass', () => { - this.draw(); - }); + this.draw() + }) emitter.on('test skip', () => { - this.draw(); - }); + this.draw() + }) emitter.on('test fail', (test) => { - this.errors.push(test); - this.draw(); - }); + this.errors.push(test) + this.draw() + }) emitter.on('test error', (error, test) => { - test.message = `\nError: \n${error}\nStacktrace: \n${error.stack}\n`; - this.errors.push(test); - this.draw(); - }); -}; + test.message = `\nError: \n${error}\nStacktrace: \n${error.stack}\n` + this.errors.push(test) + this.draw() + }) +} NyanCatReporter.prototype.draw = function draw() { - this.appendRainbow(); - this.drawScoreboard(); - this.drawRainbow(); - this.drawNyanCat(); - this.tick = !this.tick; -}; + this.appendRainbow() + this.drawScoreboard() + this.drawRainbow() + 
this.drawNyanCat() + this.tick = !this.tick +} NyanCatReporter.prototype.drawScoreboard = function drawScoreboard() { const colors = { fail: 31, skipped: 36, - pass: 32, - }; + pass: 32 + } // Capture outer `this` const draw = (color, n) => { - this.write(' '); - this.write(`\u001b[${color}m${n}\u001b[0m`); - this.write('\n'); - }; + this.write(' ') + this.write(`\u001b[${color}m${n}\u001b[0m`) + this.write('\n') + } - draw(colors.pass, this.stats.passes); - draw(colors.fail, this.stats.failures); - draw(colors.fail, this.stats.errors); - draw(colors.skipped, this.stats.skipped); + draw(colors.pass, this.stats.passes) + draw(colors.fail, this.stats.failures) + draw(colors.fail, this.stats.errors) + draw(colors.skipped, this.stats.skipped) - this.write('\n'); - this.cursorUp(this.numberOfLines + 1); -}; + this.write('\n') + this.cursorUp(this.numberOfLines + 1) +} NyanCatReporter.prototype.appendRainbow = function appendRainbow() { - const segment = (this.tick ? '_' : '-'); - const rainbowified = this.rainbowify(segment); - const result = []; + const segment = this.tick ? '_' : '-' + const rainbowified = this.rainbowify(segment) + const result = [] - let index = 0; + let index = 0 while (index < this.numberOfLines) { - const trajectory = this.trajectories[index]; - if (trajectory.length >= this.trajectoryWidthMax) { trajectory.shift(); } - trajectory.push(rainbowified); - result.push(index++); + const trajectory = this.trajectories[index] + if (trajectory.length >= this.trajectoryWidthMax) { + trajectory.shift() + } + trajectory.push(rainbowified) + result.push(index++) } - return result; -}; + return result +} NyanCatReporter.prototype.drawRainbow = function drawRainbow() { this.trajectories.forEach((line) => { - this.write(`\u001b[${this.scoreboardWidth}C`); - this.write(line.join('')); - this.write('\n'); - }); + this.write(`\u001b[${this.scoreboardWidth}C`) + this.write(line.join('')) + this.write('\n') + }) - this.cursorUp(this.numberOfLines); -}; + this.cursorUp(this.numberOfLines) +} NyanCatReporter.prototype.drawNyanCat = function drawNyanCat() { - const startWidth = this.scoreboardWidth + this.trajectories[0].length; - const color = `\u001b[${startWidth}C`; - let padding = ''; - this.write(color); - this.write('_,------,'); - this.write('\n'); - this.write(color); - padding = (this.tick ? ' ' : ' '); - this.write(`_|${padding}/\\_/\\ `); - this.write('\n'); - this.write(color); - padding = (this.tick ? '_' : '__'); - const tail = (this.tick ? '~' : '^'); - this.write(`${tail}|${padding}${this.face()} `); - this.write('\n'); - this.write(color); - padding = (this.tick ? ' ' : ' '); - this.write(`${padding}'' '' `); - this.write('\n'); - this.cursorUp(this.numberOfLines); -}; + const startWidth = this.scoreboardWidth + this.trajectories[0].length + const color = `\u001b[${startWidth}C` + let padding = '' + this.write(color) + this.write('_,------,') + this.write('\n') + this.write(color) + padding = this.tick ? ' ' : ' ' + this.write(`_|${padding}/\\_/\\ `) + this.write('\n') + this.write(color) + padding = this.tick ? '_' : '__' + const tail = this.tick ? '~' : '^' + this.write(`${tail}|${padding}${this.face()} `) + this.write('\n') + this.write(color) + padding = this.tick ? 
' ' : ' ' + this.write(`${padding}'' '' `) + this.write('\n') + this.cursorUp(this.numberOfLines) +} NyanCatReporter.prototype.face = function face() { if (this.stats.failures) { - return '( x .x)'; - } if (this.stats.skipped) { - return '( o .o)'; - } if (this.stats.passes) { - return '( ^ .^)'; + return '( x .x)' } - return '( - .-)'; -}; + if (this.stats.skipped) { + return '( o .o)' + } + if (this.stats.passes) { + return '( ^ .^)' + } + return '( - .-)' +} NyanCatReporter.prototype.cursorUp = function cursorUp(n) { - this.write(`\u001b[${n}A`); -}; + this.write(`\u001b[${n}A`) +} NyanCatReporter.prototype.cursorDown = function cursorDown(n) { - this.write(`\u001b[${n}B`); -}; + this.write(`\u001b[${n}B`) +} NyanCatReporter.prototype.cursorShow = function cursorShow() { - if (this.isatty) { this.write('\u001b[?25h'); } -}; + if (this.isatty) { + this.write('\u001b[?25h') + } +} NyanCatReporter.prototype.cursorHide = function cursorHide() { - if (this.isatty) { this.write('\u001b[?25l'); } -}; + if (this.isatty) { + this.write('\u001b[?25l') + } +} NyanCatReporter.prototype.generateColors = function generateColors() { - const colors = []; - let i = 0; - - while (i < (6 * 7)) { - const pi3 = Math.floor(Math.PI / 3); - const n = (i * (1.0 / 6)); - const r = Math.floor((3 * Math.sin(n)) + 3); - const g = Math.floor((3 * Math.sin(n + (2 * pi3))) + 3); - const b = Math.floor((3 * Math.sin(n + (4 * pi3))) + 3); - colors.push((36 * r) + (6 * g) + b + 16); - i++; + const colors = [] + let i = 0 + + while (i < 6 * 7) { + const pi3 = Math.floor(Math.PI / 3) + const n = i * (1.0 / 6) + const r = Math.floor(3 * Math.sin(n) + 3) + const g = Math.floor(3 * Math.sin(n + 2 * pi3) + 3) + const b = Math.floor(3 * Math.sin(n + 4 * pi3) + 3) + colors.push(36 * r + 6 * g + b + 16) + i++ } - return colors; -}; + return colors +} NyanCatReporter.prototype.rainbowify = function rainbowify(str) { - const color = this.rainbowColors[this.colorIndex % this.rainbowColors.length]; - this.colorIndex += 1; - return `\u001b[38;5;${color}m${str}\u001b[0m`; -}; + const color = this.rainbowColors[this.colorIndex % this.rainbowColors.length] + this.colorIndex += 1 + return `\u001b[38;5;${color}m${str}\u001b[0m` +} NyanCatReporter.prototype.write = function write(str) { - process.stdout.write(str); -}; + process.stdout.write(str) +} -module.exports = NyanCatReporter; +export default NyanCatReporter diff --git a/lib/reporters/XUnitReporter.js b/lib/reporters/XUnitReporter.js index 9f8253648..7896c7a56 100644 --- a/lib/reporters/XUnitReporter.js +++ b/lib/reporters/XUnitReporter.js @@ -1,15 +1,15 @@ -const { EventEmitter } = require('events'); -const fs = require('fs'); -const { inherits } = require('util'); +import { EventEmitter } from 'events'; +import fs from 'fs'; +import { inherits } from 'util'; -const htmlencode = require('htmlencode'); -const untildify = require('untildify'); -const makeDir = require('make-dir'); -const pathmodule = require('path'); +import * as htmlencode from 'htmlencode'; +import untildify from 'untildify'; +import makeDir from 'make-dir'; +import * as pathmodule from 'path'; -const logger = require('../logger'); -const reporterOutputLogger = require('./reporterOutputLogger'); -const prettifyResponse = require('../prettifyResponse'); +import logger from '../logger'; +import reporterOutputLogger from './reporterOutputLogger'; +import prettifyResponse from '../prettifyResponse'; function XUnitReporter(emitter, stats, path, details) { EventEmitter.call(this); @@ -24,27 +24,41 @@ function 
XUnitReporter(emitter, stats, path, details) { logger.debug(`Using '${this.type}' reporter.`); } -XUnitReporter.prototype.updateSuiteStats = function updateSuiteStats(path, stats, callback) { +XUnitReporter.prototype.updateSuiteStats = function updateSuiteStats( + path, + stats, + callback +) { fs.readFile(path, (err, data) => { if (!err) { data = data.toString(); const position = data.toString().indexOf('\n'); if (position !== -1) { const restOfFile = data.substr(position + 1); - const newStats = this.toTag('testsuite', { - name: 'Dredd Tests', - tests: stats.tests, - failures: stats.failures, - errors: stats.errors, - skip: stats.skipped, - timestamp: (new Date()).toUTCString(), - time: stats.duration / 1000, - }, false); + const newStats = this.toTag( + 'testsuite', + { + name: 'Dredd Tests', + tests: stats.tests, + failures: stats.failures, + errors: stats.errors, + skip: stats.skipped, + timestamp: new Date().toUTCString(), + time: stats.duration / 1000 + }, + false + ); const xmlHeader = ''; - fs.writeFile(path, `${xmlHeader}\n${newStats}\n${restOfFile}`, (error) => { - if (error) { reporterOutputLogger.error(error); } - callback(); - }); + fs.writeFile( + path, + `${xmlHeader}\n${newStats}\n${restOfFile}`, + (error) => { + if (error) { + reporterOutputLogger.error(error); + } + callback(); + } + ); } else { callback(); } @@ -67,14 +81,18 @@ XUnitReporter.prototype.toTag = function toTag(name, attrs, close, content) { const end = close ? '/>' : '>'; const pairs = []; if (attrs) { - Object.keys(attrs).forEach(key => pairs.push(`${key}="${attrs[key]}"`)); + Object.keys(attrs).forEach((key) => pairs.push(`${key}="${attrs[key]}"`)); } let tag = `<${name}${pairs.length ? ` ${pairs.join(' ')}` : ''}${end}`; - if (content) { tag += `${content} { makeDir(pathmodule.dirname(this.path)) .then(() => { - this.appendLine(this.path, this.toTag('testsuite', { - name: 'Dredd Tests', - tests: this.stats.tests, - failures: this.stats.failures, - errors: this.stats.errors, - skip: this.stats.skipped, - timestamp: (new Date()).toUTCString(), - time: this.stats.duration / 1000, - }, false)); + this.appendLine( + this.path, + this.toTag( + 'testsuite', + { + name: 'Dredd Tests', + tests: this.stats.tests, + failures: this.stats.failures, + errors: this.stats.errors, + skip: this.stats.skipped, + timestamp: new Date().toUTCString(), + time: this.stats.duration / 1000 + }, + false + ) + ); callback(); }) .catch((err) => { @@ -111,7 +136,7 @@ XUnitReporter.prototype.configureEmitter = function configureEmitter(emitter) { emitter.on('test pass', (test) => { const attrs = { name: htmlencode.htmlEncode(test.title), - time: test.duration / 1000, + time: test.duration / 1000 }; if (this.details) { @@ -125,7 +150,12 @@ ${prettifyResponse(test.actual)}\ `; this.appendLine( this.path, - this.toTag('testcase', attrs, false, this.toTag('system-out', null, false, this.cdata(deets))) + this.toTag( + 'testcase', + attrs, + false, + this.toTag('system-out', null, false, this.cdata(deets)) + ) ); } else { this.appendLine(this.path, this.toTag('testcase', attrs, true)); @@ -135,15 +165,18 @@ ${prettifyResponse(test.actual)}\ emitter.on('test skip', (test) => { const attrs = { name: htmlencode.htmlEncode(test.title), - time: test.duration / 1000, + time: test.duration / 1000 }; - this.appendLine(this.path, this.toTag('testcase', attrs, false, this.toTag('skipped', null, true))); + this.appendLine( + this.path, + this.toTag('testcase', attrs, false, this.toTag('skipped', null, true)) + ); }); emitter.on('test fail', (test) => 
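// Editorial note, not part of this diff: `toTag` above builds an XML fragment
// from a tag name, an attributes object, and a self-closing flag. For example,
// the 'test skip' handler's call `this.toTag('skipped', null, true)` yields
// '<skipped/>', while passing `close: false` keeps the tag open so content
// (such as a CDATA block) can be appended, as in the 'test pass' handler.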
{ const attrs = { name: htmlencode.htmlEncode(test.title), - time: test.duration / 1000, + time: test.duration / 1000 }; const diff = `\ Message: @@ -157,23 +190,33 @@ ${prettifyResponse(test.actual)}\ `; this.appendLine( this.path, - this.toTag('testcase', attrs, false, this.toTag('failure', null, false, this.cdata(diff))) + this.toTag( + 'testcase', + attrs, + false, + this.toTag('failure', null, false, this.cdata(diff)) + ) ); }); emitter.on('test error', (error, test) => { const attrs = { name: htmlencode.htmlEncode(test.title), - time: test.duration / 1000, + time: test.duration / 1000 }; const errorMessage = `\nError: \n${error}\nStacktrace: \n${error.stack}`; this.appendLine( this.path, - this.toTag('testcase', attrs, false, this.toTag('failure', null, false, this.cdata(errorMessage))) + this.toTag( + 'testcase', + attrs, + false, + this.toTag('failure', null, false, this.cdata(errorMessage)) + ) ); }); }; inherits(XUnitReporter, EventEmitter); -module.exports = XUnitReporter; +export default XUnitReporter; diff --git a/lib/reporters/reporterOutputLogger.js b/lib/reporters/reporterOutputLogger.js index 93b8cf03c..9f2883483 100644 --- a/lib/reporters/reporterOutputLogger.js +++ b/lib/reporters/reporterOutputLogger.js @@ -1,8 +1,8 @@ -const winston = require('winston'); +const winston = require('winston') -module.exports = new (winston.Logger)({ +const reporterOutputLogger = new winston.Logger({ transports: [ - new (winston.transports.Console)({ colorize: true, level: 'info' }), + new winston.transports.Console({ colorize: true, level: 'info' }) ], levels: { info: 10, @@ -15,7 +15,7 @@ module.exports = new (winston.Logger)({ hook: 3, request: 2, skip: 1, - error: 0, + error: 0 }, colors: { info: 'blue', @@ -28,6 +28,8 @@ module.exports = new (winston.Logger)({ hook: 'green', request: 'green', skip: 'yellow', - error: 'red', - }, -}); + error: 'red' + } +}) + +export default reporterOutputLogger diff --git a/lib/resolveLocations.js b/lib/resolveLocations.js deleted file mode 100644 index cbfaecd96..000000000 --- a/lib/resolveLocations.js +++ /dev/null @@ -1,28 +0,0 @@ -const resolvePaths = require('./resolvePaths'); -const isURL = require('./isURL'); - - -/** - * Takes an array of strings representing API description document locations - * and resolves all relative paths and globs - * - * Keeps URLs intact. Keeps the original order. Throws in case there's a glob - * pattern which doesn't resolve to any existing files. - * - * @param {string} workingDirectory - * @param {string[]} locations - * @returns {string[]} - */ -module.exports = function resolveLocations(workingDirectory, locations) { - const resolvedLocations = locations - // resolves paths to local files, produces an array of arrays - .map(location => ( - isURL(location) - ? [location] - : resolvePaths(workingDirectory, [location]) - )) - // flattens the array of arrays - .reduce((flatArray, array) => flatArray.concat(array), []); - - return Array.from(new Set(resolvedLocations)); -}; diff --git a/lib/resolveLocations.ts b/lib/resolveLocations.ts new file mode 100644 index 000000000..4b034e718 --- /dev/null +++ b/lib/resolveLocations.ts @@ -0,0 +1,24 @@ +import resolvePaths from './resolvePaths' +import isURL from './isURL' + +/** + * Takes an array of strings representing API description document locations + * and resolves all relative paths and globs + * + * Keeps URLs intact. Keeps the original order. Throws in case there's a glob + * pattern which doesn't resolve to any existing files. 
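 *
 * Editorial example, not part of this diff (the matched file names are
 * hypothetical):
 *
 *   resolveLocations('/home/user/project', [
 *     'http://example.com/api.apib',
 *     './specs/*.apib',
 *   ])
 *   // => [ 'http://example.com/api.apib',
 *   //      '/home/user/project/specs/one.apib',
 *   //      '/home/user/project/specs/two.apib' ]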
+ */ +export default function resolveLocations( + workingDirectory: string, + locations: string[], +): string[] { + const resolvedLocations = locations + // resolves paths to local files, produces an array of arrays + .map((location) => + isURL(location) ? [location] : resolvePaths(workingDirectory, [location]), + ) + // flattens the array of arrays + .reduce((flatArray, array) => flatArray.concat(array), []) + + return Array.from(new Set(resolvedLocations)) +} diff --git a/lib/resolveModule.js b/lib/resolveModule.js deleted file mode 100644 index 6dfd3531b..000000000 --- a/lib/resolveModule.js +++ /dev/null @@ -1,10 +0,0 @@ -const fs = require('fs'); -const path = require('path'); - - -module.exports = function resolveModule(workingDirectory, moduleName) { - const absolutePath = path.resolve(workingDirectory, moduleName); - return fs.existsSync(absolutePath) || fs.existsSync(`${absolutePath}.js`) - ? absolutePath - : moduleName; -}; diff --git a/lib/resolveModule.ts b/lib/resolveModule.ts new file mode 100644 index 000000000..8a08d8da7 --- /dev/null +++ b/lib/resolveModule.ts @@ -0,0 +1,12 @@ +import fs from 'fs' +import * as path from 'path' + +export default function resolveModule( + workingDirectory: string, + moduleName: string, +): string { + const absolutePath = path.resolve(workingDirectory, moduleName) + return fs.existsSync(absolutePath) || fs.existsSync(`${absolutePath}.js`) + ? absolutePath + : moduleName +} diff --git a/lib/resolvePaths.js b/lib/resolvePaths.js index bae00d57e..02f10c9eb 100644 --- a/lib/resolvePaths.js +++ b/lib/resolvePaths.js @@ -1,47 +1,48 @@ -const fs = require('fs'); -const path = require('path'); -const glob = require('glob'); - +import fs from 'fs' +import * as path from 'path' +import * as glob from 'glob' // Ensure platform-agnostic 'path.basename' function -const basename = process.platform === 'win32' ? path.win32.basename : path.basename; - +const basename = + process.platform === 'win32' ? path.win32.basename : path.basename function resolveGlob(workingDirectory, pattern) { // 'glob.sync()' does not resolve paths, only glob patterns if (glob.hasMagic(pattern)) { - return glob.sync(pattern, { cwd: workingDirectory }) - .map(matchingPath => path.resolve(workingDirectory, matchingPath)); + return glob + .sync(pattern, { cwd: workingDirectory }) + .map((matchingPath) => path.resolve(workingDirectory, matchingPath)) } - const resolvedPath = path.resolve(workingDirectory, pattern); - return fs.existsSync(resolvedPath) ? [resolvedPath] : []; + const resolvedPath = path.resolve(workingDirectory, pattern) + return fs.existsSync(resolvedPath) ? [resolvedPath] : [] } - /** * Resolve paths to files * * Resolves glob patterns and sorts the files alphabetically by their basename. * Throws in case there's a pattern which doesn't resolve to any existing files. 
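 *
 * Editorial example, not part of this diff (the matched file names are
 * hypothetical):
 *
 *   resolvePaths('/home/user/project', ['./specs/*.apib'])
 *   // => [ '/home/user/project/specs/one.apib',
 *   //      '/home/user/project/specs/two.apib' ]  (sorted by basename)
 *
 *   resolvePaths('/home/user/project', ['./missing/*.apib'])
 *   // throws: Could not find any files on path: './missing/*.apib'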
*/ -module.exports = function resolvePaths(workingDirectory, patterns) { - if (!patterns || patterns.length < 1) { return []; } +export default function resolvePaths(workingDirectory, patterns) { + if (!patterns || patterns.length < 1) { + return [] + } const resolvedPaths = patterns .map((pattern) => { - const paths = resolveGlob(workingDirectory, pattern); + const paths = resolveGlob(workingDirectory, pattern) if (paths.length < 1) { - throw new Error(`Could not find any files on path: '${pattern}'`); + throw new Error(`Could not find any files on path: '${pattern}'`) } - return paths; + return paths }) .reduce((flatPaths, paths) => flatPaths.concat(paths), []) .sort((p1, p2) => { - const [basename1, basename2] = [basename(p1), basename(p2)]; - if (basename1 < basename2) return -1; - if (basename1 > basename2) return 1; - return 0; - }); + const [basename1, basename2] = [basename(p1), basename(p2)] + if (basename1 < basename2) return -1 + if (basename1 > basename2) return 1 + return 0 + }) - return Array.from(new Set(resolvedPaths)); // keep only unique items -}; + return Array.from(new Set(resolvedPaths)) // keep only unique items +} diff --git a/lib/sortTransactions.js b/lib/sortTransactions.js deleted file mode 100644 index a12bf0894..000000000 --- a/lib/sortTransactions.js +++ /dev/null @@ -1,30 +0,0 @@ -// Often, API description is arranged with a sequence of methods that lends -// itself to understanding by the human reading the documentation. -// -// However, the sequence of methods may not be appropriate for the machine -// reading the documentation in order to test the API. -// -// By sorting the transactions by their methods, it is possible to ensure that -// objects are created before they are read, updated, or deleted. -module.exports = function sortTransactions(arr) { - arr.forEach((a, i) => { a._index = i; }); - - arr.sort((a, b) => { - const sortedMethods = [ - 'CONNECT', 'OPTIONS', - 'POST', 'GET', 'HEAD', 'PUT', 'PATCH', 'DELETE', - 'TRACE', - ]; - - const methodIndexA = sortedMethods.indexOf(a.request.method); - const methodIndexB = sortedMethods.indexOf(b.request.method); - - if (methodIndexA < methodIndexB) { return -1; } - if (methodIndexA > methodIndexB) { return 1; } - return a._index - b._index; - }); - - arr.forEach(a => delete a._index); - - return arr; -}; diff --git a/lib/sortTransactions.ts b/lib/sortTransactions.ts new file mode 100644 index 000000000..15fa992e6 --- /dev/null +++ b/lib/sortTransactions.ts @@ -0,0 +1,57 @@ +import { RESTMethod, Transaction } from './__general' + +const sortedMethods: RESTMethod[] = [ + RESTMethod.CONNECT, + RESTMethod.OPTIONS, + RESTMethod.POST, + RESTMethod.GET, + RESTMethod.HEAD, + RESTMethod.PUT, + RESTMethod.PATCH, + RESTMethod.DELETE, + RESTMethod.TRACE, +] + +// Often, API description is arranged with a sequence of methods that lends +// itself to understanding by the human reading the documentation. +// +// However, the sequence of methods may not be appropriate for the machine +// reading the documentation in order to test the API. +// +// By sorting the transactions by their methods, it is possible to ensure that +// objects are created before they are read, updated, or deleted. +export default function sortTransactions(transactions: Transaction[]) { + // Convert the list of transactions into a list of tuples + // that hold each trasnaction index and details. 
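  //
  // Editorial example, not part of this diff: for transactions whose request
  // methods are, in document order, GET, DELETE, POST, GET, the sort below
  // yields POST, GET, GET, DELETE: POST precedes GET and DELETE in
  // `sortedMethods`, and the two GETs keep their original relative order
  // because equal methods fall back to comparing the original indices.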
+ const tempTransactions: Array<[number, Transaction]> = transactions.map( + (transaction, index) => [index, transaction], + ) + + tempTransactions.sort( + ([leftIndex, leftTransaction], [rightIndex, rightTransaction]) => { + const methodIndexA = sortedMethods.indexOf(leftTransaction.request.method) + const methodIndexB = sortedMethods.indexOf( + rightTransaction.request.method, + ) + + // Sort transactions according to the transaction's request method + if (methodIndexA < methodIndexB) { + return -1 + } + + if (methodIndexA > methodIndexB) { + return 1 + } + + // In case two transactions' request methods are the same, + // preserve the original order of those transactions + return leftIndex - rightIndex + }, + ) + + const cleanTransactions = tempTransactions.map( + ([_, transaction]) => transaction, + ) + + return cleanTransactions +} diff --git a/lib/which.js b/lib/which.js deleted file mode 100644 index 167459507..000000000 --- a/lib/which.js +++ /dev/null @@ -1,12 +0,0 @@ -const which = require('which'); - -module.exports = { - which(command) { - try { - which.sync(command); - return true; - } catch (e) { - return false; - } - }, -}; diff --git a/lib/which.ts b/lib/which.ts new file mode 100644 index 000000000..af34e8299 --- /dev/null +++ b/lib/which.ts @@ -0,0 +1,12 @@ +import which from 'which' + +export default { + which(command: string) { + try { + which.sync(command) + return true + } catch (e) { + return false + } + } +} diff --git a/lib/options.json b/options.json similarity index 100% rename from lib/options.json rename to options.json diff --git a/package.json b/package.json index 53608080a..77bdf7633 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,9 @@ "name": "dredd", "version": "0.0.0-semantically-released", "description": "HTTP API Testing Framework", - "main": "lib/Dredd.js", + "esnext": "lib/index.ts", + "main": "build/index.js", + "typings": "typings/", "bin": { "dredd": "bin/dredd" }, @@ -10,6 +12,8 @@ "node": ">=8" }, "scripts": { + "start": "tsc --build tsconfig.json --watch", + "build": "tsc --build tsconfig.json", "docs:lint": "sphinx-build -nW -b linkcheck ./docs ./docs/_build", "docs:test-extensions": "python -m unittest docs/_extensions/*.py --verbose", "docs:build": "sphinx-build -nW -b html ./docs ./docs/_build", @@ -24,7 +28,8 @@ "ci:docs": "npm run docs:build", "ci:test": "npm test", "ci:smoke": "bash ./scripts/smoke.sh", - "ci:release": "semantic-release && npm dist-tag add \"dredd@$(npm view dredd version)\" stable" + "ci:release": "semantic-release && npm dist-tag add \"dredd@$(npm view dredd version)\" stable", + "prepack": "npm run build" }, "repository": { "type": "git", @@ -32,7 +37,8 @@ }, "files": [ "bin", - "lib", + "build", + "options.json", "README.md" ], "dependencies": { @@ -81,12 +87,6 @@ "mocha": "6.2.0", "nock": "10.0.6", "ps-node": "0.1.6", - "rollup": "1.21.2", - "rollup-plugin-commonjs": "10.1.0", - "rollup-plugin-json": "4.0.0", - "rollup-plugin-node-externals": "2.0.0", - "rollup-plugin-node-resolve": "5.2.0", - "rollup-plugin-typescript2": "0.24.1", "semantic-release": "15.13.24", "sinon": "7.4.2", "ts-node": "8.3.0", diff --git a/test/fixtures/apiDescriptions.js b/test/fixtures/apiDescriptions.js index 06ba73b63..7e9ad0e9f 100644 --- a/test/fixtures/apiDescriptions.js +++ b/test/fixtures/apiDescriptions.js @@ -1,50 +1,69 @@ -module.exports = [ +export default [ { - content: 'FORMAT: 1A\n\n# Machines API\n\n# Group Machines\n\n# Machines collection [/machines/{id}]\n + Parameters\n - id (number, `1`)\n\n## Get Machines 
[GET]\n\n- Request (application/json)\n + Parameters\n - id (number, `2`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n\n- Request (application/json)\n + Parameters\n - id (number, `3`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n', + content: + 'FORMAT: 1A\n\n# Machines API\n\n# Group Machines\n\n# Machines collection [/machines/{id}]\n + Parameters\n - id (number, `1`)\n\n## Get Machines [GET]\n\n- Request (application/json)\n + Parameters\n - id (number, `2`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n\n- Request (application/json)\n + Parameters\n - id (number, `3`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n', location: './test/fixtures/multiple-examples.apib', annotations: [ { component: 'apiDescriptionParser', code: 3, - message: 'no parameters specified, expected a nested list of parameters, one parameter per list item', + message: + 'no parameters specified, expected a nested list of parameters, one parameter per list item', location: [[178, 13]], - type: 'warning', + type: 'warning' }, { component: 'apiDescriptionParser', code: 5, message: 'ignoring unrecognized block', location: [[195, 19]], - type: 'warning', + type: 'warning' }, { component: 'apiDescriptionParser', code: 10, - message: 'message-body asset is expected to be a pre-formatted code block, every of its line indented by exactly 8 spaces or 2 tabs', - location: [[269, 2], [275, 4], [283, 25], [312, 20], [336, 4], [344, 2]], - type: 'warning', + message: + 'message-body asset is expected to be a pre-formatted code block, every of its line indented by exactly 8 spaces or 2 tabs', + location: [ + [269, 2], + [275, 4], + [283, 25], + [312, 20], + [336, 4], + [344, 2] + ], + type: 'warning' }, { component: 'apiDescriptionParser', code: 3, - message: 'no parameters specified, expected a nested list of parameters, one parameter per list item', + message: + 'no parameters specified, expected a nested list of parameters, one parameter per list item', location: [[378, 13]], - type: 'warning', + type: 'warning' }, { component: 'apiDescriptionParser', code: 5, message: 'ignoring unrecognized block', location: [[395, 19]], - type: 'warning', + type: 'warning' }, { component: 'apiDescriptionParser', code: 10, - message: 'message-body asset is expected to be a pre-formatted code block, every of its line indented by exactly 8 spaces or 2 tabs', - location: [[469, 2], [475, 4], [483, 25], [512, 20], [536, 4], [544, 2]], - type: 'warning', - }, - ], - }, -]; + message: + 'message-body asset is expected to be a pre-formatted code block, every of its line indented by exactly 8 spaces or 2 tabs', + location: [ + [469, 2], + [475, 4], + [483, 25], + [512, 20], + [536, 4], + [544, 2] + ], + type: 'warning' + } + ] + } +] diff --git a/test/integration/annotations-test.js b/test/integration/annotations-test.js index 9f5d7fbfb..719e2607b 100644 --- a/test/integration/annotations-test.js +++ b/test/integration/annotations-test.js @@ -1,138 +1,152 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); - -const Dredd = require('../../lib/Dredd'); +import sinon from 'sinon' +import { assert } from 'chai' +import Dredd from '../../lib/Dredd' function compileTransactions(apiDescription, logger, callback) { - const dredd = new Dredd({ apiDescriptions: 
[apiDescription] }); - dredd.logger = logger; - dredd.transactionRunner.run = sinon.stub().callsArg(1); - dredd.run(callback); + const dredd = new Dredd({ apiDescriptions: [apiDescription] }) + dredd.logger = logger + dredd.transactionRunner.run = sinon.stub().callsArg(1) + dredd.run(callback) } - describe('Parser and compiler annotations', () => { describe('when processing a file with parser warnings', () => { - const logger = { debug: sinon.spy(), log: sinon.spy() }; - let error; + const logger = { debug: sinon.spy(), log: sinon.spy() } + let error before((done) => { - compileTransactions(` + compileTransactions( + ` FORMAT: 1A # Dummy API ## Index [GET /] + Response - `, logger, (compileError) => { - error = compileError; - done(); - }); - }); + `, + logger, + (compileError) => { + error = compileError + done() + } + ) + }) it("doesn't abort Dredd", () => { - assert.isUndefined(error); - }); + assert.isUndefined(error) + }) it('logs warnings', () => { - assert.equal(logger.log.getCall(0).args[0], 'warn'); - }); + assert.equal(logger.log.getCall(0).args[0], 'warn') + }) it('logs the warnings with line numbers', () => { assert.match( logger.log.getCall(0).args[1], /parser warning in configuration\.apiDescriptions\[0\]:5 \(from line 5 column 3 to column 11\)/i - ); - }); - }); + ) + }) + }) describe('when processing a file with parser errors', () => { - const logger = { debug: sinon.spy(), log: sinon.spy() }; - let error; + const logger = { debug: sinon.spy(), log: sinon.spy() } + let error before((done) => { - compileTransactions(` + compileTransactions( + ` FORMAT: 1A # Dummy API ## Index [GET /] + Response \t+ Body - `, logger, (compileError) => { - error = compileError; - done(); - }); - }); + `, + logger, + (compileError) => { + error = compileError + done() + } + ) + }) it('aborts Dredd', () => { - assert.instanceOf(error, Error); - }); + assert.instanceOf(error, Error) + }) it('logs errors', () => { - assert.equal(logger.log.getCall(0).args[0], 'error'); - }); + assert.equal(logger.log.getCall(0).args[0], 'error') + }) it('logs the errors with line numbers', () => { assert.match( logger.log.getCall(0).args[1], /parser error in configuration\.apiDescriptions\[0\]:6 \(line 6 column 1\)/i - ); - }); - }); + ) + }) + }) describe('when processing a file with compilation warnings', () => { - const logger = { debug: sinon.spy(), log: sinon.spy() }; - let error; + const logger = { debug: sinon.spy(), log: sinon.spy() } + let error before((done) => { - compileTransactions(` + compileTransactions( + ` FORMAT: 1A # Dummy API ## Index [GET /{foo}] + Response 200 - `, logger, (compileError) => { - error = compileError; - done(); - }); - }); + `, + logger, + (compileError) => { + error = compileError + done() + } + ) + }) it("doesn't abort Dredd", () => { - assert.isUndefined(error); - }); + assert.isUndefined(error) + }) it('logs warnings', () => { - assert.equal(logger.log.getCall(0).args[0], 'warn'); - }); + assert.equal(logger.log.getCall(0).args[0], 'warn') + }) it('logs the warnings with a transaction path', () => { assert.match( logger.log.getCall(0).args[1], /uri template expansion warning in configuration\.apiDescriptions\[0\] \(Dummy API > Index > Index\)/i - ); - }); - }); + ) + }) + }) describe('when processing a file with compilation errors', () => { - const logger = { debug: sinon.spy(), log: sinon.spy() }; - let error; + const logger = { debug: sinon.spy(), log: sinon.spy() } + let error before((done) => { - compileTransactions(` + compileTransactions( + ` FORMAT: 1A # Dummy API ## 
Index [DELETE /{?param}] + Parameters + param (required) + Response 204 - `, logger, (compileError) => { - error = compileError; - done(); - }); - }); + `, + logger, + (compileError) => { + error = compileError + done() + } + ) + }) it('aborts Dredd', () => { - assert.instanceOf(error, Error); - }); + assert.instanceOf(error, Error) + }) it('logs errors', () => { - assert.equal(logger.log.getCall(0).args[0], 'error'); - }); + assert.equal(logger.log.getCall(0).args[0], 'error') + }) it('logs the errors with a transaction path', () => { assert.match( logger.log.getCall(0).args[1], /uri parameters validation error in configuration\.apiDescriptions\[0\] \(Dummy API > Index > Index\)/i - ); - }); - }); -}); + ) + }) + }) +}) diff --git a/test/integration/apiary-reporter-test.js b/test/integration/apiary-reporter-test.js index e3919fc2d..e3bee323a 100644 --- a/test/integration/apiary-reporter-test.js +++ b/test/integration/apiary-reporter-test.js @@ -1,382 +1,450 @@ -const R = require('ramda'); -const bodyParser = require('body-parser'); -const clone = require('clone'); -const express = require('express'); -const fs = require('fs'); -const { assert } = require('chai'); +import R from 'ramda' +import bodyParser from 'body-parser' +import clone from 'clone' +import express from 'express' +import fs from 'fs' +import { assert } from 'chai' -const logger = require('../../lib/logger'); -const reporterOutputLogger = require('../../lib/reporters/reporterOutputLogger'); -const Dredd = require('../../lib/Dredd'); +import logger from '../../lib/logger' +import reporterOutputLogger from '../../lib/reporters/reporterOutputLogger' +import Dredd from '../../lib/Dredd' -const PORT = 9876; +const PORT = 9876 -let exitStatus; +let exitStatus -let output = ''; +let output = '' function execCommand(options = {}, cb) { - output = ''; - exitStatus = null; - let finished = false; + output = '' + exitStatus = null + let finished = false const defaultOptions = { server: `http://127.0.0.1:${PORT}`, options: { - loglevel: 'warning', - }, - }; - const dreddOptions = R.mergeDeepLeft(options, defaultOptions); + loglevel: 'warning' + } + } + const dreddOptions = R.mergeDeepLeft(options, defaultOptions) new Dredd(dreddOptions).run((error, stats = {}) => { if (!finished) { - finished = true; + finished = true if (error ? error.message : undefined) { - output += error.message; + output += error.message } - exitStatus = (error || (((1 * stats.failures) + (1 * stats.errors)) > 0)) ? 1 : 0; - cb(); + exitStatus = error || 1 * stats.failures + 1 * stats.errors > 0 ? 1 : 0 + cb() } - }); + }) } - function record(transport, level, message) { - output += `\n${level}: ${message}`; + output += `\n${level}: ${message}` } - // These tests were separated out from a larger file. 
They deserve a rewrite, // see https://github.com/apiaryio/dredd/issues/1288 describe('Apiary reporter', () => { before(() => { - logger.transports.console.silent = true; - logger.on('logging', record); + logger.transports.console.silent = true + logger.on('logging', record) - reporterOutputLogger.transports.console.silent = true; - reporterOutputLogger.on('logging', record); - }); + reporterOutputLogger.transports.console.silent = true + reporterOutputLogger.on('logging', record) + }) after(() => { - logger.transports.console.silent = false; - logger.removeListener('logging', record); + logger.transports.console.silent = false + logger.removeListener('logging', record) - reporterOutputLogger.transports.console.silent = false; - reporterOutputLogger.removeListener('logging', record); - }); + reporterOutputLogger.transports.console.silent = false + reporterOutputLogger.removeListener('logging', record) + }) describe("when using reporter -r apiary with 'debug' logging with custom apiaryApiKey and apiaryApiName", () => { - let server; - let server2; - let receivedRequest; - let receivedRequestTestRuns; - let receivedHeaders; - let receivedHeadersRuns; - exitStatus = null; + let server + let server2 + let receivedRequest + let receivedRequestTestRuns + let receivedHeaders + let receivedHeadersRuns + exitStatus = null before((done) => { - const cmd = { - options: { - path: ['./test/fixtures/single-get.apib'], - reporter: ['apiary'], - loglevel: 'debug', - }, - custom: { - apiaryApiUrl: `http://127.0.0.1:${PORT + 1}`, - apiaryApiKey: 'the-key', - apiaryApiName: 'the-api-name', - }, - }; - - receivedHeaders = {}; - receivedHeadersRuns = {}; - - const apiary = express(); - const app = express(); - - apiary.use(bodyParser.json({ size: '5mb' })); - - apiary.post('/apis/*', (req, res) => { - if (req.body && (req.url.indexOf('/tests/steps') > -1)) { - if (!receivedRequest) { receivedRequest = clone(req.body); } - Object.keys(req.headers).forEach((name) => { - receivedHeaders[name.toLowerCase()] = req.headers[name]; - }); - } - if (req.body && (req.url.indexOf('/tests/runs') > -1)) { - if (!receivedRequestTestRuns) { receivedRequestTestRuns = clone(req.body); } - Object.keys(req.headers).forEach((name) => { - receivedHeadersRuns[name.toLowerCase()] = req.headers[name]; - }); + try { + const cmd = { + options: { + path: ['./test/fixtures/single-get.apib'], + reporter: ['apiary'], + loglevel: 'debug' + }, + custom: { + apiaryApiUrl: `http://127.0.0.1:${PORT + 1}`, + apiaryApiKey: 'the-key', + apiaryApiName: 'the-api-name' + } } - res.status(201).json({ - _id: '1234_id', - testRunId: '6789_testRunId', - reportUrl: 'http://url.me/test/run/1234_id', - }); - }); - apiary.all('*', (req, res) => res.json({})); + receivedHeaders = {} + receivedHeadersRuns = {} - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const apiary = express() + const app = express() - server = app.listen(PORT, () => { - server2 = apiary.listen((PORT + 1), () => { - execCommand(cmd, () => server2.close(() => server.close(() => done()))); - }); - }); - }); + apiary.use(bodyParser.json({ size: '5mb' })) - it('should not print warning about missing Apiary API settings', () => assert.notInclude(output, 'Apiary API Key or API Project Subdomain were not provided.')); + apiary.post('/apis/*', (req, res) => { + if (req.body && req.url.indexOf('/tests/steps') > -1) { + if (!receivedRequest) { + receivedRequest = clone(req.body) + } + Object.keys(req.headers).forEach((name) => { + 
receivedHeaders[name.toLowerCase()] = req.headers[name] + }) + } + if (req.body && req.url.indexOf('/tests/runs') > -1) { + if (!receivedRequestTestRuns) { + receivedRequestTestRuns = clone(req.body) + } + Object.keys(req.headers).forEach((name) => { + receivedHeadersRuns[name.toLowerCase()] = req.headers[name] + }) + } + res.status(201).json({ + _id: '1234_id', + testRunId: '6789_testRunId', + reportUrl: 'http://url.me/test/run/1234_id' + }) + }) + + apiary.all('*', (req, res) => res.json({})) + + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) + + server = app.listen(PORT, () => { + server2 = apiary.listen(PORT + 1, () => { + execCommand(cmd, () => + server2.close(() => server.close(() => done())) + ) + }) + }) + } catch (error) { + throw error + } + }) + + it('should not print warning about missing Apiary API settings', () => + assert.notInclude( + output, + 'Apiary API Key or API Project Subdomain were not provided.' + )) it('should contain Authentication header thanks to apiaryApiKey and apiaryApiName configuration', () => { - assert.propertyVal(receivedHeaders, 'authentication', 'Token the-key'); - assert.propertyVal(receivedHeadersRuns, 'authentication', 'Token the-key'); - }); + assert.propertyVal(receivedHeaders, 'authentication', 'Token the-key') + assert.propertyVal(receivedHeadersRuns, 'authentication', 'Token the-key') + }) it('should send the test-run as a non-public one', () => { - assert.isObject(receivedRequestTestRuns); - assert.propertyVal(receivedRequestTestRuns, 'public', false); - }); + assert.isObject(receivedRequestTestRuns) + assert.propertyVal(receivedRequestTestRuns, 'public', false) + }) - it('should print using the new reporter', () => assert.include(output, 'http://url.me/test/run/1234_id')); + it('should print using the new reporter', () => + assert.include(output, 'http://url.me/test/run/1234_id')) it('should send results from Gavel', () => { - assert.isObject(receivedRequest); - assert.nestedProperty(receivedRequest, 'results.request'); - assert.nestedProperty(receivedRequest, 'results.realResponse'); - assert.nestedProperty(receivedRequest, 'results.expectedResponse'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.body.kind'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.headers.kind'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.statusCode.kind'); - - it('prints out an error message', () => assert.notEqual(exitStatus, 0)); - }); - }); + assert.isObject(receivedRequest) + assert.nestedProperty(receivedRequest, 'results.request') + assert.nestedProperty(receivedRequest, 'results.realResponse') + assert.nestedProperty(receivedRequest, 'results.expectedResponse') + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.body.kind' + ) + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.headers.kind' + ) + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.statusCode.kind' + ) + + it('prints out an error message', () => assert.notEqual(exitStatus, 0)) + }) + }) describe('when called with arguments', () => { describe("when using reporter -r apiary and the server isn't running", () => { - let server2; - let receivedRequest; - exitStatus = null; + let server2 + let receivedRequest + exitStatus = null before((done) => { const cmd = { options: { path: ['./test/fixtures/single-get.apib'], reporter: ['apiary'], - loglevel: 'debug', + loglevel: 'debug' }, custom: { 
apiaryReporterEnv: { - APIARY_API_URL: `http://127.0.0.1:${PORT + 1}`, - }, - }, - }; + APIARY_API_URL: `http://127.0.0.1:${PORT + 1}` + } + } + } - const apiary = express(); + const apiary = express() - apiary.use(bodyParser.json({ size: '5mb' })); + apiary.use(bodyParser.json({ size: '5mb' })) apiary.post('/apis/*', (req, res) => { - if (req.body && (req.url.indexOf('/tests/steps') > -1)) { - if (!receivedRequest) { receivedRequest = clone(req.body); } + if (req.body && req.url.indexOf('/tests/steps') > -1) { + if (!receivedRequest) { + receivedRequest = clone(req.body) + } } res.status(201).json({ _id: '1234_id', testRunId: '6789_testRunId', - reportUrl: 'http://url.me/test/run/1234_id', - }); - }); + reportUrl: 'http://url.me/test/run/1234_id' + }) + }) - apiary.all('*', (req, res) => res.json({})); + apiary.all('*', (req, res) => res.json({})) - server2 = apiary.listen((PORT + 1), () => execCommand(cmd, () => server2.close(() => {}))); + server2 = apiary.listen(PORT + 1, () => + execCommand(cmd, () => server2.close(() => {})) + ) - server2.on('close', done); - }); + server2.on('close', done) + }) - it('should print using the reporter', () => assert.include(output, 'http://url.me/test/run/1234_id')); + it('should print using the reporter', () => + assert.include(output, 'http://url.me/test/run/1234_id')) it('should send results from gavel', () => { - assert.isObject(receivedRequest); - assert.nestedProperty(receivedRequest, 'results.request'); - assert.nestedProperty(receivedRequest, 'results.expectedResponse'); - assert.nestedProperty(receivedRequest, 'results.errors'); - }); + assert.isObject(receivedRequest) + assert.nestedProperty(receivedRequest, 'results.request') + assert.nestedProperty(receivedRequest, 'results.expectedResponse') + assert.nestedProperty(receivedRequest, 'results.errors') + }) it('report should have message about server being down', () => { - const message = receivedRequest.results.errors[0].message; - assert.include(message, 'connect'); - }); - }); + const message = receivedRequest.results.errors[0].message + assert.include(message, 'connect') + }) + }) describe('when using reporter -r apiary', () => { - let server; - let server2; - let receivedRequest; - exitStatus = null; + let server + let server2 + let receivedRequest + exitStatus = null before((done) => { const cmd = { options: { path: ['./test/fixtures/single-get.apib'], reporter: ['apiary'], - loglevel: 'debug', + loglevel: 'debug' }, custom: { apiaryReporterEnv: { - APIARY_API_URL: `http://127.0.0.1:${PORT + 1}`, - }, - }, - }; + APIARY_API_URL: `http://127.0.0.1:${PORT + 1}` + } + } + } - const apiary = express(); - const app = express(); + const apiary = express() + const app = express() - apiary.use(bodyParser.json({ size: '5mb' })); + apiary.use(bodyParser.json({ size: '5mb' })) apiary.post('/apis/*', (req, res) => { - if (req.body && (req.url.indexOf('/tests/steps') > -1)) { - if (!receivedRequest) { receivedRequest = clone(req.body); } + if (req.body && req.url.indexOf('/tests/steps') > -1) { + if (!receivedRequest) { + receivedRequest = clone(req.body) + } } res.status(201).json({ _id: '1234_id', testRunId: '6789_testRunId', - reportUrl: 'http://url.me/test/run/1234_id', - }); - }); + reportUrl: 'http://url.me/test/run/1234_id' + }) + }) - apiary.all('*', (req, res) => res.json({})); + apiary.all('*', (req, res) => res.json({})) - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 
'willy' }]) + ) - server = app.listen(PORT, () => { server2 = apiary.listen((PORT + 1), () => {}); }); + server = app.listen(PORT, () => { + server2 = apiary.listen(PORT + 1, () => {}) + }) - execCommand(cmd, () => server2.close(() => server.close(() => {}))); + execCommand(cmd, () => server2.close(() => server.close(() => {}))) - server.on('close', done); - }); + server.on('close', done) + }) - it('should print warning about missing Apiary API settings', () => assert.include(output, 'Apiary API Key or API Project Subdomain were not provided.')); + it('should print warning about missing Apiary API settings', () => + assert.include( + output, + 'Apiary API Key or API Project Subdomain were not provided.' + )) - it('should print link to documentation', () => assert.include(output, 'https://dredd.org/en/latest/how-to-guides/#using-apiary-reporter-and-apiary-tests')); + it('should print link to documentation', () => + assert.include( + output, + 'https://dredd.org/en/latest/how-to-guides/#using-apiary-reporter-and-apiary-tests' + )) - it('should print using the new reporter', () => assert.include(output, 'http://url.me/test/run/1234_id')); + it('should print using the new reporter', () => + assert.include(output, 'http://url.me/test/run/1234_id')) it('should send results from Gavel', () => { - assert.isObject(receivedRequest); - assert.nestedProperty(receivedRequest, 'results.request'); - assert.nestedProperty(receivedRequest, 'results.realResponse'); - assert.nestedProperty(receivedRequest, 'results.expectedResponse'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.body.kind'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.headers.kind'); - assert.nestedProperty(receivedRequest, 'results.validationResult.fields.statusCode.kind'); - }); - }); - }); + assert.isObject(receivedRequest) + assert.nestedProperty(receivedRequest, 'results.request') + assert.nestedProperty(receivedRequest, 'results.realResponse') + assert.nestedProperty(receivedRequest, 'results.expectedResponse') + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.body.kind' + ) + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.headers.kind' + ) + assert.nestedProperty( + receivedRequest, + 'results.validationResult.fields.statusCode.kind' + ) + }) + }) + }) describe("when API description document should be loaded from 'http(s)://...' 
url", () => { - let app; - let server; - let connectedToServer = null; - let notFound; - let fileFound; + let app + let server + let connectedToServer = null + let notFound + let fileFound const errorCmd = { server: `http://127.0.0.1:${PORT + 1}`, options: { - path: [`http://127.0.0.1:${PORT + 1}/connection-error.apib`], - }, - }; + path: [`http://127.0.0.1:${PORT + 1}/connection-error.apib`] + } + } const wrongCmd = { options: { - path: [`http://127.0.0.1:${PORT}/not-found.apib`], - }, - }; + path: [`http://127.0.0.1:${PORT}/not-found.apib`] + } + } const goodCmd = { options: { - path: [`http://127.0.0.1:${PORT}/file.apib`], - }, - }; + path: [`http://127.0.0.1:${PORT}/file.apib`] + } + } - afterEach(() => { connectedToServer = null; }); + afterEach(() => { + connectedToServer = null + }) before((done) => { - app = express(); + app = express() app.use((req, res, next) => { - connectedToServer = true; - next(); - }); + connectedToServer = true + next() + }) - app.get('/', (req, res) => res.sendStatus(404)); + app.get('/', (req, res) => res.sendStatus(404)) app.get('/file.apib', (req, res) => { - fileFound = true; - res.type('text'); - fs.createReadStream('./test/fixtures/single-get.apib').pipe(res); - }); + fileFound = true + res.type('text') + fs.createReadStream('./test/fixtures/single-get.apib').pipe(res) + }) - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) app.get('/not-found.apib', (req, res) => { - notFound = true; - res.status(404).end(); - }); + notFound = true + res.status(404).end() + }) - server = app.listen(PORT, () => done()); - }); + server = app.listen(PORT, () => done()) + }) - after(done => server.close(() => { - app = null; - server = null; - done(); - })); + after((done) => + server.close(() => { + app = null + server = null + done() + }) + ) describe('and I try to load a file from bad hostname at all', () => { - before(done => execCommand(errorCmd, () => done())); + before((done) => execCommand(errorCmd, () => done())) - after(() => { connectedToServer = null; }); + after(() => { + connectedToServer = null + }) - it('should not send a GET to the server', () => assert.isNull(connectedToServer)); + it('should not send a GET to the server', () => + assert.isNull(connectedToServer)) - it('should exit with status 1', () => assert.equal(exitStatus, 1)); + it('should exit with status 1', () => assert.equal(exitStatus, 1)) it('should print error message to the output', () => { - assert.include(output, 'Unable to load API description document from'); - assert.include(output, 'connection-error.apib'); - }); - }); + assert.include(output, 'Unable to load API description document from') + assert.include(output, 'connection-error.apib') + }) + }) describe('and I try to load a file that does not exist from an existing server', () => { - before(done => execCommand(wrongCmd, () => done())); + before((done) => execCommand(wrongCmd, () => done())) - after(() => { connectedToServer = null; }); + after(() => { + connectedToServer = null + }) - it('should connect to the right server', () => assert.isTrue(connectedToServer)); + it('should connect to the right server', () => + assert.isTrue(connectedToServer)) - it('should send a GET to server at wrong URL', () => assert.isTrue(notFound)); + it('should send a GET to server at wrong URL', () => + assert.isTrue(notFound)) - it('should exit with status 1', () => assert.equal(exitStatus, 1)); + it('should exit with 
status 1', () => assert.equal(exitStatus, 1)) it('should print error message to the output', () => { - assert.include(output, 'Unable to load API description document from'); - assert.include(output, 'Dredd got HTTP 404 response without body'); - assert.include(output, 'not-found.apib'); - }); - }); + assert.include(output, 'Unable to load API description document from') + assert.include(output, 'Dredd got HTTP 404 response without body') + assert.include(output, 'not-found.apib') + }) + }) describe('and I try to load a file that actually is there', () => { - before(done => execCommand(goodCmd, () => done())); + before((done) => execCommand(goodCmd, () => done())) - it('should send a GET to the right server', () => assert.isTrue(connectedToServer)); + it('should send a GET to the right server', () => + assert.isTrue(connectedToServer)) - it('should send a GET to server at good URL', () => assert.isTrue(fileFound)); + it('should send a GET to server at good URL', () => + assert.isTrue(fileFound)) - it('should exit with status 0', () => assert.equal(exitStatus, 0)); - }); - }); -}); + it('should exit with status 0', () => assert.equal(exitStatus, 0)) + }) + }) +}) diff --git a/test/integration/childProcess-test.js b/test/integration/childProcess-test.js index c295ba7a5..f092699bf 100644 --- a/test/integration/childProcess-test.js +++ b/test/integration/childProcess-test.js @@ -1,8 +1,8 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); +import sinon from 'sinon'; +import { assert } from 'chai'; -const helpers = require('./helpers'); -const { spawn, signalTerm, signalKill } = require('../../lib/childProcess'); +import * as helpers from './helpers'; +import { spawn, signalTerm, signalKill } from '../../lib/childProcess'; const COFFEE_BIN = 'node_modules/.bin/coffee'; const WAIT_AFTER_COMMAND_SPAWNED_MS = 500; @@ -18,13 +18,17 @@ function runChildProcess(command, fn, callback) { terminated: false, exitStatus: undefined, signal: undefined, - onCrash, + onCrash }; const childProcess = spawn(COFFEE_BIN, [command]); - childProcess.stdout.on('data', (data) => { processInfo.stdout += data.toString(); }); - childProcess.stderr.on('data', (data) => { processInfo.stderr += data.toString(); }); + childProcess.stdout.on('data', (data) => { + processInfo.stdout += data.toString(); + }); + childProcess.stderr.on('data', (data) => { + processInfo.stderr += data.toString(); + }); function onExit(exitStatus, signal) { processInfo.terminated = true; @@ -33,7 +37,9 @@ function runChildProcess(command, fn, callback) { } childProcess.on('exit', onExit); - const onError = (err) => { processInfo.error = err; }; + const onError = (err) => { + processInfo.error = err; + }; childProcess.on('error', onError); childProcess.on('crash', onCrash); @@ -48,10 +54,8 @@ function runChildProcess(command, fn, callback) { processInfo.childProcess = childProcess; callback(null, processInfo); - }, - WAIT_AFTER_COMMAND_TERMINATED_MS); - }, - WAIT_AFTER_COMMAND_SPAWNED_MS); + }, WAIT_AFTER_COMMAND_TERMINATED_MS); + }, WAIT_AFTER_COMMAND_SPAWNED_MS); } describe('Babysitting Child Processes', () => { @@ -59,20 +63,28 @@ describe('Babysitting Child Processes', () => { describe('process with support for graceful termination', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', childProcess => childProcess.signalKill(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + 
runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => childProcess.signalKill(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); - it('does not log a message about being gracefully terminated', () => assert.notInclude(processInfo.stdout, 'exiting')); + it('does not log a message about being gracefully terminated', () => + assert.notInclude(processInfo.stdout, 'exiting')); it('gets terminated', () => assert.isTrue(processInfo.terminated)); if (process.platform === 'win32') { - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); } else { it('gets killed', () => assert.equal(processInfo.signal, 'SIGKILL')); - it('returns no status code', () => assert.isNull(processInfo.exitStatus)); + it('returns no status code', () => + assert.isNull(processInfo.exitStatus)); } it('does not emit an error', () => assert.isUndefined(processInfo.error)); }); @@ -80,105 +92,150 @@ describe('Babysitting Child Processes', () => { describe('process without support for graceful termination', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/endless-ignore-term.coffee', childProcess => childProcess.signalKill(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/endless-ignore-term.coffee', + (childProcess) => childProcess.signalKill(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); - it('does not log a message about ignoring graceful termination', () => assert.notInclude(processInfo.stdout, 'ignoring')); + it('does not log a message about ignoring graceful termination', () => + assert.notInclude(processInfo.stdout, 'ignoring')); it('gets terminated', () => assert.isTrue(processInfo.terminated)); if (process.platform === 'win32') { - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); } else { it('gets killed', () => assert.equal(processInfo.signal, 'SIGKILL')); - it('returns no status code', () => assert.isNull(processInfo.exitStatus)); + it('returns no status code', () => + assert.isNull(processInfo.exitStatus)); } it('does not emit an error', () => assert.isUndefined(processInfo.error)); }); }); - - ['signalTerm', 'terminate'].forEach(functionName => describe(`when gracefully terminated by childProcess.${functionName}()`, () => { - describe('process with support for graceful termination', () => { - let processInfo; - - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', childProcess => childProcess[functionName](), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('logs a message about being gracefully terminated', () => assert.include(processInfo.stdout, 'exiting')); - it('gets terminated', () => assert.isTrue(processInfo.terminated)); - if (process.platform !== 'win32') { // Windows does not have signals - it('does not get terminated directly by the signal', () => assert.isNull(processInfo.signal)); - } - it('returns zero status code', () => assert.equal(processInfo.exitStatus, 0)); - it('does not emit an 
error', () => assert.isUndefined(processInfo.error)); - }); - - describe('process without support for graceful termination', () => { - let processInfo; - - before(done => runChildProcess('test/fixtures/scripts/endless-ignore-term.coffee', childProcess => childProcess.terminate(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('logs a message about ignoring the graceful termination attempt', () => assert.include(processInfo.stdout, 'ignoring')); - it('does not get terminated', () => assert.isFalse(processInfo.terminated)); - it('has undefined status code', () => assert.isUndefined(processInfo.exitStatus)); - it('emits an error', () => assert.instanceOf(processInfo.error, Error)); - it('the error has a message about unsuccessful termination', () => assert.equal( - processInfo.error.message, - `Unable to gracefully terminate process ${processInfo.childProcess.pid}` - )); - }); - })); - - describe('when gracefully terminated by childProcess.terminate({\'force\': true})', () => { + ['signalTerm', 'terminate'].forEach((functionName) => + describe(`when gracefully terminated by childProcess.${functionName}()`, () => { + describe('process with support for graceful termination', () => { + let processInfo; + + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => childProcess[functionName](), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('logs a message about being gracefully terminated', () => + assert.include(processInfo.stdout, 'exiting')); + it('gets terminated', () => assert.isTrue(processInfo.terminated)); + if (process.platform !== 'win32') { + // Windows does not have signals + it('does not get terminated directly by the signal', () => + assert.isNull(processInfo.signal)); + } + it('returns zero status code', () => + assert.equal(processInfo.exitStatus, 0)); + it('does not emit an error', () => + assert.isUndefined(processInfo.error)); + }); + + describe('process without support for graceful termination', () => { + let processInfo; + + before((done) => + runChildProcess( + 'test/fixtures/scripts/endless-ignore-term.coffee', + (childProcess) => childProcess.terminate(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('logs a message about ignoring the graceful termination attempt', () => + assert.include(processInfo.stdout, 'ignoring')); + it('does not get terminated', () => + assert.isFalse(processInfo.terminated)); + it('has undefined status code', () => + assert.isUndefined(processInfo.exitStatus)); + it('emits an error', () => assert.instanceOf(processInfo.error, Error)); + it('the error has a message about unsuccessful termination', () => + assert.equal( + processInfo.error.message, + `Unable to gracefully terminate process ${processInfo.childProcess.pid}` + )); + }); + }) + ); + + describe("when gracefully terminated by childProcess.terminate({'force': true})", () => { describe('process with support for graceful termination', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', childProcess => childProcess.terminate({ force: true }), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + runChildProcess( + 
'test/fixtures/scripts/stdout.coffee', + (childProcess) => childProcess.terminate({ force: true }), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); - it('logs a message about being gracefully terminated', () => assert.include(processInfo.stdout, 'exiting')); + it('logs a message about being gracefully terminated', () => + assert.include(processInfo.stdout, 'exiting')); it('gets terminated', () => assert.isTrue(processInfo.terminated)); - if (process.platform !== 'win32') { // Windows does not have signals - it('does not get terminated directly by the signal', () => assert.isNull(processInfo.signal)); + if (process.platform !== 'win32') { + // Windows does not have signals + it('does not get terminated directly by the signal', () => + assert.isNull(processInfo.signal)); } - it('returns zero status code', () => assert.equal(processInfo.exitStatus, 0)); + it('returns zero status code', () => + assert.equal(processInfo.exitStatus, 0)); it('does not emit an error', () => assert.isUndefined(processInfo.error)); }); describe('process without support for graceful termination', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/endless-ignore-term.coffee', childProcess => childProcess.terminate({ force: true }), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/endless-ignore-term.coffee', + (childProcess) => childProcess.terminate({ force: true }), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); - it('logs a message about ignoring the graceful termination attempt', () => assert.include(processInfo.stdout, 'ignoring')); + it('logs a message about ignoring the graceful termination attempt', () => + assert.include(processInfo.stdout, 'ignoring')); it('gets terminated', () => assert.isTrue(processInfo.terminated)); if (process.platform === 'win32') { // Windows does not have signals and when a process gets // forcefully terminated, it has a non-zero status code. 
- it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); } else { it('gets killed', () => assert.equal(processInfo.signal, 'SIGKILL')); - it('returns no status code', () => assert.isNull(processInfo.exitStatus)); + it('returns no status code', () => + assert.isNull(processInfo.exitStatus)); } it('does not emit an error', () => assert.isUndefined(processInfo.error)); }); @@ -188,177 +245,265 @@ describe('Babysitting Child Processes', () => { describe('normally with zero status code', () => { let processInfo; - before(done => + before((done) => // eslint-disable-next-line - runChildProcess('test/fixtures/scripts/exit-0.coffee', childProcess => true, + runChildProcess( + 'test/fixtures/scripts/exit-0.coffee', + () => true, (err, info) => { processInfo = info; done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns zero status code', () => assert.equal(processInfo.exitStatus, 0)); - it('does not emit the \'crash\' event', () => assert.isFalse(processInfo.onCrash.called)); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns zero status code', () => + assert.equal(processInfo.exitStatus, 0)); + it("does not emit the 'crash' event", () => + assert.isFalse(processInfo.onCrash.called)); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); describe('normally with non-zero status code', () => { let processInfo; - before(done => + before((done) => // eslint-disable-next-line - runChildProcess('test/fixtures/scripts/exit-3.coffee', childProcess => true, + runChildProcess( + 'test/fixtures/scripts/exit-3.coffee', + () => true, (err, info) => { processInfo = info; done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); - it('does emit the \'crash\' event', () => assert.isTrue(processInfo.onCrash.called)); - it('the \'crash\' event is provided with non-zero status code', () => assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); - it('the \'crash\' event is not provided with killed flag', () => assert.isFalse(processInfo.onCrash.getCall(0).args[1])); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); + it("does emit the 'crash' event", () => + assert.isTrue(processInfo.onCrash.called)); + it("the 'crash' event is provided 
with non-zero status code", () => + assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); + it("the 'crash' event is not provided with killed flag", () => + assert.isFalse(processInfo.onCrash.getCall(0).args[1])); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); describe('intentionally gracefully with zero status code', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', childProcess => childProcess.signalTerm(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns zero status code', () => assert.equal(processInfo.exitStatus, 0)); - it('does not emit the \'crash\' event', () => assert.isFalse(processInfo.onCrash.called)); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is flagged as intentionally terminated', () => assert.isTrue(processInfo.childProcess.terminatedIntentionally)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => childProcess.signalTerm(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns zero status code', () => + assert.equal(processInfo.exitStatus, 0)); + it("does not emit the 'crash' event", () => + assert.isFalse(processInfo.onCrash.called)); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is flagged as intentionally terminated', () => + assert.isTrue(processInfo.childProcess.terminatedIntentionally)); }); describe('intentionally gracefully with non-zero status code', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout-exit-3.coffee', childProcess => childProcess.signalTerm(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); - it('does not emit the \'crash\' event', () => assert.isFalse(processInfo.onCrash.called)); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is flagged as intentionally terminated', () => assert.isTrue(processInfo.childProcess.terminatedIntentionally)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout-exit-3.coffee', + (childProcess) => childProcess.signalTerm(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); + it("does not emit the 'crash' event", () => + assert.isFalse(processInfo.onCrash.called)); + it('is flagged as terminated', () => + 
assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is flagged as intentionally terminated', () => + assert.isTrue(processInfo.childProcess.terminatedIntentionally)); }); describe('intentionally forcefully', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', childProcess => childProcess.signalKill(), - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => childProcess.signalKill(), + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); if (process.platform === 'win32') { - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); } else { it('gets killed', () => assert.equal(processInfo.signal, 'SIGKILL')); - it('returns no status code', () => assert.isNull(processInfo.exitStatus)); + it('returns no status code', () => + assert.isNull(processInfo.exitStatus)); } - it('does not emit the \'crash\' event', () => assert.isFalse(processInfo.onCrash.called)); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is flagged as intentionally killed', () => assert.isTrue(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + it("does not emit the 'crash' event", () => + assert.isFalse(processInfo.onCrash.called)); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is flagged as intentionally killed', () => + assert.isTrue(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); describe('gracefully with zero status code', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', (childProcess) => { - // Simulate that the process was terminated externally - const emit = sinon.stub(childProcess, 'emit'); - signalTerm(childProcess, () => {}); - emit.restore(); - }, - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns zero status code', () => assert.equal(processInfo.exitStatus, 0)); - it('does not emit the \'crash\' event', () => assert.isFalse(processInfo.onCrash.called)); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => { + // Simulate that the process was terminated externally + const emit = sinon.stub(childProcess, 'emit'); + signalTerm(childProcess, () => {}); + emit.restore(); + }, + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns zero 
status code', () => + assert.equal(processInfo.exitStatus, 0)); + it("does not emit the 'crash' event", () => + assert.isFalse(processInfo.onCrash.called)); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); describe('gracefully with non-zero status code', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout-exit-3.coffee', (childProcess) => { - // Simulate that the process was terminated externally - const emit = sinon.stub(childProcess, 'emit'); - signalTerm(childProcess, () => {}); - emit.restore(); - }, - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); - - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); - it('does emit the \'crash\' event', () => assert.isTrue(processInfo.onCrash.called)); - it('the \'crash\' event is provided with non-zero status code', () => assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); - it('the \'crash\' event is not provided with killed flag', () => assert.isFalse(processInfo.onCrash.getCall(0).args[1])); - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout-exit-3.coffee', + (childProcess) => { + // Simulate that the process was terminated externally + const emit = sinon.stub(childProcess, 'emit'); + signalTerm(childProcess, () => {}); + emit.restore(); + }, + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); + + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); + it("does emit the 'crash' event", () => + assert.isTrue(processInfo.onCrash.called)); + it("the 'crash' event is provided with non-zero status code", () => + assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); + it("the 'crash' event is not provided with killed flag", () => + assert.isFalse(processInfo.onCrash.getCall(0).args[1])); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); describe('forcefully', () => { let processInfo; - before(done => runChildProcess('test/fixtures/scripts/stdout.coffee', (childProcess) => { - // Simulate that the process was killed externally - const emit = sinon.stub(childProcess, 'emit'); - signalKill(childProcess, () => {}); - emit.restore(); - }, - (err, info) => { - processInfo = info; - done(err); - })); - after(done => helpers.kill(processInfo.childProcess.pid, done)); + before((done) => + runChildProcess( + 'test/fixtures/scripts/stdout.coffee', + (childProcess) => { + // Simulate that the process was killed externally + const emit = sinon.stub(childProcess, 'emit'); 
+ signalKill(childProcess, () => {}); + emit.restore(); + }, + (err, info) => { + processInfo = info; + done(err); + } + ) + ); + after((done) => helpers.kill(processInfo.childProcess.pid, done)); if (process.platform === 'win32') { - it('returns non-zero status code', () => assert.isAbove(processInfo.exitStatus, 0)); + it('returns non-zero status code', () => + assert.isAbove(processInfo.exitStatus, 0)); } else { it('gets killed', () => assert.equal(processInfo.signal, 'SIGKILL')); - it('returns no status code', () => assert.isNull(processInfo.exitStatus)); + it('returns no status code', () => + assert.isNull(processInfo.exitStatus)); } - it('does emit the \'crash\' event', () => assert.isTrue(processInfo.onCrash.called)); + it("does emit the 'crash' event", () => + assert.isTrue(processInfo.onCrash.called)); if (process.platform === 'win32') { - it('the \'crash\' event is provided with non-zero status code', () => assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); - it('the \'crash\' event is not provided with killed flag (cannot be detected on Windows)', () => assert.isFalse(processInfo.onCrash.getCall(0).args[1])); + it("the 'crash' event is provided with non-zero status code", () => + assert.isAbove(processInfo.onCrash.getCall(0).args[0], 0)); + it("the 'crash' event is not provided with killed flag (cannot be detected on Windows)", () => + assert.isFalse(processInfo.onCrash.getCall(0).args[1])); } else { - it('the \'crash\' event is provided with no status code', () => assert.isNull(processInfo.onCrash.getCall(0).args[0])); - it('the \'crash\' event is provided with killed flag', () => assert.isTrue(processInfo.onCrash.getCall(0).args[1])); + it("the 'crash' event is provided with no status code", () => + assert.isNull(processInfo.onCrash.getCall(0).args[0])); + it("the 'crash' event is provided with killed flag", () => + assert.isTrue(processInfo.onCrash.getCall(0).args[1])); } - it('is flagged as terminated', () => assert.isTrue(processInfo.childProcess.terminated)); - it('is not flagged as intentionally killed', () => assert.isFalse(processInfo.childProcess.killedIntentionally)); - it('is not flagged as intentionally terminated', () => assert.isFalse(processInfo.childProcess.terminatedIntentionally)); + it('is flagged as terminated', () => + assert.isTrue(processInfo.childProcess.terminated)); + it('is not flagged as intentionally killed', () => + assert.isFalse(processInfo.childProcess.killedIntentionally)); + it('is not flagged as intentionally terminated', () => + assert.isFalse(processInfo.childProcess.terminatedIntentionally)); }); }); }); diff --git a/test/integration/cli/api-blueprint-cli-test.js b/test/integration/cli/api-blueprint-cli-test.js index 43d592199..4967dfb96 100644 --- a/test/integration/cli/api-blueprint-cli-test.js +++ b/test/integration/cli/api-blueprint-cli-test.js @@ -1,64 +1,81 @@ -const { assert } = require('chai'); +import { assert } from 'chai' -const { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } = require('../helpers'); +import { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } from '../helpers' describe('CLI - API Blueprint Document', () => { describe('when loaded from file', () => { describe('when successfully loaded', () => { - let runtimeInfo; - const args = ['./test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + let runtimeInfo + const args = [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); - 
app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should request /machines', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('should request /machines', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when API Blueprint is loaded with errors', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/error-blueprint.apib', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - it('should print error message to stderr', () => assert.include(runtimeInfo.dredd.stderr, 'API description processing error')); - }); + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + it('should print error message to stderr', () => + assert.include( + runtimeInfo.dredd.stderr, + 'API description processing error' + )) + }) describe('when API Blueprint is loaded with warnings', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/warning-blueprint.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--no-color', - ]; + '--no-color' + ] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - it('should print warning to stdout', () => assert.include(runtimeInfo.dredd.stdout, 'API description URI template expansion warning')); - }); - }); -}); + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + it('should print warning to stdout', () => + assert.include( + runtimeInfo.dredd.stdout, + 'API description URI template expansion warning' + )) + }) + }) +}) diff --git a/test/integration/cli/api-description-cli-test.js b/test/integration/cli/api-description-cli-test.js index 1edccc9fa..0f8e4d7ba 100644 --- a/test/integration/cli/api-description-cli-test.js +++ b/test/integration/cli/api-description-cli-test.js @@ -1,270 +1,358 @@ -const fs = require('fs'); -const os = require('os'); -const { assert } = require('chai'); +import fs from 'fs' +import os from 'os' +import { assert } from 'chai' -const { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } = require('../helpers'); +import { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } from '../helpers' -const NON_EXISTENT_PORT = DEFAULT_SERVER_PORT + 1; +const NON_EXISTENT_PORT = DEFAULT_SERVER_PORT + 1 describe('CLI - API Description Document', () => { describe('when loaded from file', () => { describe('when loaded by glob pattern', () => { - let runtimeInfo; - const args = ['./test/fixtures/single-g*t.apib', 
`http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + let runtimeInfo + const args = [ + './test/fixtures/single-g*t.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should request /machines', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('should request /machines', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when file not found', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/__non-existent__.apib', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - it('should print error message to stderr', () => assert.include(runtimeInfo.dredd.stderr.toLowerCase(), 'could not find')); - }); - - describe('when given path exists, but can\'t be read', () => { - let runtimeInfo; - const args = [ - os.homedir(), - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + runtimeInfo = info + done(err) + }) + }) + + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + it('should print error message to stderr', () => + assert.include( + runtimeInfo.dredd.stderr.toLowerCase(), + 'could not find' + )) + }) + + describe("when given path exists, but can't be read", () => { + let runtimeInfo + const args = [os.homedir(), `http://127.0.0.1:${DEFAULT_SERVER_PORT}`] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - it('should print error message to stderr', () => assert.include(runtimeInfo.dredd.stderr, 'Unable to load API description document')); - }); - }); - + runtimeInfo = info + done(err) + }) + }) + + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + it('should print error message to stderr', () => + assert.include( + runtimeInfo.dredd.stderr, + 'Unable to load API description document' + )) + }) + }) describe('when loaded from URL', () => { describe('when successfully loaded from URL', () => { - let runtimeInfo; + let runtimeInfo const args = [ `http://127.0.0.1:${DEFAULT_SERVER_PORT}/single-get.apib`, - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); + const app = createServer() app.get('/single-get.apib', (req, res) => { - res.type('text/vnd.apiblueprint'); - fs.createReadStream('./test/fixtures/single-get.apib').pipe(res); - }); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + res.type('text/vnd.apiblueprint') 
+ fs.createReadStream('./test/fixtures/single-get.apib').pipe(res) + }) + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should download API Description Document from server', () => assert.equal(runtimeInfo.server.requestCounts['/single-get.apib'], 1)); - it('should request /machines', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1, '/single-get.apib': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should download API Description Document from server', () => + assert.equal(runtimeInfo.server.requestCounts['/single-get.apib'], 1)) + it('should request /machines', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1, + '/single-get.apib': 1 + })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when URL points to non-existent server', () => { - let runtimeInfo; + let runtimeInfo const args = [ `http://127.0.0.1:${NON_EXISTENT_PORT}/single-get.apib`, - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should not request server', () => assert.isFalse(runtimeInfo.server.requested)); - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + runtimeInfo = info + done(err) + }) + }) + + it('should not request server', () => + assert.isFalse(runtimeInfo.server.requested)) + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) it('should print error message to stderr', () => { - assert.include(runtimeInfo.dredd.stderr, 'Unable to load API description document from'); - assert.include(runtimeInfo.dredd.stderr, `http://127.0.0.1:${NON_EXISTENT_PORT}/single-get.apib`); - }); - }); + assert.include( + runtimeInfo.dredd.stderr, + 'Unable to load API description document from' + ) + assert.include( + runtimeInfo.dredd.stderr, + `http://127.0.0.1:${NON_EXISTENT_PORT}/single-get.apib` + ) + }) + }) describe('when URL points to non-existent resource', () => { - let runtimeInfo; + let runtimeInfo const args = [ `http://127.0.0.1:${DEFAULT_SERVER_PORT}/__non-existent__.apib`, - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); - app.get('/__non-existent__.apib', (req, res) => res.sendStatus(404)); + const app = createServer() + app.get('/__non-existent__.apib', (req, res) => res.sendStatus(404)) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should request server', () => assert.isTrue(runtimeInfo.server.requested)); - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + runtimeInfo = info + done(err) + }) + }) + + it('should request server', () => + assert.isTrue(runtimeInfo.server.requested)) + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) it('should print error message to stderr', () => { - assert.include(runtimeInfo.dredd.stderr, 'Unable to load API description document from'); - assert.include(runtimeInfo.dredd.stderr, "Dredd got HTTP 404 response with 
'text/plain; charset=utf-8' body"); - assert.include(runtimeInfo.dredd.stderr, `http://127.0.0.1:${DEFAULT_SERVER_PORT}/__non-existent__.apib`); - }); - }); - }); - + assert.include( + runtimeInfo.dredd.stderr, + 'Unable to load API description document from' + ) + assert.include( + runtimeInfo.dredd.stderr, + "Dredd got HTTP 404 response with 'text/plain; charset=utf-8' body" + ) + assert.include( + runtimeInfo.dredd.stderr, + `http://127.0.0.1:${DEFAULT_SERVER_PORT}/__non-existent__.apib` + ) + }) + }) + }) describe('when loaded by -p/--path', () => { describe('when loaded from file', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--path=./test/fixtures/single-get-uri-template.apib', - ]; + '--path=./test/fixtures/single-get-uri-template.apib' + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); - app.get('/machines/willy', (req, res) => res.json({ type: 'bulldozer', name: 'willy' })); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) + app.get('/machines/willy', (req, res) => + res.json({ type: 'bulldozer', name: 'willy' }) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should request /machines, /machines/willy', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1, '/machines/willy': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should request /machines, /machines/willy', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1, + '/machines/willy': 1 + })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when loaded from URL', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/single-get-uri-template.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - `--path=http://127.0.0.1:${DEFAULT_SERVER_PORT}/single-get.yaml`, - ]; + `--path=http://127.0.0.1:${DEFAULT_SERVER_PORT}/single-get.yaml` + ] before((done) => { - const app = createServer(); + const app = createServer() app.get('/single-get.yaml', (req, res) => { - res.type('application/yaml'); - fs.createReadStream('./test/fixtures/single-get.yaml').pipe(res); - }); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); - app.get('/machines/willy', (req, res) => res.json({ type: 'bulldozer', name: 'willy' })); + res.type('application/yaml') + fs.createReadStream('./test/fixtures/single-get.yaml').pipe(res) + }) + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) + app.get('/machines/willy', (req, res) => + res.json({ type: 'bulldozer', name: 'willy' }) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should download API Description Document from server', () => assert.equal(runtimeInfo.server.requestCounts['/single-get.yaml'], 1)); - it('should request /machines, /machines/willy', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1, '/machines/willy': 1, '/single-get.yaml': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should download API Description 
Document from server', () => + assert.equal(runtimeInfo.server.requestCounts['/single-get.yaml'], 1)) + it('should request /machines, /machines/willy', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1, + '/machines/willy': 1, + '/single-get.yaml': 1 + })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when used multiple times', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--path=./test/fixtures/single-get-uri-template.apib', - '--path=./test/fixtures/single-get-path.apib', - ]; + '--path=./test/fixtures/single-get-path.apib' + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); - app.get('/machines/willy', (req, res) => res.json({ type: 'bulldozer', name: 'willy' })); - app.get('/machines/caterpillar', (req, res) => res.json({ type: 'bulldozer', name: 'caterpillar' })); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) + app.get('/machines/willy', (req, res) => + res.json({ type: 'bulldozer', name: 'willy' }) + ) + app.get('/machines/caterpillar', (req, res) => + res.json({ type: 'bulldozer', name: 'caterpillar' }) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should request /machines, /machines/willy, /machines/caterpillar', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1, '/machines/willy': 1, '/machines/caterpillar': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should request /machines, /machines/willy, /machines/caterpillar', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1, + '/machines/willy': 1, + '/machines/caterpillar': 1 + })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when loaded by glob pattern', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--path=./test/fixtures/single-get-uri-temp*.apib', - ]; + '--path=./test/fixtures/single-get-uri-temp*.apib' + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); - app.get('/machines/willy', (req, res) => res.json({ type: 'bulldozer', name: 'willy' })); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) + app.get('/machines/willy', (req, res) => + res.json({ type: 'bulldozer', name: 'willy' }) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should request /machines, /machines/willy', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1, '/machines/willy': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should request /machines, /machines/willy', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1, + '/machines/willy': 1 + })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when additional file not found', () => { - let 
runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--path=./test/fixtures/__non-existent__.apib', - ]; + '--path=./test/fixtures/__non-existent__.apib' + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should not request server', () => assert.isFalse(runtimeInfo.server.requested)); - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - it('should print error message to stderr', () => assert.include(runtimeInfo.dredd.stderr.toLowerCase(), 'could not find')); - }); - }); -}); + runtimeInfo = info + done(err) + }) + }) + + it('should not request server', () => + assert.isFalse(runtimeInfo.server.requested)) + it('should exit with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + it('should print error message to stderr', () => + assert.include( + runtimeInfo.dredd.stderr.toLowerCase(), + 'could not find' + )) + }) + }) +}) diff --git a/test/integration/cli/configuration-cli-test.js b/test/integration/cli/configuration-cli-test.js index e54d0db1b..ec32eeaf8 100644 --- a/test/integration/cli/configuration-cli-test.js +++ b/test/integration/cli/configuration-cli-test.js @@ -1,12 +1,13 @@ -const express = require('express'); -const fs = require('fs'); -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); -const { assert } = require('chai'); +import express from 'express'; +import fs from 'fs'; +import { noCallThru } from 'proxyquire'; +import sinon from 'sinon'; +import { assert } from 'chai'; -const loggerStub = require('../../../lib/logger'); -const configUtils = require('../../../lib/configUtils'); +import loggerStub from '../../../lib/logger'; +import * as configUtils from '../../../lib/configUtils'; +const proxyquire = noCallThru(); const PORT = 9876; let exitStatus; @@ -14,37 +15,37 @@ let exitStatus; let stderr = ''; const addHooksStub = proxyquire('../../../lib/addHooks', { - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const transactionRunner = proxyquire('../../../lib/TransactionRunner', { './addHooks': addHooksStub, - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const dreddStub = proxyquire('../../../lib/Dredd', { './TransactionRunner': transactionRunner, - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const CLIStub = proxyquire('../../../lib/CLI', { './Dredd': dreddStub, './configUtils': configUtils, console: loggerStub, - fs, -}); + fs +}).default; function execCommand(custom = {}, cb) { stderr = ''; exitStatus = null; let finished = false; - const cli = new CLIStub({ custom }, ((exitStatusCode) => { + const cli = new CLIStub({ custom }, (exitStatusCode) => { if (!finished) { finished = true; - exitStatus = (exitStatusCode != null ? exitStatusCode : 0); + exitStatus = exitStatusCode != null ? 
exitStatusCode : 0; cb(); } - })); + }); cli.run(); } @@ -52,9 +53,9 @@ function execCommand(custom = {}, cb) { describe('CLI class Integration', () => { before(() => { ['warn', 'error', 'debug'].forEach((method) => { - sinon - .stub(loggerStub, method) - .callsFake((chunk) => { stderr += `\n${method}: ${chunk}`; }); + sinon.stub(loggerStub, method).callsFake((chunk) => { + stderr += `\n${method}: ${chunk}`; + }); }); }); @@ -75,7 +76,9 @@ describe('CLI class Integration', () => { before((done) => { fsExistsSync = sinon.stub(fs, 'existsSync').callsFake(() => true); - configUtilsLoad = sinon.stub(configUtils, 'load').callsFake(() => options); + configUtilsLoad = sinon + .stub(configUtils, 'load') + .callsFake(() => options); execCommand(cmd, done); }); after(() => { @@ -83,9 +86,12 @@ describe('CLI class Integration', () => { configUtilsLoad.restore(); }); - it('should call fs.existsSync with given path', () => assert.isTrue(fsExistsSync.calledWith(configPath))); - it('should call configUtils.load with given path', () => assert.isTrue(configUtilsLoad.calledWith(configPath))); - it('should print message about using given configuration file', () => assert.include(stderr, `debug: Configuration '${configPath}' found`)); + it('should call fs.existsSync with given path', () => + assert.isTrue(fsExistsSync.calledWith(configPath))); + it('should call configUtils.load with given path', () => + assert.isTrue(configUtilsLoad.calledWith(configPath))); + it('should print message about using given configuration file', () => + assert.include(stderr, `debug: Configuration '${configPath}' found`)); }); describe('When dredd.yml exists', () => { @@ -98,7 +104,9 @@ describe('CLI class Integration', () => { before((done) => { fsExistsSync = sinon.stub(fs, 'existsSync').callsFake(() => true); - configUtilsLoad = sinon.stub(configUtils, 'load').callsFake(() => options); + configUtilsLoad = sinon + .stub(configUtils, 'load') + .callsFake(() => options); execCommand(cmd, done); }); after(() => { @@ -106,9 +114,12 @@ describe('CLI class Integration', () => { configUtilsLoad.restore(); }); - it('should call fs.existsSync with dredd.yml', () => assert.isTrue(fsExistsSync.calledWith(configPath))); - it('should call configUtils.load with dredd.yml', () => assert.isTrue(configUtilsLoad.calledWith(configPath))); - it('should print message about using dredd.yml', () => assert.include(stderr, `debug: Configuration '${configPath}' found`)); + it('should call fs.existsSync with dredd.yml', () => + assert.isTrue(fsExistsSync.calledWith(configPath))); + it('should call configUtils.load with dredd.yml', () => + assert.isTrue(configUtilsLoad.calledWith(configPath))); + it('should print message about using dredd.yml', () => + assert.include(stderr, `debug: Configuration '${configPath}' found`)); }); describe('When dredd.yml does not exist', () => { @@ -128,9 +139,12 @@ describe('CLI class Integration', () => { configUtilsLoad.restore(); }); - it('should call fs.existsSync with dredd.yml', () => assert.isTrue(fsExistsSync.calledWith(configPath))); - it('should never call configUtils.load', () => assert.isFalse(configUtilsLoad.called)); - it('should not print message about using configuration file', () => assert.notInclude(stderr, 'debug: Configuration')); + it('should call fs.existsSync with dredd.yml', () => + assert.isTrue(fsExistsSync.calledWith(configPath))); + it('should never call configUtils.load', () => + assert.isFalse(configUtilsLoad.called)); + it('should not print message about using configuration file', () => + 
assert.notInclude(stderr, 'debug: Configuration')); }); }); @@ -141,20 +155,17 @@ describe('CLI class Integration', () => { const errorCmd = { argv: [ `http://127.0.0.1:${PORT + 1}/connection-error.apib`, - `http://127.0.0.1:${PORT + 1}`, - ], + `http://127.0.0.1:${PORT + 1}` + ] }; const wrongCmd = { argv: [ `http://127.0.0.1:${PORT}/not-found.apib`, - `http://127.0.0.1:${PORT}`, - ], + `http://127.0.0.1:${PORT}` + ] }; const goodCmd = { - argv: [ - `http://127.0.0.1:${PORT}/file.apib`, - `http://127.0.0.1:${PORT}`, - ], + argv: [`http://127.0.0.1:${PORT}/file.apib`, `http://127.0.0.1:${PORT}`] }; before((done) => { @@ -163,24 +174,30 @@ describe('CLI class Integration', () => { app.get('/', (req, res) => res.sendStatus(404)); app.get('/file.apib', (req, res) => { - fs.createReadStream('./test/fixtures/single-get.apib').pipe(res.type('text')); + fs.createReadStream('./test/fixtures/single-get.apib').pipe( + res.type('text') + ); }); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ); app.get('/not-found.apib', (req, res) => res.status(404).end()); server = app.listen(PORT, () => done()); }); - after(done => server.close(() => { - app = null; - server = null; - done(); - })); + after((done) => + server.close(() => { + app = null; + server = null; + done(); + }) + ); describe('and I try to load a file from bad hostname at all', () => { - before(done => execCommand(errorCmd, done)); + before((done) => execCommand(errorCmd, done)); it('should exit with status 1', () => assert.equal(exitStatus, 1)); @@ -191,7 +208,7 @@ describe('CLI class Integration', () => { }); describe('and I try to load a file that does not exist from an existing server', () => { - before(done => execCommand(wrongCmd, done)); + before((done) => execCommand(wrongCmd, done)); it('should exit with status 1', () => assert.equal(exitStatus, 1)); @@ -203,7 +220,7 @@ describe('CLI class Integration', () => { }); describe('and I try to load a file that actually is there', () => { - before(done => execCommand(goodCmd, done)); + before((done) => execCommand(goodCmd, done)); it('should exit with status 0', () => assert.equal(exitStatus, 0)); }); diff --git a/test/integration/cli/hookfiles-cli-test.js b/test/integration/cli/hookfiles-cli-test.js index 220a0ba2d..8cb454a4c 100644 --- a/test/integration/cli/hookfiles-cli-test.js +++ b/test/integration/cli/hookfiles-cli-test.js @@ -1,149 +1,190 @@ -const net = require('net'); -const path = require('path'); -const { assert } = require('chai'); - -const { - isProcessRunning, killAll, createServer, runCLIWithServer, runCLI, DEFAULT_SERVER_PORT, -} = require('../helpers'); - -const COFFEE_BIN = 'node_modules/.bin/coffee'; -const DEFAULT_HOOK_HANDLER_PORT = 61321; +import net from 'net' +import path from 'path' +import { assert } from 'chai' + +import { + isProcessRunning, + killAll, + createServer, + runCLIWithServer, + runCLI, + DEFAULT_SERVER_PORT +} from '../helpers' + +const COFFEE_BIN = 'node_modules/.bin/coffee' +const DEFAULT_HOOK_HANDLER_PORT = 61321 describe('CLI', () => { describe('Arguments with existing API description document and responding server', () => { describe('when executing the command and the server is responding as specified in the API description', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app 
= createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - const args = ['./test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + const args = [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('exit status should be 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('exit status should be 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when executing the command and the server is responding as specified in the API description, endpoint with path', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/v2/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/v2/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - const args = ['./test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}/v2/`]; + const args = [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}/v2/` + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('exit status should be 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('exit status should be 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when executing the command and the server is sending different response', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.status(201).json([{ kind: 'bulldozer', imatriculation: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.status(201).json([{ kind: 'bulldozer', imatriculation: 'willy' }]) + ) - const args = ['./test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + const args = [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('exit status should be 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - }); - }); + it('exit status should be 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + }) + }) describe('when called with arguments', () => { describe('when using language hooks handler and spawning the server', () => { describe("and handler file doesn't exist", () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--server-wait=0', '--language=foo/bar/hook-handler', - '--hookfiles=./test/fixtures/scripts/emptyfile', - ]; + '--hookfiles=./test/fixtures/scripts/emptyfile' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - after(done => killAll('test/fixtures/scripts/', done)); + after((done) => killAll('test/fixtures/scripts/', done)) - it('should return with status 
1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + it('should return with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) it('should not return message containing exited or killed', () => { - assert.notInclude(runtimeInfo.dredd.stderr, 'exited'); - assert.notInclude(runtimeInfo.dredd.stderr, 'killed'); - }); + assert.notInclude(runtimeInfo.dredd.stderr, 'exited') + assert.notInclude(runtimeInfo.dredd.stderr, 'killed') + }) - it('should not return message announcing the fact', () => assert.include(runtimeInfo.dredd.stderr, 'not found')); + it('should not return message announcing the fact', () => + assert.include(runtimeInfo.dredd.stderr, 'not found')) - it('should term or kill the server', done => isProcessRunning('endless-ignore-term', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); + it('should term or kill the server', (done) => + isProcessRunning('endless-ignore-term', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) - it('should not execute any transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, {})); - }); + it('should not execute any transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, {})) + }) describe('and handler crashes before execution', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--server-wait=0', '--language=node ./test/fixtures/scripts/exit-3.js', - '--hookfiles=./test/fixtures/scripts/emptyfile', - ]; + '--hookfiles=./test/fixtures/scripts/emptyfile' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - after(done => killAll('test/fixtures/scripts/', done)); + after((done) => killAll('test/fixtures/scripts/', done)) - it('should return with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + it('should return with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) - it('should return message announcing the fact', () => assert.include(runtimeInfo.dredd.stderr, 'exited')); + it('should return message announcing the fact', () => + assert.include(runtimeInfo.dredd.stderr, 'exited')) - it('should term or kill the server', done => isProcessRunning('endless-ignore-term', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); + it('should term or kill the server', (done) => + isProcessRunning('endless-ignore-term', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) - it('should not execute any transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, {})); - }); + it('should not execute any transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, {})) + }) describe('and handler is killed before execution', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', @@ -151,54 +192,63 @@ describe('CLI', 
() => { `--server=${COFFEE_BIN} ./test/fixtures/scripts/endless-ignore-term.coffee`, '--server-wait=0', '--language=node ./test/fixtures/scripts/kill-self.js', - '--hookfiles=./test/fixtures/scripts/emptyfile', - ]; + '--hookfiles=./test/fixtures/scripts/emptyfile' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - after(done => killAll('test/fixtures/scripts/', done)); + after((done) => killAll('test/fixtures/scripts/', done)) - it('should return with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + it('should return with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) it('should return message announcing the fact', () => { if (process.platform === 'win32') { // On Windows there's no way to detect a process was killed - return assert.include(runtimeInfo.dredd.stderr, 'exited'); + return assert.include(runtimeInfo.dredd.stderr, 'exited') } - assert.include(runtimeInfo.dredd.stderr, 'killed'); - }); + assert.include(runtimeInfo.dredd.stderr, 'killed') + }) - it('should term or kill the server', done => isProcessRunning('endless-ignore-term', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); + it('should term or kill the server', (done) => + isProcessRunning('endless-ignore-term', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) - it('should not execute any transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, {})); - }); + it('should not execute any transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, {})) + }) describe('and handler is killed during execution', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); + const app = createServer() app.get('/machines', (req, res) => { // path.posix|win32.normalize and path.join do not do the job in this case, // hence this ingenious hack - const normalizedPath = path.normalize('test/fixtures/hooks.js').replace(/\\/g, '\\\\'); + const normalizedPath = path + .normalize('test/fixtures/hooks.js') + .replace(/\\/g, '\\\\') killAll(`endless-ignore-term.+[^=]${normalizedPath}`, (err) => { - if (err) { done(err); } - res.json([{ type: 'bulldozer', name: 'willy' }]); - }); - }); + if (err) { + done(err) + } + res.json([{ type: 'bulldozer', name: 'willy' }]) + }) + }) // TCP server echoing transactions back const hookHandler = net.createServer((socket) => { - socket.on('data', data => socket.write(data)); - socket.on('error', err => console.error(err)); - }); + socket.on('data', (data) => socket.write(data)) + socket.on('error', (err) => console.error(err)) + }) const args = [ './test/fixtures/single-get.apib', @@ -206,48 +256,59 @@ describe('CLI', () => { `--server=${COFFEE_BIN} ./test/fixtures/scripts/endless-ignore-term.coffee`, '--server-wait=0', `--language=${COFFEE_BIN} ./test/fixtures/scripts/endless-ignore-term.coffee`, - '--hookfiles=test/fixtures/hooks.js', - ]; - hookHandler.listen(DEFAULT_HOOK_HANDLER_PORT, () => runCLIWithServer(args, app, (err, info) => { - hookHandler.close(); - runtimeInfo = info; - done(err); - })); - }); - - after(done => killAll('test/fixtures/scripts/', done)); - - it('should return with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); + '--hookfiles=test/fixtures/hooks.js' + ] + hookHandler.listen(DEFAULT_HOOK_HANDLER_PORT, () => + runCLIWithServer(args, app, (err, info) => { + hookHandler.close() + runtimeInfo = info + done(err) + 
}) + ) + }) + + after((done) => killAll('test/fixtures/scripts/', done)) + + it('should return with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) it('should return message announcing the fact', () => { if (process.platform === 'win32') { // On Windows there's no way to detect a process was killed - return assert.include(runtimeInfo.dredd.stderr, 'exited'); + return assert.include(runtimeInfo.dredd.stderr, 'exited') } - assert.include(runtimeInfo.dredd.stderr, 'killed'); - }); - - it('should term or kill the server', done => isProcessRunning('endless-ignore-term', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); - - it('should execute the transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - }); + assert.include(runtimeInfo.dredd.stderr, 'killed') + }) + + it('should term or kill the server', (done) => + isProcessRunning('endless-ignore-term', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) + + it('should execute the transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1 + })) + }) describe("and handler didn't quit but all Dredd tests were OK", () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); + const app = createServer() - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) // TCP server echoing transactions back const hookHandler = net.createServer((socket) => { - socket.on('data', data => socket.write(data)); - socket.on('error', err => console.error(err)); - }); + socket.on('data', (data) => socket.write(data)) + socket.on('error', (err) => console.error(err)) + }) const args = [ './test/fixtures/single-get.apib', @@ -255,544 +316,627 @@ describe('CLI', () => { `--server=${COFFEE_BIN} ./test/fixtures/scripts/endless-ignore-term.coffee`, '--server-wait=0', `--language=${COFFEE_BIN} ./test/fixtures/scripts/endless-ignore-term.coffee`, - '--hookfiles=./test/fixtures/scripts/emptyfile', - ]; - hookHandler.listen(DEFAULT_HOOK_HANDLER_PORT, () => runCLIWithServer(args, app, (err, info) => { - hookHandler.close(); - runtimeInfo = info; - done(err); - })); - }); - - after(done => killAll('test/fixtures/scripts/', done)); - - it('should return with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); + '--hookfiles=./test/fixtures/scripts/emptyfile' + ] + hookHandler.listen(DEFAULT_HOOK_HANDLER_PORT, () => + runCLIWithServer(args, app, (err, info) => { + hookHandler.close() + runtimeInfo = info + done(err) + }) + ) + }) + + after((done) => killAll('test/fixtures/scripts/', done)) + + it('should return with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) it('should not return any killed or exited message', () => { - assert.notInclude(runtimeInfo.dredd.stderr, 'killed'); - assert.notInclude(runtimeInfo.dredd.stderr, 'exited'); - }); - - it('should kill both the handler and the server', done => isProcessRunning('endless-ignore-term', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); - - it('should execute some transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - }); - }); + assert.notInclude(runtimeInfo.dredd.stderr, 'killed') + assert.notInclude(runtimeInfo.dredd.stderr, 'exited') + }) + + it('should kill both the handler and the server', (done) => + 
isProcessRunning('endless-ignore-term', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) + + it('should execute some transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1 + })) + }) + }) describe('when adding additional headers with -h', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '-h', - 'Accept:application/json', - ]; + 'Accept:application/json' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should have an additional header in the request', () => assert.nestedPropertyVal(runtimeInfo.server.requests['/machines'][0], 'headers.accept', 'application/json')); - }); - + runtimeInfo = info + done(err) + }) + }) + + it('should have an additional header in the request', () => + assert.nestedPropertyVal( + runtimeInfo.server.requests['/machines'][0], + 'headers.accept', + 'application/json' + )) + }) describe('when adding basic auth credentials with -u', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '-u', - 'username:password', - ]; + 'username:password' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should have an authorization header in the request', () => assert.isOk(runtimeInfo.server.requests['/machines'][0].headers.authorization)); + runtimeInfo = info + done(err) + }) + }) - it('should contain a base64 encoded string of the username and password', () => assert.isOk(runtimeInfo.server.requests['/machines'][0].headers.authorization === (`Basic ${Buffer.from('username:password').toString('base64')}`))); - }); + it('should have an authorization header in the request', () => + assert.isOk( + runtimeInfo.server.requests['/machines'][0].headers.authorization + )) + it('should contain a base64 encoded string of the username and password', () => + assert.isOk( + runtimeInfo.server.requests['/machines'][0].headers.authorization === + `Basic ${Buffer.from('username:password').toString('base64')}` + )) + }) describe('when sorting requests with -s', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/apiary.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '-s', - ]; + '-s' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should perform the POST, GET, PUT, DELETE in order', () => { assert.isOk( - runtimeInfo.dredd.stdout.indexOf('POST') - < runtimeInfo.dredd.stdout.indexOf('GET') - < runtimeInfo.dredd.stdout.indexOf('PUT') - < 
runtimeInfo.dredd.stdout.indexOf('DELETE') - ); - }); - }); + runtimeInfo.dredd.stdout.indexOf('POST') < + runtimeInfo.dredd.stdout.indexOf('GET') < + runtimeInfo.dredd.stdout.indexOf('PUT') < + runtimeInfo.dredd.stdout.indexOf('DELETE') + ) + }) + }) describe('when displaying errors inline with -e', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.status(201).json([{ kind: 'bulldozer', imatriculation: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.status(201).json([{ kind: 'bulldozer', imatriculation: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '-e', - ]; + '-e' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should display errors inline', () => { // When displayed inline, a single fail request only creates two "fail:" messages, // as opposed to the usual three - const count = runtimeInfo.dredd.stdout.split('fail').length - 2; // Says fail in the epilogue - assert.equal(count, 2); - }); - }); + const count = runtimeInfo.dredd.stdout.split('fail').length - 2 // Says fail in the epilogue + assert.equal(count, 2) + }) + }) describe('when showing details for all requests with -d', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '-d', - ]; + '-d' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should display details on passing tests', () => { // The request: block is not shown for passing tests normally - assert.isOk(runtimeInfo.dredd.stdout.indexOf('request') > -1); - }); - }); + assert.isOk(runtimeInfo.dredd.stdout.indexOf('request') > -1) + }) + }) describe('when filtering request methods with -m', () => { describe('when blocking a request', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '-m', - 'POST', - ]; + 'POST' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should not send the request request', () => assert.deepEqual(runtimeInfo.server.requestCounts, {})); - }); + it('should not send the request request', () => + assert.deepEqual(runtimeInfo.server.requestCounts, {})) + }) describe('when not blocking a request', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '-m', - 'GET', - ]; + 'GET' 
+ ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should allow the request to go through', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - }); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should allow the request to go through', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { + '/machines': 1 + })) + }) + }) describe('when filtering transaction to particular name with -x or --only', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - app.get('/message', (req, res) => res.type('text/plain').send('Hello World!\n')); + app.get('/message', (req, res) => + res.type('text/plain').send('Hello World!\n') + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--path=./test/fixtures/multifile/*.apib', '--only=Message API > /message > GET', - '--no-color', - ]; + '--no-color' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should notify skipping to the stdout', () => assert.include(runtimeInfo.dredd.stdout, 'skip: GET (200) /machines')); + it('should notify skipping to the stdout', () => + assert.include(runtimeInfo.dredd.stdout, 'skip: GET (200) /machines')) - it('should hit the only transaction', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/message': 1 })); + it('should hit the only transaction', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { '/message': 1 })) - it('exit status should be 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('exit status should be 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when suppressing color with --no-color', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--no-color', - ]; + '--no-color' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should print without colors', () => { // If colors are not on, there is no closing color code between // the "pass" and the ":" - assert.include(runtimeInfo.dredd.stdout, 'pass:'); - }); - }); + assert.include(runtimeInfo.dredd.stdout, 'pass:') + }) + }) describe('when setting the log output level with --loglevel', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--loglevel=error', - '--no-color', - ]; + '--no-color' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should not display any 
debug logging', () => { - assert.notInclude(runtimeInfo.dredd.output, 'debug:'); - }); - }); + assert.notInclude(runtimeInfo.dredd.output, 'debug:') + }) + }) describe('when showing timestamps with --loglevel=debug', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--loglevel=debug', - ]; + '--loglevel=debug' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should display timestamps', () => { // Look for the prefix for cli output with timestamps - assert.include(runtimeInfo.dredd.stderr, 'Z -'); - }); - }); - }); + assert.include(runtimeInfo.dredd.stderr, 'Z -') + }) + }) + }) describe('when loading hooks with --hookfiles', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--hookfiles=./test/fixtures/*_hooks.*', - ]; + '--hookfiles=./test/fixtures/*_hooks.*' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('should modify the transaction with hooks', () => assert.equal(runtimeInfo.server.requests['/machines'][0].headers.header, '123232323')); - }); + runtimeInfo = info + done(err) + }) + }) + + it('should modify the transaction with hooks', () => + assert.equal( + runtimeInfo.server.requests['/machines'][0].headers.header, + '123232323' + )) + }) describe('when describing events in hookfiles', () => { - let runtimeInfo; + let runtimeInfo function containsLine(str, expected) { - const lines = str.split('\n'); + const lines = str.split('\n') for (const line of lines) { if (line.indexOf(expected) > -1) { - return true; + return true } } - return false; + return false } before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--hookfiles=./test/fixtures/*_events.*', - ]; + '--hookfiles=./test/fixtures/*_events.*' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should execute the before and after events', () => { - assert.isOk(containsLine(runtimeInfo.dredd.stdout, 'hooks.beforeAll'), (runtimeInfo.dredd.stdout)); - assert.isOk(containsLine(runtimeInfo.dredd.stdout, 'hooks.afterAll'), (runtimeInfo.dredd.stdout)); - }); - }); + assert.isOk( + containsLine(runtimeInfo.dredd.stdout, 'hooks.beforeAll'), + runtimeInfo.dredd.stdout + ) + assert.isOk( + containsLine(runtimeInfo.dredd.stdout, 'hooks.afterAll'), + runtimeInfo.dredd.stdout + ) + }) + }) describe('when describing both hooks and events in hookfiles', () => { - let runtimeInfo; + let runtimeInfo function getResults(str) { - 
const ret = []; - const lines = str.split('\n'); + const ret = [] + const lines = str.split('\n') for (const line of lines) { if (line.indexOf('*** ') > -1) { - ret.push(line.substr(line.indexOf('*** ') + 4)); + ret.push(line.substr(line.indexOf('*** ') + 4)) } } - return ret.join(','); + return ret.join(',') } before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--require=coffeescript/register', - '--hookfiles=./test/fixtures/*_all.*', - ]; + '--hookfiles=./test/fixtures/*_all.*' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should execute hooks and events in order', () => { - const events = getResults(runtimeInfo.dredd.stdout); - assert.isOk(events === 'beforeAll,before,after,afterAll'); - }); - }); + const events = getResults(runtimeInfo.dredd.stdout) + assert.isOk(events === 'beforeAll,before,after,afterAll') + }) + }) describe('tests an API description containing an endpoint with schema', () => { describe('and server is responding in accordance with the schema', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/', (req, res) => res.json({ - data: { - expires: 1234, - token: 'this should pass since it is a string', - }, - })); + const app = createServer() + app.get('/', (req, res) => + res.json({ + data: { + expires: 1234, + token: 'this should pass since it is a string' + } + }) + ) const args = [ './test/fixtures/schema.apib', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('exit status should be 0 (success)', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('exit status should be 0 (success)', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('and server is NOT responding in accordance with the schema', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/', (req, res) => res.json({ - data: { - expires: 'this should fail since it is a string', - token: 'this should pass since it is a string', - }, - })); + const app = createServer() + app.get('/', (req, res) => + res.json({ + data: { + expires: 'this should fail since it is a string', + token: 'this should pass since it is a string' + } + }) + ) const args = [ './test/fixtures/schema.apib', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('exit status should be 1 (failure)', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - }); - }); + it('exit status should be 1 (failure)', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + }) + }) describe('when API description document path is a glob', () => { describe('and called with --names options', () => { - let cliInfo; + let cliInfo before((done) => { const args = [ './test/fixtures/multifile/*.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--names', - '--loglevel=debug', 
- ]; + '--loglevel=debug' + ] runCLI(args, (err, info) => { - cliInfo = info; - done(err); - }); - }); + cliInfo = info + done(err) + }) + }) it('it should include all paths from all API description documents matching the glob', () => { - assert.include(cliInfo.stdout, '> /greeting > GET'); - assert.include(cliInfo.stdout, '> /message > GET'); - assert.include(cliInfo.stdout, '> /name > GET'); - }); + assert.include(cliInfo.stdout, '> /greeting > GET') + assert.include(cliInfo.stdout, '> /message > GET') + assert.include(cliInfo.stdout, '> /name > GET') + }) - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); - }); + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) + }) describe('and called with hooks', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/name', (req, res) => res.type('text/plain').send('Adam\n')); + const app = createServer() + app.get('/name', (req, res) => res.type('text/plain').send('Adam\n')) - app.get('/greeting', (req, res) => res.type('text/plain').send('Howdy!\n')); + app.get('/greeting', (req, res) => + res.type('text/plain').send('Howdy!\n') + ) - app.get('/message', (req, res) => res.type('text/plain').send('Hello World!\n')); + app.get('/message', (req, res) => + res.type('text/plain').send('Hello World!\n') + ) const args = [ './test/fixtures/multifile/*.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--require=coffeescript/register', - '--hookfiles=./test/fixtures/multifile/multifile_hooks.coffee', - ]; + '--hookfiles=./test/fixtures/multifile/multifile_hooks.coffee' + ] runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('should eval the hook for each transaction', () => { - assert.include(runtimeInfo.dredd.stdout, 'after name'); - assert.include(runtimeInfo.dredd.stdout, 'after greeting'); - assert.include(runtimeInfo.dredd.stdout, 'after message'); - }); + assert.include(runtimeInfo.dredd.stdout, 'after name') + assert.include(runtimeInfo.dredd.stdout, 'after greeting') + assert.include(runtimeInfo.dredd.stdout, 'after message') + }) - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0, (runtimeInfo.dredd.output))); + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0, runtimeInfo.dredd.output)) it('server should receive 3 requests', () => { assert.deepEqual(runtimeInfo.server.requestCounts, { '/name': 1, '/greeting': 1, - '/message': 1, - }); - }); - }); - }); + '/message': 1 + }) + }) + }) + }) + describe('when called with additional --path argument which is a glob', () => + describe('and called with --names options', () => { + let cliInfo - describe('when called with additional --path argument which is a glob', () => describe('and called with --names options', () => { - let cliInfo; - - before((done) => { - const args = [ - './test/fixtures/multiple-examples.apib', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--path=./test/fixtures/multifile/*.apib', - '--loglevel=debug', - '--names', - ]; - runCLI(args, (err, info) => { - cliInfo = info; - done(err); - }); - }); - - it('it should include all paths from all API description documents matching all paths and globs', () => { - assert.include(cliInfo.stdout, 'Greeting API > /greeting > GET'); - assert.include(cliInfo.stdout, 'Message API > /message > GET'); - assert.include(cliInfo.stdout, 'Name API > /name > GET'); - assert.include(cliInfo.stdout, 
'Machines API > Machines > Machines collection > Get Machines > Example 1'); - assert.include(cliInfo.stdout, 'Machines API > Machines > Machines collection > Get Machines > Example 2'); - }); - - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); - })); -}); + before((done) => { + const args = [ + './test/fixtures/multiple-examples.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, + '--path=./test/fixtures/multifile/*.apib', + '--loglevel=debug', + '--names' + ] + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + }) + + it('it should include all paths from all API description documents matching all paths and globs', () => { + assert.include(cliInfo.stdout, 'Greeting API > /greeting > GET') + assert.include(cliInfo.stdout, 'Message API > /message > GET') + assert.include(cliInfo.stdout, 'Name API > /name > GET') + assert.include( + cliInfo.stdout, + 'Machines API > Machines > Machines collection > Get Machines > Example 1' + ) + assert.include( + cliInfo.stdout, + 'Machines API > Machines > Machines collection > Get Machines > Example 2' + ) + }) + + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) + })) +}) diff --git a/test/integration/cli/openapi2-cli-test.js b/test/integration/cli/openapi2-cli-test.js index 6cc8347e6..088af98d5 100644 --- a/test/integration/cli/openapi2-cli-test.js +++ b/test/integration/cli/openapi2-cli-test.js @@ -1,66 +1,85 @@ -const { assert } = require('chai'); +import { assert } from 'chai' -const { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } = require('../helpers'); +import { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } from '../helpers' describe('CLI - OpenAPI 2 Document', () => { describe('when loaded from file', () => { describe('when successfully loaded', () => { - let runtimeInfo; - const args = ['./test/fixtures/single-get.yaml', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + let runtimeInfo + const args = [ + './test/fixtures/single-get.yaml', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should request /machines', () => assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })); - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - }); + it('should request /machines', () => + assert.deepEqual(runtimeInfo.server.requestCounts, { '/machines': 1 })) + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + }) describe('when OpenAPI 2 is loaded with errors', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/error-openapi2.yaml', - `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - ]; + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] before((done) => { - const app = createServer(); + const app = createServer() runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should exit with status 1', () => assert.equal(runtimeInfo.dredd.exitStatus, 1)); - it('should print error message to stderr', () => assert.include(runtimeInfo.dredd.stderr, 'API description processing error')); - }); + it('should exit 
with status 1', () => + assert.equal(runtimeInfo.dredd.exitStatus, 1)) + it('should print error message to stderr', () => + assert.include( + runtimeInfo.dredd.stderr, + 'API description processing error' + )) + }) describe('when OpenAPI 2 is loaded with warnings', () => { - let runtimeInfo; + let runtimeInfo const args = [ './test/fixtures/warning-openapi2.yaml', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--no-color', - ]; + '--no-color' + ] before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) runCLIWithServer(args, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) - it('should exit with status 0', () => assert.equal(runtimeInfo.dredd.exitStatus, 0)); - it('should print warning to stdout', () => assert.include(runtimeInfo.dredd.stdout, 'API description parser warning')); - }); - }); -}); + it('should exit with status 0', () => + assert.equal(runtimeInfo.dredd.exitStatus, 0)) + it('should print warning to stdout', () => + assert.include( + runtimeInfo.dredd.stdout, + 'API description parser warning' + )) + }) + }) +}) diff --git a/test/integration/cli/reporters-cli-test.js b/test/integration/cli/reporters-cli-test.js index 5f6d17bb9..cfa5d0aa3 100644 --- a/test/integration/cli/reporters-cli-test.js +++ b/test/integration/cli/reporters-cli-test.js @@ -1,209 +1,277 @@ -const clone = require('clone'); -const fs = require('fs'); -const { assert } = require('chai'); +import clone from 'clone' +import fs from 'fs' +import { assert } from 'chai' -const { runCLI, createServer, DEFAULT_SERVER_PORT } = require('../helpers'); +import { runCLI, createServer, DEFAULT_SERVER_PORT } from '../helpers' -const APIARY_PORT = DEFAULT_SERVER_PORT + 1; +const APIARY_PORT = DEFAULT_SERVER_PORT + 1 describe('CLI - Reporters', () => { - let server; + let server before((done) => { - const app = createServer(); + const app = createServer() - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) server = app.listen((err) => { - done(err); - }); - }); - - after(done => server.close(done)); + done(err) + }) + }) + after((done) => server.close(done)) describe('when -r/--reporter is provided to use additional reporters', () => { - let cliInfo; + let cliInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--reporter=nyan', - ]; + '--reporter=nyan' + ] before((done) => { runCLI(args, (err, info) => { - cliInfo = info; - done(err); - }); - }); + cliInfo = info + done(err) + }) + }) it('should use given reporter', () => { // Nyan cat ears should exist in stdout - assert.include(cliInfo.stdout, '/\\_/\\'); - }); - }); - + assert.include(cliInfo.stdout, '/\\_/\\') + }) + }) describe('when apiary reporter is used', () => { - let apiary; - let apiaryRuntimeInfo; + let apiary + let apiaryRuntimeInfo - const env = clone(process.env); - env.APIARY_API_URL = `http://127.0.0.1:${APIARY_PORT}`; + const env = clone(process.env) + env.APIARY_API_URL = `http://127.0.0.1:${APIARY_PORT}` before((done) => { - const app = createServer(); + const app = createServer() app.post('/apis/*', (req, res) => { res.json({ _id: '1234_id', testRunId: '6789_testRunId', - reportUrl: 'http://example.com/test/run/1234_id', 
- }); - }); + reportUrl: 'http://example.com/test/run/1234_id' + }) + }) - app.all('*', (req, res) => res.json({})); + app.all('*', (req, res) => res.json({})) apiary = app.listen(APIARY_PORT, (err, info) => { - apiaryRuntimeInfo = info; - done(err); - }); - }); + apiaryRuntimeInfo = info + done(err) + }) + }) - after(done => apiary.close(done)); + after((done) => apiary.close(done)) describe('when Dredd successfully performs requests to Apiary', () => { - let cliInfo; - let stepRequest; + let cliInfo + let stepRequest const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--reporter=apiary', - ]; + '--reporter=apiary' + ] before((done) => { - apiaryRuntimeInfo.reset(); + apiaryRuntimeInfo.reset() runCLI(args, { env }, (err, info) => { - cliInfo = info; - stepRequest = apiaryRuntimeInfo.requests['/apis/public/tests/steps?testRunId=1234_id'][0]; - done(err); - }); - }); - - it('should print URL of the test report', () => assert.include(cliInfo.stdout, 'http://example.com/test/run/1234_id')); - it('should print warning about missing Apiary API settings', () => assert.include(cliInfo.stdout, 'Apiary API Key or API Project Subdomain were not provided.')); - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); + cliInfo = info + stepRequest = + apiaryRuntimeInfo.requests[ + '/apis/public/tests/steps?testRunId=1234_id' + ][0] + done(err) + }) + }) + + it('should print URL of the test report', () => + assert.include(cliInfo.stdout, 'http://example.com/test/run/1234_id')) + it('should print warning about missing Apiary API settings', () => + assert.include( + cliInfo.stdout, + 'Apiary API Key or API Project Subdomain were not provided.' + )) + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) it('should perform 3 requests to Apiary', () => { assert.deepEqual(apiaryRuntimeInfo.requestCounts, { '/apis/public/tests/runs': 1, '/apis/public/tests/run/1234_id': 1, - '/apis/public/tests/steps?testRunId=1234_id': 1, - }); - }); + '/apis/public/tests/steps?testRunId=1234_id': 1 + }) + }) it('should send results from gavel', () => { - assert.isObject(stepRequest.body); - assert.nestedProperty(stepRequest.body, 'results.request'); - assert.nestedProperty(stepRequest.body, 'results.realResponse'); - assert.nestedProperty(stepRequest.body, 'results.expectedResponse'); - assert.nestedProperty(stepRequest.body, 'results.validationResult.fields.body'); - assert.nestedProperty(stepRequest.body, 'results.validationResult.fields.headers'); - assert.nestedProperty(stepRequest.body, 'results.validationResult.fields.statusCode'); - }); - }); + assert.isObject(stepRequest.body) + assert.nestedProperty(stepRequest.body, 'results.request') + assert.nestedProperty(stepRequest.body, 'results.realResponse') + assert.nestedProperty(stepRequest.body, 'results.expectedResponse') + assert.nestedProperty( + stepRequest.body, + 'results.validationResult.fields.body' + ) + assert.nestedProperty( + stepRequest.body, + 'results.validationResult.fields.headers' + ) + assert.nestedProperty( + stepRequest.body, + 'results.validationResult.fields.statusCode' + ) + }) + }) describe('when hooks file uses hooks.log function for logging', () => { - let cliInfo; - let updateRequest; - let stepRequest; + let cliInfo + let updateRequest + let stepRequest const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--reporter=apiary', '--require=coffeescript/register', - '--hookfiles=./test/fixtures/hooks-log.coffee', - ]; + 
'--hookfiles=./test/fixtures/hooks-log.coffee' + ] before((done) => { - apiaryRuntimeInfo.reset(); + apiaryRuntimeInfo.reset() runCLI(args, { env }, (err, info) => { - cliInfo = info; - updateRequest = apiaryRuntimeInfo.requests['/apis/public/tests/run/1234_id'][0]; - stepRequest = apiaryRuntimeInfo.requests['/apis/public/tests/steps?testRunId=1234_id'][0]; - return done(err); - }); - }); + cliInfo = info + updateRequest = + apiaryRuntimeInfo.requests['/apis/public/tests/run/1234_id'][0] + stepRequest = + apiaryRuntimeInfo.requests[ + '/apis/public/tests/steps?testRunId=1234_id' + ][0] + return done(err) + }) + }) it('hooks.log should print also to console', () => { - assert.include(cliInfo.output, 'using hooks.log to debug'); - }); - it('hooks.log should use toString on objects', () => assert.include(cliInfo.output, 'Error object!')); - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); + assert.include(cliInfo.output, 'using hooks.log to debug') + }) + it('hooks.log should use toString on objects', () => + assert.include(cliInfo.output, 'Error object!')) + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) it('should request Apiary API to start a test run', () => { - assert.equal(apiaryRuntimeInfo.requestCounts['/apis/public/tests/runs'], 1); - assert.equal(apiaryRuntimeInfo.requests['/apis/public/tests/runs'][0].method, 'POST'); - }); + assert.equal( + apiaryRuntimeInfo.requestCounts['/apis/public/tests/runs'], + 1 + ) + assert.equal( + apiaryRuntimeInfo.requests['/apis/public/tests/runs'][0].method, + 'POST' + ) + }) it('should request Apiary API to create a test step', () => { - assert.equal(apiaryRuntimeInfo.requestCounts['/apis/public/tests/steps?testRunId=1234_id'], 1); - assert.equal(apiaryRuntimeInfo.requests['/apis/public/tests/steps?testRunId=1234_id'][0].method, 'POST'); - }); + assert.equal( + apiaryRuntimeInfo.requestCounts[ + '/apis/public/tests/steps?testRunId=1234_id' + ], + 1 + ) + assert.equal( + apiaryRuntimeInfo.requests[ + '/apis/public/tests/steps?testRunId=1234_id' + ][0].method, + 'POST' + ) + }) it('should request Apiary API to update the test run', () => { - assert.equal(apiaryRuntimeInfo.requestCounts['/apis/public/tests/run/1234_id'], 1); - assert.equal(apiaryRuntimeInfo.requests['/apis/public/tests/run/1234_id'][0].method, 'PATCH'); - }); + assert.equal( + apiaryRuntimeInfo.requestCounts['/apis/public/tests/run/1234_id'], + 1 + ) + assert.equal( + apiaryRuntimeInfo.requests['/apis/public/tests/run/1234_id'][0] + .method, + 'PATCH' + ) + }) context('the update request', () => { it('should have result stats with logs', () => { - assert.isObject(updateRequest.body); - assert.nestedPropertyVal(updateRequest.body, 'status', 'passed'); - assert.nestedProperty(updateRequest.body, 'endedAt'); - assert.nestedProperty(updateRequest.body, 'logs'); - assert.isArray(updateRequest.body.logs); - assert.lengthOf(updateRequest.body.logs, 3); - assert.property(updateRequest.body.logs[0], 'timestamp'); - assert.include(updateRequest.body.logs[0].content, 'Error object!'); - assert.property(updateRequest.body.logs[1], 'timestamp'); - assert.nestedPropertyVal(updateRequest.body.logs[1], 'content', 'true'); - assert.property(updateRequest.body.logs[2], 'timestamp'); - assert.nestedPropertyVal(updateRequest.body.logs[2], 'content', 'using hooks.log to debug'); - assert.nestedProperty(updateRequest.body, 'result.tests'); - assert.nestedProperty(updateRequest.body, 'result.failures'); - assert.nestedProperty(updateRequest.body, 
'result.errors'); - assert.nestedProperty(updateRequest.body, 'result.passes'); - assert.nestedProperty(updateRequest.body, 'result.start'); - assert.nestedProperty(updateRequest.body, 'result.end'); - }); - it('should have startedAt larger than \'before\' hook log timestamp', () => { - assert.isObject(stepRequest.body); - assert.isNumber(stepRequest.body.startedAt); - assert.operator(stepRequest.body.startedAt, '>=', updateRequest.body.logs[0].timestamp); - assert.operator(stepRequest.body.startedAt, '>=', updateRequest.body.logs[1].timestamp); - }); - it('should have startedAt smaller than \'after\' hook log timestamp', () => { - assert.isObject(stepRequest.body); - assert.isNumber(stepRequest.body.startedAt); - assert.operator(stepRequest.body.startedAt, '<=', updateRequest.body.logs[2].timestamp); - }); - }); - }); - }); + assert.isObject(updateRequest.body) + assert.nestedPropertyVal(updateRequest.body, 'status', 'passed') + assert.nestedProperty(updateRequest.body, 'endedAt') + assert.nestedProperty(updateRequest.body, 'logs') + assert.isArray(updateRequest.body.logs) + assert.lengthOf(updateRequest.body.logs, 3) + assert.property(updateRequest.body.logs[0], 'timestamp') + assert.include(updateRequest.body.logs[0].content, 'Error object!') + assert.property(updateRequest.body.logs[1], 'timestamp') + assert.nestedPropertyVal( + updateRequest.body.logs[1], + 'content', + 'true' + ) + assert.property(updateRequest.body.logs[2], 'timestamp') + assert.nestedPropertyVal( + updateRequest.body.logs[2], + 'content', + 'using hooks.log to debug' + ) + assert.nestedProperty(updateRequest.body, 'result.tests') + assert.nestedProperty(updateRequest.body, 'result.failures') + assert.nestedProperty(updateRequest.body, 'result.errors') + assert.nestedProperty(updateRequest.body, 'result.passes') + assert.nestedProperty(updateRequest.body, 'result.start') + assert.nestedProperty(updateRequest.body, 'result.end') + }) + it("should have startedAt larger than 'before' hook log timestamp", () => { + assert.isObject(stepRequest.body) + assert.isNumber(stepRequest.body.startedAt) + assert.operator( + stepRequest.body.startedAt, + '>=', + updateRequest.body.logs[0].timestamp + ) + assert.operator( + stepRequest.body.startedAt, + '>=', + updateRequest.body.logs[1].timestamp + ) + }) + it("should have startedAt smaller than 'after' hook log timestamp", () => { + assert.isObject(stepRequest.body) + assert.isNumber(stepRequest.body.startedAt) + assert.operator( + stepRequest.body.startedAt, + '<=', + updateRequest.body.logs[2].timestamp + ) + }) + }) + }) + }) describe('when -o/--output is used to specify output file', () => { const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--reporter=xunit', - '--output=__test_file_output__.xml', - ]; + '--output=__test_file_output__.xml' + ] - before(done => runCLI(args, (err) => { - done(err); - })); + before((done) => + runCLI(args, (err) => { + done(err) + }) + ) - after(() => fs.unlinkSync(`${process.cwd()}/__test_file_output__.xml`)); + after(() => fs.unlinkSync(`${process.cwd()}/__test_file_output__.xml`)) - it('should create given file', () => assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output__.xml`))); - }); + it('should create given file', () => + assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output__.xml`))) + }) describe('when -o/--output is used multiple times to specify output files', () => { const args = [ @@ -212,75 +280,92 @@ describe('CLI - Reporters', () => { '--reporter=xunit', 
'--output=__test_file_output1__.xml', '--reporter=xunit', - '--output=__test_file_output2__.xml', - ]; + '--output=__test_file_output2__.xml' + ] - before(done => runCLI(args, (err) => { - done(err); - })); + before((done) => + runCLI(args, (err) => { + done(err) + }) + ) after(() => { - fs.unlinkSync(`${process.cwd()}/__test_file_output1__.xml`); - fs.unlinkSync(`${process.cwd()}/__test_file_output2__.xml`); - }); + fs.unlinkSync(`${process.cwd()}/__test_file_output1__.xml`) + fs.unlinkSync(`${process.cwd()}/__test_file_output2__.xml`) + }) it('should create given files', () => { - assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output1__.xml`)); - assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output2__.xml`)); - }); - }); + assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output1__.xml`)) + assert.isOk(fs.existsSync(`${process.cwd()}/__test_file_output2__.xml`)) + }) + }) describe('when -o/--output is used to specify output file but directory is not existent', () => { const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--reporter=xunit', - '--output=./__test_directory/__test_file_output__.xml', - ]; + '--output=./__test_directory/__test_file_output__.xml' + ] before((done) => { try { - fs.unlinkSync(`${process.cwd()}/__test_directory/__test_file_output__.xml`); + fs.unlinkSync( + `${process.cwd()}/__test_directory/__test_file_output__.xml` + ) } catch (error) { // Do nothing } runCLI(args, (err) => { - done(err); - }); - }); + done(err) + }) + }) after(() => { - fs.unlinkSync(`${process.cwd()}/__test_directory/__test_file_output__.xml`); - fs.rmdirSync(`${process.cwd()}/__test_directory`); - }); - - it('should create given file', () => assert.isOk(fs.existsSync(`${process.cwd()}/__test_directory/__test_file_output__.xml`))); - }); - - describe('when the \'apiary\' reporter fails', () => { - let apiaryApiUrl; - let cliInfo; + fs.unlinkSync( + `${process.cwd()}/__test_directory/__test_file_output__.xml` + ) + fs.rmdirSync(`${process.cwd()}/__test_directory`) + }) + + it('should create given file', () => + assert.isOk( + fs.existsSync( + `${process.cwd()}/__test_directory/__test_file_output__.xml` + ) + )) + }) + + describe("when the 'apiary' reporter fails", () => { + let apiaryApiUrl + let cliInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, - '--reporter=apiary', - ]; + '--reporter=apiary' + ] before((done) => { - apiaryApiUrl = process.env.APIARY_API_URL; + apiaryApiUrl = process.env.APIARY_API_URL - const nonExistentPort = DEFAULT_SERVER_PORT + 42; - process.env.APIARY_API_URL = `http://127.0.0.1:${nonExistentPort}`; + const nonExistentPort = DEFAULT_SERVER_PORT + 42 + process.env.APIARY_API_URL = `http://127.0.0.1:${nonExistentPort}` runCLI(args, (err, info) => { - cliInfo = info; - done(err); - }); - }); - after(() => { process.env.APIARY_API_URL = apiaryApiUrl; }); - - it('ends successfully', () => assert.equal(cliInfo.exitStatus, 0)); - it('prints error about Apiary API connection issues', () => assert.include(cliInfo.stderr, 'Apiary reporter could not connect to Apiary API')); - }); -}); + cliInfo = info + done(err) + }) + }) + after(() => { + process.env.APIARY_API_URL = apiaryApiUrl + }) + + it('ends successfully', () => assert.equal(cliInfo.exitStatus, 0)) + it('prints error about Apiary API connection issues', () => + assert.include( + cliInfo.stderr, + 'Apiary reporter could not connect to Apiary API' + )) + }) +}) diff --git 
a/test/integration/cli/server-process-cli-test.js b/test/integration/cli/server-process-cli-test.js index b3286f72b..85568f2df 100644 --- a/test/integration/cli/server-process-cli-test.js +++ b/test/integration/cli/server-process-cli-test.js @@ -1,184 +1,259 @@ -const { assert } = require('chai'); -const { - isProcessRunning, killAll, runCLI, createServer, DEFAULT_SERVER_PORT, -} = require('../helpers'); +import { assert } from 'chai' +import { + isProcessRunning, + killAll, + runCLI, + createServer, + DEFAULT_SERVER_PORT +} from '../helpers' -const NON_EXISTENT_PORT = DEFAULT_SERVER_PORT + 1; +const NON_EXISTENT_PORT = DEFAULT_SERVER_PORT + 1 describe('CLI - Server Process', () => { describe('when specified by URL', () => { - let server; - let serverRuntimeInfo; + let server + let serverRuntimeInfo before((done) => { - const app = createServer(); + const app = createServer() - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - app.get('/machines/willy', (req, res) => res.json({ type: 'bulldozer', name: 'willy' })); + app.get('/machines/willy', (req, res) => + res.json({ type: 'bulldozer', name: 'willy' }) + ) server = app.listen((err, info) => { - serverRuntimeInfo = info; - done(err); - }); - }); - - after(done => server.close(done)); + serverRuntimeInfo = info + done(err) + }) + }) + after((done) => server.close(done)) describe('when is running', () => { - let cliInfo; - const args = ['./test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`]; + let cliInfo + const args = [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${DEFAULT_SERVER_PORT}` + ] - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); + before((done) => + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + ) - it('should request /machines', () => assert.deepEqual(serverRuntimeInfo.requestCounts, { '/machines': 1 })); - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); - }); + it('should request /machines', () => + assert.deepEqual(serverRuntimeInfo.requestCounts, { '/machines': 1 })) + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) + }) describe('when is not running', () => { - let cliInfo; - const args = ['./test/fixtures/apiary.apib', `http://127.0.0.1:${NON_EXISTENT_PORT}`]; + let cliInfo + const args = [ + './test/fixtures/apiary.apib', + `http://127.0.0.1:${NON_EXISTENT_PORT}` + ] - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); + before((done) => + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + ) - it('should return understandable message', () => assert.include(cliInfo.stdout, 'Error connecting')); + it('should return understandable message', () => + assert.include(cliInfo.stdout, 'Error connecting')) it('should report error for all transactions', () => { - const occurences = (cliInfo.stdout.match(/Error connecting/g) || []).length; - assert.equal(occurences, 5); - }); - it('should return stats', () => assert.include(cliInfo.stdout, '5 errors')); - it('should exit with status 1', () => assert.equal(cliInfo.exitStatus, 1)); - }); - }); - + const occurrences = (cliInfo.stdout.match(/Error connecting/g) || []) + .length + assert.equal(occurrences, 5) + }) + it('should return stats', () => + assert.include(cliInfo.stdout, '5 errors')) + it('should exit with status 1', () => assert.equal(cliInfo.exitStatus, 1)) + })
+ }) describe('when specified by -g/--server', () => { - afterEach(done => killAll('test/fixtures/scripts/', done)); + afterEach((done) => killAll('test/fixtures/scripts/', done)) describe('when works as expected', () => { - let cliInfo; + let cliInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, `--server=node ./test/fixtures/scripts/dummy-server.js ${DEFAULT_SERVER_PORT}`, '--server-wait=1', - '--loglevel=debug', - ]; + '--loglevel=debug' + ] + + before((done) => + runCLI(args, (err, info) => { + if (err) { + throw err + } - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); + cliInfo = info + done(err) + }) + ) - it('should inform about starting server with custom command', () => assert.include(cliInfo.stderr, 'Starting backend server process with command')); - it('should redirect server\'s welcome message', () => assert.include(cliInfo.stdout, `Dummy server listening on port ${DEFAULT_SERVER_PORT}`)); - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); - }); + it('should inform about starting server with custom command', () => + assert.include( + cliInfo.stderr, + 'Starting backend server process with command' + )) + it("should redirect server's welcome message", () => + assert.include( + cliInfo.stdout, + `Dummy server listening on port ${DEFAULT_SERVER_PORT}` + )) + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) + }) describe('when it fails to start', () => { - let cliInfo; + let cliInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--server=/foo/bar/baz', '--server-wait=1', - '--loglevel=debug', - ]; - - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); - - it('should inform about starting server with custom command', () => assert.include(cliInfo.stderr, 'Starting backend server process with command')); - it('should report problem with server process spawn', () => assert.include(cliInfo.stderr, 'Command to start backend server process failed, exiting Dredd')); - it('should exit with status 1', () => assert.equal(cliInfo.exitStatus, 1)); - }); - - for (const scenario of [{ - description: 'when crashes before requests', - apiDescriptionDocument: './test/fixtures/single-get.apib', - server: 'node test/fixtures/scripts/exit-3.js', - expectServerBoot: false, - }, - { - description: 'when crashes during requests', - apiDescriptionDocument: './test/fixtures/apiary.apib', - server: `node test/fixtures/scripts/dummy-server-crash.js ${DEFAULT_SERVER_PORT}`, - expectServerBoot: true, - }, - { - description: 'when killed before requests', - apiDescriptionDocument: './test/fixtures/single-get.apib', - server: 'node test/fixtures/scripts/kill-self.js', - expectServerBoot: false, - }, - { - description: 'when killed during requests', - apiDescriptionDocument: './test/fixtures/apiary.apib', - server: `node test/fixtures/scripts/dummy-server-kill.js ${DEFAULT_SERVER_PORT}`, - expectServerBoot: true, - }, + '--loglevel=debug' + ] + + before((done) => + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + ) + + it('should inform about starting server with custom command', () => + assert.include( + cliInfo.stderr, + 'Starting backend server process with command' + )) + it('should report problem with server process spawn', () => + assert.include( + cliInfo.stderr, + 'Command to start backend server process failed, exiting Dredd' + )) + it('should exit with status 1', () => 
assert.equal(cliInfo.exitStatus, 1)) + }) + + for (const scenario of [ + { + description: 'when crashes before requests', + apiDescriptionDocument: './test/fixtures/single-get.apib', + server: 'node test/fixtures/scripts/exit-3.js', + expectServerBoot: false + }, + { + description: 'when crashes during requests', + apiDescriptionDocument: './test/fixtures/apiary.apib', + server: `node test/fixtures/scripts/dummy-server-crash.js ${DEFAULT_SERVER_PORT}`, + expectServerBoot: true + }, + { + description: 'when killed before requests', + apiDescriptionDocument: './test/fixtures/single-get.apib', + server: 'node test/fixtures/scripts/kill-self.js', + expectServerBoot: false + }, + { + description: 'when killed during requests', + apiDescriptionDocument: './test/fixtures/apiary.apib', + server: `node test/fixtures/scripts/dummy-server-kill.js ${DEFAULT_SERVER_PORT}`, + expectServerBoot: true + } ]) { describe(scenario.description, () => { - let cliInfo; + let cliInfo const args = [ scenario.apiDescriptionDocument, `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, `--server=${scenario.server}`, '--server-wait=1', - '--loglevel=debug', - ]; + '--loglevel=debug' + ] - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); + before((done) => + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + ) - it('should inform about starting server with custom command', () => assert.include(cliInfo.stderr, 'Starting backend server process with command')); + it('should inform about starting server with custom command', () => + assert.include( + cliInfo.stderr, + 'Starting backend server process with command' + )) if (scenario.expectServerBoot) { - it('should redirect server\'s boot message', () => assert.include(cliInfo.stdout, `Dummy server listening on port ${DEFAULT_SERVER_PORT}`)); + it("should redirect server's boot message", () => + assert.include( + cliInfo.stdout, + `Dummy server listening on port ${DEFAULT_SERVER_PORT}` + )) } - it('the server should not be running', done => isProcessRunning('test/fixtures/scripts/', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); - it('should report problems with connection to server', () => assert.include(cliInfo.stderr, 'Error connecting to server')); - it('should exit with status 1', () => assert.equal(cliInfo.exitStatus, 1)); - }); + it('the server should not be running', (done) => + isProcessRunning('test/fixtures/scripts/', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) + it('should report problems with connection to server', () => + assert.include(cliInfo.stderr, 'Error connecting to server')) + it('should exit with status 1', () => + assert.equal(cliInfo.exitStatus, 1)) + }) } - describe('when didn\'t terminate and had to be killed by Dredd', () => { - let cliInfo; + describe("when didn't terminate and had to be killed by Dredd", () => { + let cliInfo const args = [ './test/fixtures/single-get.apib', `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, `--server=node test/fixtures/scripts/dummy-server-ignore-term.js ${DEFAULT_SERVER_PORT}`, '--server-wait=1', - '--loglevel=debug', - ]; - - before(done => runCLI(args, (err, info) => { - cliInfo = info; - done(err); - })); - - it('should inform about starting server with custom command', () => assert.include(cliInfo.stderr, 'Starting backend server process with command')); - it('should inform about gracefully terminating the server', () => assert.include(cliInfo.stderr, 'Gracefully terminating the backend server 
process')); - it('should redirect server\'s message about ignoring termination', () => assert.include(cliInfo.stdout, 'ignoring termination')); - it('should inform about forcefully killing the server', () => assert.include(cliInfo.stderr, 'Killing the backend server process')); - it('the server should not be running', done => isProcessRunning('test/fixtures/scripts/', (err, isRunning) => { - if (!err) { assert.isFalse(isRunning); } - done(err); - })); - it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)); - }); - }); -}); + '--loglevel=debug' + ] + + before((done) => + runCLI(args, (err, info) => { + cliInfo = info + done(err) + }) + ) + + it('should inform about starting server with custom command', () => + assert.include( + cliInfo.stderr, + 'Starting backend server process with command' + )) + it('should inform about gracefully terminating the server', () => + assert.include( + cliInfo.stderr, + 'Gracefully terminating the backend server process' + )) + it("should redirect server's message about ignoring termination", () => + assert.include(cliInfo.stdout, 'ignoring termination')) + it('should inform about forcefully killing the server', () => + assert.include(cliInfo.stderr, 'Killing the backend server process')) + it('the server should not be running', (done) => + isProcessRunning('test/fixtures/scripts/', (err, isRunning) => { + if (!err) { + assert.isFalse(isRunning) + } + done(err) + })) + it('should exit with status 0', () => assert.equal(cliInfo.exitStatus, 0)) + }) + }) +}) diff --git a/test/integration/configuration/resolveConfig-test.js b/test/integration/configuration/resolveConfig-test.js index 72953d67f..25f660ce0 100644 --- a/test/integration/configuration/resolveConfig-test.js +++ b/test/integration/configuration/resolveConfig-test.js @@ -1,6 +1,9 @@ -const { assert } = require('chai'); -const { EventEmitter } = require('events'); -const { DEFAULT_CONFIG, resolveConfig } = require('../../../lib/configuration/applyConfiguration'); +import { assert } from 'chai' +import { EventEmitter } from 'events' +import { + DEFAULT_CONFIG, + resolveConfig +} from '../../../lib/configuration/applyConfiguration' describe('resolveConfig()', () => { describe('when flattening config', () => { @@ -9,20 +12,20 @@ describe('resolveConfig()', () => { path: './foo.apib', custom: { apiaryApiKey: 'the-key', - apiaryApiName: 'the-api-name', - }, - }, - }); + apiaryApiName: 'the-api-name' + } + } + }) it('removes nested "options" key', () => { - assert.doesNotHaveAllKeys(config, 'options'); - }); + assert.doesNotHaveAllKeys(config, 'options') + }) it('moves options on the rool level', () => { - assert.containsAllKeys(config, ['path', 'custom']); - assert.containsAllKeys(config.custom, ['apiaryApiKey', 'apiaryApiName']); - }); - }); + assert.containsAllKeys(config, ['path', 'custom']) + assert.containsAllKeys(config.custom, ['apiaryApiKey', 'apiaryApiName']) + }) + }) describe('when merging with default options', () => { const { config } = resolveConfig({ @@ -31,148 +34,148 @@ describe('resolveConfig()', () => { path: './foo.apib', custom: { apiaryApiKey: 'the-key', - apiaryApiName: 'the-api-name', - }, - }, - }); + apiaryApiName: 'the-api-name' + } + } + }) it('contains default options', () => { - assert.hasAllKeys(config, Object.keys(DEFAULT_CONFIG).concat('emitter')); - }); + assert.hasAllKeys(config, Object.keys(DEFAULT_CONFIG).concat('emitter')) + }) it('overrides default options with custom ones', () => { - assert.deepEqual(config.path, ['./foo.apib']); - }); + 
assert.deepEqual(config.path, ['./foo.apib']) + }) describe('deep merges "custom" properties', () => { it('preserves default "cwd" property', () => { - assert.equal(config.custom.cwd, DEFAULT_CONFIG.custom.cwd); - }); + assert.equal(config.custom.cwd, DEFAULT_CONFIG.custom.cwd) + }) it('includes custom properties', () => { - assert.equal(config.custom.apiaryApiKey, 'the-key'); - assert.equal(config.custom.apiaryApiName, 'the-api-name'); - }); - }); - }); + assert.equal(config.custom.apiaryApiKey, 'the-key') + assert.equal(config.custom.apiaryApiName, 'the-api-name') + }) + }) + }) // Options describe('option: server', () => { describe('when no "server" set', () => { const { config: nextConfig } = resolveConfig({ - path: [], - }); + path: [] + }) it('has default "endpoint" option value', () => { - assert.propertyVal(nextConfig, 'endpoint', DEFAULT_CONFIG.endpoint); - }); + assert.propertyVal(nextConfig, 'endpoint', DEFAULT_CONFIG.endpoint) + }) it('has no "server" option', () => { - assert.notProperty(nextConfig, 'server'); - }); - }); + assert.notProperty(nextConfig, 'server') + }) + }) describe('when "server" set', () => { const { config: nextConfig } = resolveConfig({ - server: 'http://127.0.0.1', - }); + server: 'http://127.0.0.1' + }) it('sets "endpoint" based on "server" value', () => { - assert.propertyVal(nextConfig, 'endpoint', 'http://127.0.0.1'); - }); + assert.propertyVal(nextConfig, 'endpoint', 'http://127.0.0.1') + }) it('removes deprecated "server" root option', () => { - assert.notProperty(nextConfig, 'server'); - }); - }); + assert.notProperty(nextConfig, 'server') + }) + }) describe('when both "server" and "options.endpoint" set', () => { const { config } = resolveConfig({ server: 'http://127.0.0.1', options: { - endpoint: 'https://apiary.io', - }, - }); + endpoint: 'https://apiary.io' + } + }) it('treats "options.endpoint" as higher priority', () => { - assert.propertyVal(config, 'endpoint', 'https://apiary.io'); - }); + assert.propertyVal(config, 'endpoint', 'https://apiary.io') + }) it('removes deprecated "server" root option', () => { - assert.notProperty(config, 'server'); - }); - }); + assert.notProperty(config, 'server') + }) + }) describe('when "options.server" is set', () => { const { config: nextConfig } = resolveConfig({ options: { - server: 'npm start', - }, - }); + server: 'npm start' + } + }) it('coerces to "server" root options', () => { - assert.propertyVal(nextConfig, 'server', 'npm start'); - }); - }); + assert.propertyVal(nextConfig, 'server', 'npm start') + }) + }) describe('when both root "server" and "options.server" set', () => { const { config } = resolveConfig({ server: 'http://127.0.0.1', options: { - server: 'npm start', - }, - }); + server: 'npm start' + } + }) it('coerces root "server" to "endpoint"', () => { - assert.propertyVal(config, 'endpoint', 'http://127.0.0.1'); - }); + assert.propertyVal(config, 'endpoint', 'http://127.0.0.1') + }) it('coerces "options.server" to root "server"', () => { - assert.propertyVal(config, 'server', 'npm start'); - }); - }); + assert.propertyVal(config, 'server', 'npm start') + }) + }) describe('when root "server", "options.endpoint" and "options.server" set', () => { const { config } = resolveConfig({ server: 'http://127.0.0.1', options: { server: 'npm start', - endpoint: 'https://apiary.io', - }, - }); + endpoint: 'https://apiary.io' + } + }) it('coerces "options.server" to root "server" option', () => { - assert.propertyVal(config, 'server', 'npm start'); - }); + assert.propertyVal(config, 'server', 'npm 
start') + }) it('takes "options.endpoint" as a priority over root "server"', () => { - assert.propertyVal(config, 'endpoint', 'https://apiary.io'); - }); - }); - }); + assert.propertyVal(config, 'endpoint', 'https://apiary.io') + }) + }) + }) // describe('option: emitter', () => { describe('with default configuration', () => { - const { config } = resolveConfig({}); + const { config } = resolveConfig({}) it('has default emitter', () => { - assert.instanceOf(config.emitter, EventEmitter); - }); - }); + assert.instanceOf(config.emitter, EventEmitter) + }) + }) describe('when provided custom emitter', () => { - let emitterCalled = false; - const customEmitter = new EventEmitter(); + let emitterCalled = false + const customEmitter = new EventEmitter() customEmitter.addListener('test', () => { - emitterCalled = true; - }); + emitterCalled = true + }) const { config } = resolveConfig({ - emitter: customEmitter, - }); + emitter: customEmitter + }) it('uses custom event emitter', () => { - config.emitter.emit('test'); - assert.isTrue(emitterCalled); - }); - }); - }); -}); + config.emitter.emit('test') + assert.isTrue(emitterCalled) + }) + }) + }) +}) diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 0f9150406..bc27bbabb 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -1,17 +1,17 @@ -const async = require('async'); -const bodyParser = require('body-parser'); -const clone = require('clone'); -const express = require('express'); -const fs = require('fs'); -const https = require('https'); -const path = require('path'); -const ps = require('ps-node'); -const spawn = require('cross-spawn'); - -const logger = require('../../lib/logger'); -const reporterOutputLogger = require('../../lib/reporters/reporterOutputLogger'); - -const DEFAULT_SERVER_PORT = 9876; +import async from 'async'; +import bodyParser from 'body-parser'; +import clone from 'clone'; +import express from 'express'; +import fs from 'fs'; +import https from 'https'; +import path from 'path'; +import ps from 'ps-node'; +import spawn from 'cross-spawn'; + +import logger from '../../lib/logger'; +import reporterOutputLogger from '../../lib/reporters/reporterOutputLogger'; + +export const DEFAULT_SERVER_PORT = 9876; const DREDD_BIN = require.resolve('../../bin/dredd'); // Records logging during runtime of a given function. Given function @@ -22,9 +22,10 @@ const DREDD_BIN = require.resolve('../../bin/dredd'); // - args (array) - array of all arguments the 'next' callback obtained // from the 'fn' function // - logging (string) - the recorded logging output -function recordLogging(fn, callback) { +export const recordLogging = (fn, callback) => { const loggerSilent = !!logger.transports.console.silent; - const reporterOutputLoggerSilent = !!reporterOutputLogger.transports.console.silent; + const reporterOutputLoggerSilent = !!reporterOutputLogger.transports.console + .silent; // Supress Dredd's console output (remove if debugging) logger.transports.console.silent = true; @@ -47,7 +48,7 @@ function recordLogging(fn, callback) { callback(null, args, logging); }); -} +}; // Helper function which records incoming server request to given // server runtime info object. 
@@ -62,15 +63,19 @@ function recordServerRequest(serverRuntimeInfo, req) { method: req.method, url: req.url, headers: clone(req.headers), - body: clone(req.body), + body: clone(req.body) }; serverRuntimeInfo.lastRequest = recordedReq; - if (!serverRuntimeInfo.requests[req.url]) { serverRuntimeInfo.requests[req.url] = []; } + if (!serverRuntimeInfo.requests[req.url]) { + serverRuntimeInfo.requests[req.url] = []; + } serverRuntimeInfo.requests[req.url].push(recordedReq); - if (!serverRuntimeInfo.requestCounts[req.url]) { serverRuntimeInfo.requestCounts[req.url] = 0; } + if (!serverRuntimeInfo.requestCounts[req.url]) { + serverRuntimeInfo.requestCounts[req.url] = 0; + } serverRuntimeInfo.requestCounts[req.url] += 1; } @@ -79,7 +84,7 @@ function getSSLCredentials() { const httpsDir = path.join(__dirname, '../fixtures/https'); return { key: fs.readFileSync(path.join(httpsDir, 'server.key'), 'utf8'), - cert: fs.readFileSync(path.join(httpsDir, 'server.crt'), 'utf8'), + cert: fs.readFileSync(path.join(httpsDir, 'server.crt'), 'utf8') }; } @@ -99,9 +104,10 @@ function getSSLCredentials() { // - body (string) // - requestCounts (object) // - *endpointUrl*: 0 (number, default) - number of requests to the endpoint -function createServer(options = {}) { +export const createServer = (options = {}) => { const protocol = options.protocol || 'http'; - const bodyParserInstance = options.bodyParser || bodyParser.json({ size: '5mb' }); + const bodyParserInstance = + options.bodyParser || bodyParser.json({ size: '5mb' }); const serverRuntimeInfo = { requestedOnce: false, @@ -115,7 +121,7 @@ function createServer(options = {}) { this.lastRequest = null; this.requests = {}; this.requestCounts = {}; - }, + } }; let app = express(); @@ -125,7 +131,9 @@ function createServer(options = {}) { res.type('json').status(200); // sensible defaults, can be overriden next(); }); - if (protocol === 'https') { app = https.createServer(getSSLCredentials(), app); } + if (protocol === 'https') { + app = https.createServer(getSSLCredentials(), app); + } // Monkey-patching the app.listen() function. The 'port' argument // is made optional, defaulting to the 'DEFAULT_SERVER_PORT' value. @@ -133,52 +141,78 @@ function createServer(options = {}) { // runtime info about the server (what requests it got etc.). const originalListen = app.listen; app.listen = function listen(port, callback) { - if (typeof port === 'function') { [callback, port] = Array.from([port, DEFAULT_SERVER_PORT]); } - return originalListen.call(this, port, err => callback(err, serverRuntimeInfo)); + if (typeof port === 'function') { + [callback, port] = Array.from([port, DEFAULT_SERVER_PORT]); + } + return originalListen.call(this, port, (err) => + callback(err, serverRuntimeInfo) + ); }; return app; -} +}; // Runs given Dredd class instance against localhost server on given (or default) // server port. Automatically records all Dredd logging ouput. The error isn't passed // as the first argument, but as part of the result, which is convenient in // tests. Except of 'err' and 'logging' returns also 'stats' which is what the Dredd // instance returns as test results. 
-function runDredd(dredd, serverPort, callback) { - if (typeof serverPort === 'function') { [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); } - if (dredd.configuration.endpoint == null) { dredd.configuration.endpoint = `http://127.0.0.1:${serverPort}`; } +export const runDredd = (dredd, serverPort, callback) => { + if (typeof serverPort === 'function') { + [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); + } + if (dredd.configuration.endpoint == null) { + dredd.configuration.endpoint = `http://127.0.0.1:${serverPort}`; + } - if (dredd.configuration.options == null) { dredd.configuration.options = {}; } - if (dredd.configuration.options.loglevel == null) { dredd.configuration.options.loglevel = 'debug'; } + if (dredd.configuration.options == null) { + dredd.configuration.options = {}; + } + if (dredd.configuration.options.loglevel == null) { + dredd.configuration.options.loglevel = 'debug'; + } let stats; - recordLogging(next => dredd.run(next), + recordLogging( + (next) => dredd.run(next), (err, args, logging) => { - if (err) { return callback(err); } + if (err) { + return callback(err); + } [err, stats] = Array.from(args); callback(null, { err, stats, logging }); - }); -} + } + ); +}; // Runs given Express.js server instance and then runs given Dredd class instance. // Collects their runtime information and provides it to the callback. -function runDreddWithServer(dredd, app, serverPort, callback) { - if (typeof serverPort === 'function') { [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); } +export const runDreddWithServer = (dredd, app, serverPort, callback) => { + if (typeof serverPort === 'function') { + [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); + } const server = app.listen(serverPort, (err, serverRuntimeInfo) => { - if (err) { return callback(err); } - - runDredd(dredd, serverPort, (error, dreddRuntimeInfo) => server.close(() => callback(error, { server: serverRuntimeInfo, dredd: dreddRuntimeInfo }))); + if (err) { + return callback(err); + } + + runDredd(dredd, serverPort, (error, dreddRuntimeInfo) => + server.close(() => + callback(error, { server: serverRuntimeInfo, dredd: dreddRuntimeInfo }) + ) + ); }); -} +}; // Runs CLI command with given arguments. Records and provides stdout, stderr // and also 'output', which is the two combined. Also provides 'exitStatus' // of the process. function runCommand(command, args, spawnOptions = {}, callback) { - if (typeof spawnOptions === 'function') { [callback, spawnOptions] = Array.from([spawnOptions, undefined]); } + if (typeof spawnOptions === 'function') { + [callback, spawnOptions] = Array.from([spawnOptions, undefined]); + } let stdout = ''; let stderr = ''; @@ -195,34 +229,50 @@ function runCommand(command, args, spawnOptions = {}, callback) { output += data; }); - cli.on('exit', exitStatus => callback(null, { - stdout, stderr, output, exitStatus, - })); + cli.on('exit', (exitStatus) => + callback(null, { + stdout, + stderr, + output, + exitStatus + }) + ); } // Runs Dredd as a CLI command, with given arguments. -const runCLI = (args, spawnOptions, callback) => runCommand('node', [DREDD_BIN].concat(args), spawnOptions, callback); +export const runCLI = (args, spawnOptions, callback) => + runCommand('node', [DREDD_BIN].concat(args), spawnOptions, callback); // Runs given Express.js server instance and then runs Dredd command with given // arguments. Collects their runtime information and provides it to the callback. 
-function runCLIWithServer(args, app, serverPort, callback) { - if (typeof serverPort === 'function') { [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); } +export const runCLIWithServer = (args, app, serverPort, callback) => { + if (typeof serverPort === 'function') { + [callback, serverPort] = Array.from([serverPort, DEFAULT_SERVER_PORT]); + } const server = app.listen(serverPort, (err, serverRuntimeInfo) => { - if (err) { return callback(err); } - - runCLI(args, (error, cliInfo) => server.close(() => callback(error, { server: serverRuntimeInfo, dredd: cliInfo }))); + if (err) { + return callback(err); + } + + runCLI(args, (error, cliInfo) => + server.close(() => + callback(error, { server: serverRuntimeInfo, dredd: cliInfo }) + ) + ); }); -} +}; // Checks whether there's a process with name matching given pattern. -function isProcessRunning(pattern, callback) { - return ps.lookup({ arguments: pattern }, (err, processList) => callback(err, !!(processList ? processList.length : undefined))); -} +export const isProcessRunning = (pattern, callback) => { + return ps.lookup({ arguments: pattern }, (err, processList) => + callback(err, !!(processList ? processList.length : undefined)) + ); +}; // Kills process with given PID if the process exists. Otherwise // does nothing. -function kill(pid, callback) { +export const kill = (pid, callback) => { if (process.platform === 'win32') { const taskkill = spawn('taskkill', ['/F', '/T', '/PID', pid]); return taskkill.on('exit', () => callback()); @@ -230,32 +280,23 @@ function kill(pid, callback) { } try { process.kill(pid, 'SIGKILL'); - } catch (error) { } + } catch (error) {} // If the PID doesn't exist, process.kill() throws - we do not care process.nextTick(callback); -} - +}; // Kills processes which have names matching given pattern. Does // nothing if there are no matching processes. 
-function killAll(pattern, callback) { +export const killAll = (pattern, callback) => { return ps.lookup({ arguments: pattern }, (err, processList) => { - if (err || !processList.length) { return callback(err); } - - async.each(processList, (processListItem, next) => kill(processListItem.pid, next), - callback); + if (err || !processList.length) { + return callback(err); + } + + async.each( + processList, + (processListItem, next) => kill(processListItem.pid, next), + callback + ); }); -} - -module.exports = { - DEFAULT_SERVER_PORT, - recordLogging, - createServer, - runDredd, - runDreddWithServer, - runCLI, - runCLIWithServer, - isProcessRunning, - kill, - killAll, }; diff --git a/test/integration/js-interface-test.js b/test/integration/js-interface-test.js index cba28b4d2..12fc13a1d 100644 --- a/test/integration/js-interface-test.js +++ b/test/integration/js-interface-test.js @@ -1,9 +1,8 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); - -const Dredd = require('../../lib/Dredd'); -const { createServer, runDredd, runDreddWithServer } = require('./helpers'); +import sinon from 'sinon' +import { assert } from 'chai' +import Dredd from '../../lib/Dredd' +import { createServer, runDredd, runDreddWithServer } from './helpers' const EXPECTED_STATS_KEYS = [ 'tests', @@ -13,238 +12,247 @@ const EXPECTED_STATS_KEYS = [ 'skipped', 'start', 'end', - 'duration', -]; - + 'duration' +] describe('Running Dredd from JavaScript', () => { describe('when the testing is successful', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/single-get.apib' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('requests the server', () => { - assert.isTrue(runtimeInfo.server.requestedOnce); - }); + assert.isTrue(runtimeInfo.server.requestedOnce) + }) it('passes no error to the callback', () => { - assert.isNotOk(runtimeInfo.dredd.err); - }); + assert.isNotOk(runtimeInfo.dredd.err) + }) it('passes expected stats to the callback', () => { - assert.hasAllKeys(runtimeInfo.dredd.stats, EXPECTED_STATS_KEYS); - }); + assert.hasAllKeys(runtimeInfo.dredd.stats, EXPECTED_STATS_KEYS) + }) it('performs 1 test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - }); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + }) it('finishes with 0 failing tests', () => { - assert.equal(runtimeInfo.dredd.stats.failures, 0); - }); + assert.equal(runtimeInfo.dredd.stats.failures, 0) + }) it('finishes with 0 erroring tests', () => { - assert.equal(runtimeInfo.dredd.stats.errors, 0); - }); + assert.equal(runtimeInfo.dredd.stats.errors, 0) + }) it('finishes with 1 passing test', () => { - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) it('finishes with 0 skipped tests', () => { - assert.equal(runtimeInfo.dredd.stats.skipped, 0); - }); + assert.equal(runtimeInfo.dredd.stats.skipped, 0) + }) it('records start', () => { - assert.instanceOf(runtimeInfo.dredd.stats.start, Date); - }); + assert.instanceOf(runtimeInfo.dredd.stats.start, Date) + }) it('records end', () 
=> { - assert.instanceOf(runtimeInfo.dredd.stats.end, Date); - }); + assert.instanceOf(runtimeInfo.dredd.stats.end, Date) + }) it('records duration', () => { - assert.isAbove(runtimeInfo.dredd.stats.duration, 0); - }); - }); + assert.isAbove(runtimeInfo.dredd.stats.duration, 0) + }) + }) describe('when the testing is failing', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ foo: 'bar' }])); + const app = createServer() + app.get('/machines', (req, res) => res.json([{ foo: 'bar' }])) - const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/single-get.apib' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('requests the server', () => { - assert.isTrue(runtimeInfo.server.requestedOnce); - }); + assert.isTrue(runtimeInfo.server.requestedOnce) + }) it('passes no error to the callback', () => { - assert.isNotOk(runtimeInfo.dredd.err); - }); + assert.isNotOk(runtimeInfo.dredd.err) + }) it('passes expected stats to the callback', () => { - assert.hasAllKeys(runtimeInfo.dredd.stats, EXPECTED_STATS_KEYS); - }); + assert.hasAllKeys(runtimeInfo.dredd.stats, EXPECTED_STATS_KEYS) + }) it('performs 1 test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - }); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + }) it('finishes with 1 failing test', () => { - assert.equal(runtimeInfo.dredd.stats.failures, 1); - }); + assert.equal(runtimeInfo.dredd.stats.failures, 1) + }) it('finishes with 0 erroring tests', () => { - assert.equal(runtimeInfo.dredd.stats.errors, 0); - }); + assert.equal(runtimeInfo.dredd.stats.errors, 0) + }) it('finishes with 0 passing tests', () => { - assert.equal(runtimeInfo.dredd.stats.passes, 0); - }); + assert.equal(runtimeInfo.dredd.stats.passes, 0) + }) it('finishes with 0 skipped tests', () => { - assert.equal(runtimeInfo.dredd.stats.skipped, 0); - }); + assert.equal(runtimeInfo.dredd.stats.skipped, 0) + }) it('records start', () => { - assert.instanceOf(runtimeInfo.dredd.stats.start, Date); - }); + assert.instanceOf(runtimeInfo.dredd.stats.start, Date) + }) it('records end', () => { - assert.instanceOf(runtimeInfo.dredd.stats.end, Date); - }); + assert.instanceOf(runtimeInfo.dredd.stats.end, Date) + }) it('records duration', () => { - assert.isAbove(runtimeInfo.dredd.stats.duration, 0); - }); - }); + assert.isAbove(runtimeInfo.dredd.stats.duration, 0) + }) + }) describe('when the testing is erroring', () => { - let dreddRuntimeInfo; + let dreddRuntimeInfo before((done) => { - const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/single-get.apib' } + }) runDredd(dredd, (err, info) => { - dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes no error to the callback', () => { - assert.isNotOk(dreddRuntimeInfo.err); - }); + assert.isNotOk(dreddRuntimeInfo.err) + }) it('passes expected stats to the callback', () => { - assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS); - }); + assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS) + }) it('performs 1 test', () => { - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) it('finishes with 0 failing tests', () => { - 
assert.equal(dreddRuntimeInfo.stats.failures, 0); - }); + assert.equal(dreddRuntimeInfo.stats.failures, 0) + }) it('finishes with 1 erroring test', () => { - assert.equal(dreddRuntimeInfo.stats.errors, 1); - }); + assert.equal(dreddRuntimeInfo.stats.errors, 1) + }) it('finishes with 0 passing tests', () => { - assert.equal(dreddRuntimeInfo.stats.passes, 0); - }); + assert.equal(dreddRuntimeInfo.stats.passes, 0) + }) it('finishes with 0 skipped tests', () => { - assert.equal(dreddRuntimeInfo.stats.skipped, 0); - }); + assert.equal(dreddRuntimeInfo.stats.skipped, 0) + }) it('records start', () => { - assert.instanceOf(dreddRuntimeInfo.stats.start, Date); - }); + assert.instanceOf(dreddRuntimeInfo.stats.start, Date) + }) it('records end', () => { - assert.instanceOf(dreddRuntimeInfo.stats.end, Date); - }); + assert.instanceOf(dreddRuntimeInfo.stats.end, Date) + }) it('records duration', () => { - assert.isAbove(dreddRuntimeInfo.stats.duration, 0); - }); - }); + assert.isAbove(dreddRuntimeInfo.stats.duration, 0) + }) + }) describe('when API descriptions loading is erroring', () => { - let dreddRuntimeInfo; + let dreddRuntimeInfo before((done) => { - const dredd = new Dredd({ options: { path: '__non-existing__.apib' } }); + const dredd = new Dredd({ options: { path: '__non-existing__.apib' } }) runDredd(dredd, (err, info) => { - dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes error to the callback', () => { - assert.instanceOf(dreddRuntimeInfo.err, Error); - }); + assert.instanceOf(dreddRuntimeInfo.err, Error) + }) it('passes expected stats to the callback', () => { - assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS); - }); + assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS) + }) it('performs 0 tests', () => { - assert.equal(dreddRuntimeInfo.stats.tests, 0); - }); + assert.equal(dreddRuntimeInfo.stats.tests, 0) + }) it('finishes with 0 failing tests', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 0); - }); + assert.equal(dreddRuntimeInfo.stats.failures, 0) + }) it('finishes with 0 erroring tests', () => { - assert.equal(dreddRuntimeInfo.stats.errors, 0); - }); + assert.equal(dreddRuntimeInfo.stats.errors, 0) + }) it('finishes with 0 passing tests', () => { - assert.equal(dreddRuntimeInfo.stats.passes, 0); - }); + assert.equal(dreddRuntimeInfo.stats.passes, 0) + }) it('finishes with 0 skipped tests', () => { - assert.equal(dreddRuntimeInfo.stats.skipped, 0); - }); + assert.equal(dreddRuntimeInfo.stats.skipped, 0) + }) it('does not record start', () => { - assert.equal(dreddRuntimeInfo.stats.start, 0); - }); + assert.equal(dreddRuntimeInfo.stats.start, 0) + }) it('does not record end', () => { - assert.equal(dreddRuntimeInfo.stats.end, 0); - }); + assert.equal(dreddRuntimeInfo.stats.end, 0) + }) it('does not record duration', () => { - assert.equal(dreddRuntimeInfo.stats.duration, 0); - }); - }); + assert.equal(dreddRuntimeInfo.stats.duration, 0) + }) + }) describe('when running transactions is erroring', () => { - let dreddRuntimeInfo; - const error = new Error('Ouch!'); + let dreddRuntimeInfo + const error = new Error('Ouch!') before((done) => { - const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib' } }); - sinon.stub(dredd.transactionRunner, 'run').callsArgWithAsync(1, error); + const dredd = new Dredd({ + options: { path: './test/fixtures/single-get.apib' } + }) + sinon.stub(dredd.transactionRunner, 'run').callsArgWithAsync(1, error) runDredd(dredd, (err, info) => { - 
dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes the error to the callback', () => { - assert.deepEqual(dreddRuntimeInfo.err, error); - }); + assert.deepEqual(dreddRuntimeInfo.err, error) + }) it('passes expected stats to the callback', () => { - assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS); - }); + assert.hasAllKeys(dreddRuntimeInfo.stats, EXPECTED_STATS_KEYS) + }) it('performs 0 tests', () => { - assert.equal(dreddRuntimeInfo.stats.tests, 0); - }); + assert.equal(dreddRuntimeInfo.stats.tests, 0) + }) it('finishes with 0 failing tests', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 0); - }); + assert.equal(dreddRuntimeInfo.stats.failures, 0) + }) it('finishes with 0 erroring tests', () => { - assert.equal(dreddRuntimeInfo.stats.errors, 0); - }); + assert.equal(dreddRuntimeInfo.stats.errors, 0) + }) it('finishes with 0 passing tests', () => { - assert.equal(dreddRuntimeInfo.stats.passes, 0); - }); + assert.equal(dreddRuntimeInfo.stats.passes, 0) + }) it('finishes with 0 skipped tests', () => { - assert.equal(dreddRuntimeInfo.stats.skipped, 0); - }); + assert.equal(dreddRuntimeInfo.stats.skipped, 0) + }) it('does not record start', () => { - assert.equal(dreddRuntimeInfo.stats.start, 0); - }); + assert.equal(dreddRuntimeInfo.stats.start, 0) + }) it('does not record end', () => { - assert.equal(dreddRuntimeInfo.stats.end, 0); - }); + assert.equal(dreddRuntimeInfo.stats.end, 0) + }) it('does not record duration', () => { - assert.equal(dreddRuntimeInfo.stats.duration, 0); - }); - }); -}); + assert.equal(dreddRuntimeInfo.stats.duration, 0) + }) + }) +}) diff --git a/test/integration/loading-api-descriptions-test.js b/test/integration/loading-api-descriptions-test.js index 0ccf3d5db..26e63df81 100644 --- a/test/integration/loading-api-descriptions-test.js +++ b/test/integration/loading-api-descriptions-test.js @@ -1,11 +1,10 @@ -const sinon = require('sinon'); -const path = require('path'); -const express = require('express'); -const { assert } = require('chai'); - -const { DEFAULT_SERVER_PORT } = require('./helpers'); -const Dredd = require('../../lib/Dredd'); +import sinon from 'sinon' +import * as path from 'path' +import express from 'express' +import { assert } from 'chai' +import { DEFAULT_SERVER_PORT } from './helpers' +import Dredd from '../../lib/Dredd' const EXPECTED_API_DESCRIPTION_PROPS = [ 'location', @@ -13,294 +12,443 @@ const EXPECTED_API_DESCRIPTION_PROPS = [ 'mediaType', 'apiElements', 'transactions', - 'annotations', -]; - + 'annotations' +] function createDredd(configuration) { - const dredd = new Dredd(configuration); + const dredd = new Dredd(configuration) dredd.transactionRunner = { config: sinon.stub(), - run: sinon.stub().yields(), - }; - return dredd; + run: sinon.stub().yields() + } + return dredd } - describe('Loading API descriptions', () => { describe('when the API description is specified by configuration', () => { - let dredd; + let dredd const content = ` FORMAT: 1A # Machines API # GET /machines + Response 200 (text/plain) - `; + ` before((done) => { - dredd = createDredd({ apiDescriptions: [content] }); - dredd.run(done); - }); + dredd = createDredd({ apiDescriptions: [content] }) + dredd.run(done) + }) it('loads the API description', () => { - assert.lengthOf(dredd.configuration.apiDescriptions, 1); - }); + assert.lengthOf(dredd.configuration.apiDescriptions, 1) + }) it('the API description has all expected data', () => { - 
assert.hasAllKeys(dredd.configuration.apiDescriptions[0], EXPECTED_API_DESCRIPTION_PROPS); - }); + assert.hasAllKeys( + dredd.configuration.apiDescriptions[0], + EXPECTED_API_DESCRIPTION_PROPS + ) + }) it('the location is set to the configuration', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'location', 'configuration.apiDescriptions[0]'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'location', + 'configuration.apiDescriptions[0]' + ) + }) it('the content is set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'content', content); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'content', + content + ) + }) it('the media type is set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'mediaType', 'text/vnd.apiblueprint'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'mediaType', + 'text/vnd.apiblueprint' + ) + }) it('the transactions are set', () => { - assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1); - assert.equal(dredd.configuration.apiDescriptions[0].transactions[0].name, 'Machines API > /machines > GET'); - }); + assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1) + assert.equal( + dredd.configuration.apiDescriptions[0].transactions[0].name, + 'Machines API > /machines > GET' + ) + }) it('the transaction runner is called with the transactions', () => { - assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1); - assert.equal(dredd.transactionRunner.run.firstCall.args[0][0].name, 'Machines API > /machines > GET'); - }); - }); + assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1) + assert.equal( + dredd.transactionRunner.run.firstCall.args[0][0].name, + 'Machines API > /machines > GET' + ) + }) + }) describe('when the API description is specified by a path', () => { - let dredd; + let dredd before((done) => { - dredd = createDredd({ options: { path: './test/fixtures/single-get.apib' } }); - dredd.run(done); - }); + dredd = createDredd({ + options: { path: './test/fixtures/single-get.apib' } + }) + dredd.run(done) + }) it('loads the API description', () => { - assert.lengthOf(dredd.configuration.apiDescriptions, 1); - }); + assert.lengthOf(dredd.configuration.apiDescriptions, 1) + }) it('the API description has all expected data', () => { - assert.hasAllKeys(dredd.configuration.apiDescriptions[0], EXPECTED_API_DESCRIPTION_PROPS); - }); + assert.hasAllKeys( + dredd.configuration.apiDescriptions[0], + EXPECTED_API_DESCRIPTION_PROPS + ) + }) it('the location is set to the path', () => { - assert.match(dredd.configuration.apiDescriptions[0].location, /single-get\.apib$/); - assert.isTrue(path.isAbsolute(dredd.configuration.apiDescriptions[0].location)); - }); + assert.match( + dredd.configuration.apiDescriptions[0].location, + /single-get\.apib$/ + ) + assert.isTrue( + path.isAbsolute(dredd.configuration.apiDescriptions[0].location) + ) + }) it('the content is set', () => { - assert.include(dredd.configuration.apiDescriptions[0].content, '# Machines collection [/machines]'); - }); + assert.include( + dredd.configuration.apiDescriptions[0].content, + '# Machines collection [/machines]' + ) + }) it('the media type is set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'mediaType', 'text/vnd.apiblueprint'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'mediaType', + 'text/vnd.apiblueprint' + ) + }) it('the transactions are set', () => { 
-      assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1);
-      assert.equal(dredd.configuration.apiDescriptions[0].transactions[0].name, 'Machines API > Machines > Machines collection > Get Machines');
-    });
+      assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1)
+      assert.equal(
+        dredd.configuration.apiDescriptions[0].transactions[0].name,
+        'Machines API > Machines > Machines collection > Get Machines'
+      )
+    })
     it('the transaction runner is called with the transactions', () => {
-      assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1);
-      assert.equal(dredd.transactionRunner.run.firstCall.args[0][0].name, 'Machines API > Machines > Machines collection > Get Machines');
-    });
-  });
+      assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1)
+      assert.equal(
+        dredd.transactionRunner.run.firstCall.args[0][0].name,
+        'Machines API > Machines > Machines collection > Get Machines'
+      )
+    })
+  })
   describe('when the API description is specified by a non-existing path', () => {
-    let error;
-    let dredd;
+    let error
+    let dredd
     before((done) => {
-      dredd = createDredd({ options: { path: '__non-existing__.apib' } });
-      dredd.run((err) => { error = err; done(); });
-    });
+      dredd = createDredd({ options: { path: '__non-existing__.apib' } })
+      dredd.run((err) => {
+        error = err
+        done()
+      })
+    })
     it('results in an error', () => {
-      assert.instanceOf(error, Error);
-    });
+      assert.instanceOf(error, Error)
+    })
     it('the error is descriptive', () => {
-      assert.equal(error.message, "Could not find any files on path: '__non-existing__.apib'");
-    });
+      assert.equal(
+        error.message,
+        "Could not find any files on path: '__non-existing__.apib'"
+      )
+    })
     it('aborts Dredd', () => {
-      assert.isFalse(dredd.transactionRunner.run.called);
-    });
-  });
+      assert.isFalse(dredd.transactionRunner.run.called)
+    })
+  })
   describe('when the API description is specified by a glob pattern', () => {
-    let dredd;
+    let dredd
     before((done) => {
-      dredd = createDredd({ options: { path: './test/fixtures/multifile/*.apib' } });
-      dredd.run(done);
-    });
+      dredd = createDredd({
+        options: { path: './test/fixtures/multifile/*.apib' }
+      })
+      dredd.run(done)
+    })
     it('loads the API descriptions', () => {
-      assert.lengthOf(dredd.configuration.apiDescriptions, 3);
-    });
+      assert.lengthOf(dredd.configuration.apiDescriptions, 3)
+    })
     it('the API descriptions have all expected data', () => {
-      assert.hasAllKeys(dredd.configuration.apiDescriptions[0], EXPECTED_API_DESCRIPTION_PROPS);
-      assert.hasAllKeys(dredd.configuration.apiDescriptions[1], EXPECTED_API_DESCRIPTION_PROPS);
-      assert.hasAllKeys(dredd.configuration.apiDescriptions[2], EXPECTED_API_DESCRIPTION_PROPS);
-    });
+      assert.hasAllKeys(
+        dredd.configuration.apiDescriptions[0],
+        EXPECTED_API_DESCRIPTION_PROPS
+      )
+      assert.hasAllKeys(
+        dredd.configuration.apiDescriptions[1],
+        EXPECTED_API_DESCRIPTION_PROPS
+      )
+      assert.hasAllKeys(
+        dredd.configuration.apiDescriptions[2],
+        EXPECTED_API_DESCRIPTION_PROPS
+      )
+    })
     it('the locations are set to the absolute paths', () => {
-      assert.match(dredd.configuration.apiDescriptions[0].location, /greeting\.apib$/);
-      assert.match(dredd.configuration.apiDescriptions[1].location, /message\.apib$/);
-      assert.match(dredd.configuration.apiDescriptions[2].location, /name\.apib$/);
-      assert.isTrue(path.isAbsolute(dredd.configuration.apiDescriptions[0].location));
-      assert.isTrue(path.isAbsolute(dredd.configuration.apiDescriptions[1].location));
-
assert.isTrue(path.isAbsolute(dredd.configuration.apiDescriptions[2].location)); - }); + assert.match( + dredd.configuration.apiDescriptions[0].location, + /greeting\.apib$/ + ) + assert.match( + dredd.configuration.apiDescriptions[1].location, + /message\.apib$/ + ) + assert.match( + dredd.configuration.apiDescriptions[2].location, + /name\.apib$/ + ) + assert.isTrue( + path.isAbsolute(dredd.configuration.apiDescriptions[0].location) + ) + assert.isTrue( + path.isAbsolute(dredd.configuration.apiDescriptions[1].location) + ) + assert.isTrue( + path.isAbsolute(dredd.configuration.apiDescriptions[2].location) + ) + }) it('the contents are set', () => { - assert.include(dredd.configuration.apiDescriptions[0].content, '# Greeting API'); - assert.include(dredd.configuration.apiDescriptions[1].content, '# Message API'); - assert.include(dredd.configuration.apiDescriptions[2].content, '# Name API'); - }); + assert.include( + dredd.configuration.apiDescriptions[0].content, + '# Greeting API' + ) + assert.include( + dredd.configuration.apiDescriptions[1].content, + '# Message API' + ) + assert.include( + dredd.configuration.apiDescriptions[2].content, + '# Name API' + ) + }) it('the media types are set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'mediaType', 'text/vnd.apiblueprint'); - assert.propertyVal(dredd.configuration.apiDescriptions[1], 'mediaType', 'text/vnd.apiblueprint'); - assert.propertyVal(dredd.configuration.apiDescriptions[2], 'mediaType', 'text/vnd.apiblueprint'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'mediaType', + 'text/vnd.apiblueprint' + ) + assert.propertyVal( + dredd.configuration.apiDescriptions[1], + 'mediaType', + 'text/vnd.apiblueprint' + ) + assert.propertyVal( + dredd.configuration.apiDescriptions[2], + 'mediaType', + 'text/vnd.apiblueprint' + ) + }) it('the transactions are set', () => { - assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1); - assert.lengthOf(dredd.configuration.apiDescriptions[1].transactions, 1); - assert.lengthOf(dredd.configuration.apiDescriptions[2].transactions, 1); - assert.equal(dredd.configuration.apiDescriptions[0].transactions[0].name, 'Greeting API > /greeting > GET'); - assert.equal(dredd.configuration.apiDescriptions[1].transactions[0].name, 'Message API > /message > GET'); - assert.equal(dredd.configuration.apiDescriptions[2].transactions[0].name, 'Name API > /name > GET'); - }); + assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1) + assert.lengthOf(dredd.configuration.apiDescriptions[1].transactions, 1) + assert.lengthOf(dredd.configuration.apiDescriptions[2].transactions, 1) + assert.equal( + dredd.configuration.apiDescriptions[0].transactions[0].name, + 'Greeting API > /greeting > GET' + ) + assert.equal( + dredd.configuration.apiDescriptions[1].transactions[0].name, + 'Message API > /message > GET' + ) + assert.equal( + dredd.configuration.apiDescriptions[2].transactions[0].name, + 'Name API > /name > GET' + ) + }) it('the transaction runner is called with the transactions', () => { - const transactions = dredd.transactionRunner.run.firstCall.args[0]; - assert.lengthOf(transactions, 3); - assert.equal(transactions[0].name, 'Greeting API > /greeting > GET'); - assert.equal(transactions[1].name, 'Message API > /message > GET'); - assert.equal(transactions[2].name, 'Name API > /name > GET'); - }); - }); + const transactions = dredd.transactionRunner.run.firstCall.args[0] + assert.lengthOf(transactions, 3) + 
assert.equal(transactions[0].name, 'Greeting API > /greeting > GET') + assert.equal(transactions[1].name, 'Message API > /message > GET') + assert.equal(transactions[2].name, 'Name API > /name > GET') + }) + }) describe('when the API description is specified by a glob pattern resolving to no files', () => { - let error; - let dredd; + let error + let dredd before((done) => { - dredd = createDredd({ options: { path: '__non-existing-*-glob__.apib' } }); - dredd.run((err) => { error = err; done(); }); - }); + dredd = createDredd({ options: { path: '__non-existing-*-glob__.apib' } }) + dredd.run((err) => { + error = err + done() + }) + }) it('results in an error', () => { - assert.instanceOf(error, Error); - }); + assert.instanceOf(error, Error) + }) it('the error is descriptive', () => { - assert.equal(error.message, "Could not find any files on path: '__non-existing-*-glob__.apib'"); - }); + assert.equal( + error.message, + "Could not find any files on path: '__non-existing-*-glob__.apib'" + ) + }) it('aborts Dredd', () => { - assert.isFalse(dredd.transactionRunner.run.called); - }); - }); + assert.isFalse(dredd.transactionRunner.run.called) + }) + }) describe('when the API description is specified by URL', () => { - let dredd; + let dredd const content = ` FORMAT: 1A # Machines API # GET /machines + Response 200 (text/plain) - `; + ` before((done) => { - const app = express(); + const app = express() app.get('/file.apib', (req, res) => { - res.type('text/vnd.apiblueprint').send(content); - }); + res.type('text/vnd.apiblueprint').send(content) + }) const server = app.listen(DEFAULT_SERVER_PORT, (listenErr) => { - if (listenErr) { done(listenErr); return; } - dredd = createDredd({ options: { path: `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib` } }); + if (listenErr) { + done(listenErr) + return + } + dredd = createDredd({ + options: { path: `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib` } + }) dredd.run((dreddErr) => { - server.close(() => { done(dreddErr); }); - }); - }); - }); + server.close(() => { + done(dreddErr) + }) + }) + }) + }) it('loads the API description', () => { - assert.lengthOf(dredd.configuration.apiDescriptions, 1); - }); + assert.lengthOf(dredd.configuration.apiDescriptions, 1) + }) it('the API description has all expected data', () => { - assert.hasAllKeys(dredd.configuration.apiDescriptions[0], EXPECTED_API_DESCRIPTION_PROPS); - }); + assert.hasAllKeys( + dredd.configuration.apiDescriptions[0], + EXPECTED_API_DESCRIPTION_PROPS + ) + }) it('the location is set to the URL', () => { - assert.equal(dredd.configuration.apiDescriptions[0].location, `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib`); - }); + assert.equal( + dredd.configuration.apiDescriptions[0].location, + `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib` + ) + }) it('the content is set', () => { - assert.equal(dredd.configuration.apiDescriptions[0].content, content); - }); + assert.equal(dredd.configuration.apiDescriptions[0].content, content) + }) it('the media type is set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'mediaType', 'text/vnd.apiblueprint'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'mediaType', + 'text/vnd.apiblueprint' + ) + }) it('the transactions are set', () => { - assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1); - assert.equal(dredd.configuration.apiDescriptions[0].transactions[0].name, 'Machines API > /machines > GET'); - }); + assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 
1) + assert.equal( + dredd.configuration.apiDescriptions[0].transactions[0].name, + 'Machines API > /machines > GET' + ) + }) it('the transaction runner is called with the transactions', () => { - assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1); - assert.equal(dredd.transactionRunner.run.firstCall.args[0][0].name, 'Machines API > /machines > GET'); - }); - }); + assert.lengthOf(dredd.transactionRunner.run.firstCall.args[0], 1) + assert.equal( + dredd.transactionRunner.run.firstCall.args[0][0].name, + 'Machines API > /machines > GET' + ) + }) + }) describe('when the API description is specified by URL with a non-existing server', () => { - let error; - let dredd; + let error + let dredd before((done) => { - dredd = createDredd({ options: { path: 'http://example.example:1234/file.apib' } }); - dredd.run((err) => { error = err; done(); }); - }); + dredd = createDredd({ + options: { path: 'http://example.example:1234/file.apib' } + }) + dredd.run((err) => { + error = err + done() + }) + }) it('results in an error', () => { - assert.instanceOf(error, Error); - }); + assert.instanceOf(error, Error) + }) it('the error is descriptive', () => { - assert.include(error.message, "Unable to load API description document from 'http://example.example:1234/file.apib': "); - assert.include(error.message, 'ENOTFOUND'); - }); + assert.include( + error.message, + "Unable to load API description document from 'http://example.example:1234/file.apib': " + ) + assert.include(error.message, 'ENOTFOUND') + }) it('aborts Dredd', () => { - assert.isFalse(dredd.transactionRunner.run.called); - }); - }); + assert.isFalse(dredd.transactionRunner.run.called) + }) + }) describe('when the API description is specified by URL pointing to a non-existing file', () => { - let error; - let dredd; + let error + let dredd before((done) => { - const app = express(); + const app = express() const server = app.listen(DEFAULT_SERVER_PORT, (listenErr) => { - if (listenErr) { done(listenErr); return; } - dredd = createDredd({ options: { path: `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib` } }); + if (listenErr) { + done(listenErr) + return + } + dredd = createDredd({ + options: { path: `http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib` } + }) dredd.run((dreddErr) => { - error = dreddErr; - server.close(done); - }); - }); - }); + error = dreddErr + server.close(done) + }) + }) + }) it('results in an error', () => { - assert.instanceOf(error, Error); - }); + assert.instanceOf(error, Error) + }) it('the error is descriptive', () => { - assert.include(error.message, `Unable to load API description document from 'http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib': `); - assert.include(error.message, 'Dredd got HTTP 404 response'); - }); + assert.include( + error.message, + `Unable to load API description document from 'http://127.0.0.1:${DEFAULT_SERVER_PORT}/file.apib': ` + ) + assert.include(error.message, 'Dredd got HTTP 404 response') + }) it('aborts Dredd', () => { - assert.isFalse(dredd.transactionRunner.run.called); - }); - }); + assert.isFalse(dredd.transactionRunner.run.called) + }) + }) describe('when there are multiple API descriptions', () => { - let dredd; + let dredd const content1 = ` FORMAT: 1A # Beehive API v1 # GET /honey + Response 200 (text/plain) - `; + ` const content2 = ` FORMAT: 1A @@ -309,37 +457,41 @@ FORMAT: 1A + Response 200 (text/plain) # GET /bees + Response 200 (text/plain) - `; + ` before((done) => { dredd = createDredd({ apiDescriptions: [content1, content2], - options: { path: 
'./test/fixtures/single-get.apib' }, - }); - dredd.run(done); - }); + options: { path: './test/fixtures/single-get.apib' } + }) + dredd.run(done) + }) it('loads the API descriptions', () => { - assert.lengthOf(dredd.configuration.apiDescriptions, 3); - }); + assert.lengthOf(dredd.configuration.apiDescriptions, 3) + }) it('the media type is set', () => { - assert.propertyVal(dredd.configuration.apiDescriptions[0], 'mediaType', 'text/vnd.apiblueprint'); - }); + assert.propertyVal( + dredd.configuration.apiDescriptions[0], + 'mediaType', + 'text/vnd.apiblueprint' + ) + }) it('the transactions are set', () => { - assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1); - assert.lengthOf(dredd.configuration.apiDescriptions[1].transactions, 2); - assert.lengthOf(dredd.configuration.apiDescriptions[2].transactions, 1); - }); + assert.lengthOf(dredd.configuration.apiDescriptions[0].transactions, 1) + assert.lengthOf(dredd.configuration.apiDescriptions[1].transactions, 2) + assert.lengthOf(dredd.configuration.apiDescriptions[2].transactions, 1) + }) it('the transaction runner is called with the transactions', () => { - const transactions = dredd.transactionRunner.run.firstCall.args[0]; + const transactions = dredd.transactionRunner.run.firstCall.args[0] - assert.lengthOf(transactions, 4); + assert.lengthOf(transactions, 4) assert.deepEqual(transactions.map(({ name }) => name), [ 'Beehive API v1 > /honey > GET', 'Beehive API v2 > /honey > GET', 'Beehive API v2 > /bees > GET', - 'Machines API > Machines > Machines collection > Get Machines', - ]); - }); - }); -}); + 'Machines API > Machines > Machines collection > Get Machines' + ]) + }) + }) +}) diff --git a/test/integration/openapi2-test.js b/test/integration/openapi2-test.js index 88903f75e..00eadfc11 100644 --- a/test/integration/openapi2-test.js +++ b/test/integration/openapi2-test.js @@ -1,9 +1,9 @@ -const R = require('ramda'); -const { assert } = require('chai'); +import R from 'ramda'; +import { assert } from 'chai'; -const logger = require('../../lib/logger'); -const reporterOutputLogger = require('../../lib/reporters/reporterOutputLogger'); -const Dredd = require('../../lib/Dredd'); +import logger from '../../lib/logger'; +import reporterOutputLogger from '../../lib/reporters/reporterOutputLogger'; +import Dredd from '../../lib/Dredd'; const PORT = 9876; @@ -13,12 +13,16 @@ function execCommand(options = {}, cb) { output = ''; let finished = false; const defaultConfig = { - server: `http://127.0.0.1:${PORT}`, + server: `http://127.0.0.1:${PORT}` }; const dreddOptions = R.mergeDeepRight(defaultConfig, options); new Dredd(dreddOptions).run((error) => { + if (error) { + throw error; + } + if (!finished) { finished = true; if (error ? error.message : undefined) { @@ -29,12 +33,10 @@ function execCommand(options = {}, cb) { }); } - function record(transport, level, message) { output += `\n${level}: ${message}`; } - // These tests were separated out from a larger file. 
They deserve a rewrite, // see https://github.com/apiaryio/dredd/issues/1288 describe('OpenAPI 2', () => { @@ -58,102 +60,147 @@ describe('OpenAPI 2', () => { const reTransaction = /(skip|fail): (\w+) \((\d+)\) \/honey/g; let actual; - before(done => execCommand({ - options: { - path: './test/fixtures/multiple-responses.yaml', - }, - }, - (err) => { - let groups; - const matches = []; - // eslint-disable-next-line - while (groups = reTransaction.exec(output)) { matches.push(groups); } - - actual = matches.map((match) => { - const keyMap = { - 0: 'name', 1: 'action', 2: 'method', 3: 'statusCode', - }; - return match.reduce((result, element, i) => Object.assign(result, { [keyMap[i]]: element }), - {}); - }); - done(err); - })); + before((done) => + execCommand( + { + options: { + path: './test/fixtures/multiple-responses.yaml' + } + }, + (err) => { + let groups; + const matches = []; + // eslint-disable-next-line + while ((groups = reTransaction.exec(output))) { + matches.push(groups); + } + + actual = matches.map((match) => { + const keyMap = { + 0: 'name', + 1: 'action', + 2: 'method', + 3: 'statusCode' + }; + return match.reduce( + (result, element, i) => + Object.assign(result, { [keyMap[i]]: element }), + {} + ); + }); + done(err); + } + ) + ); it('recognizes all 3 transactions', () => assert.equal(actual.length, 3)); - [ { action: 'skip', statusCode: '400' }, { action: 'skip', statusCode: '500' }, - { action: 'fail', statusCode: '200' }, - ].forEach((expected, i) => context(`the transaction #${i + 1}`, () => { - it(`has status code ${expected.statusCode}`, () => assert.equal(expected.statusCode, actual[i].statusCode)); - it(`is ${expected.action === 'skip' ? '' : 'not '}skipped by default`, () => assert.equal(expected.action, actual[i].action)); - })); + { action: 'fail', statusCode: '200' } + ].forEach((expected, i) => + context(`the transaction #${i + 1}`, () => { + it(`has status code ${expected.statusCode}`, () => + assert.equal(expected.statusCode, actual[i].statusCode)); + it(`is ${ + expected.action === 'skip' ? 
'' : 'not ' + }skipped by default`, () => + assert.equal(expected.action, actual[i].action)); + }) + ); }); describe('when OpenAPI 2 document has multiple responses and hooks unskip some of them', () => { const reTransaction = /(skip|fail): (\w+) \((\d+)\) \/honey/g; let actual; - before(done => execCommand({ - options: { - path: './test/fixtures/multiple-responses.yaml', - hookfiles: './test/fixtures/openapi2-multiple-responses.js', - }, - }, - (err) => { - let groups; - const matches = []; - // eslint-disable-next-line - while (groups = reTransaction.exec(output)) { matches.push(groups); } - actual = matches.map((match) => { - const keyMap = { - 0: 'name', 1: 'action', 2: 'method', 3: 'statusCode', - }; - return match.reduce((result, element, i) => Object.assign(result, { [keyMap[i]]: element }), - {}); - }); - - done(err); - })); + before((done) => { + execCommand( + { + options: { + path: './test/fixtures/multiple-responses.yaml', + hookfiles: './test/fixtures/openapi2-multiple-responses.js' + } + }, + (err) => { + if (err) { + throw err; + } + + let groups; + const matches = []; + // eslint-disable-next-line + while ((groups = reTransaction.exec(output))) { + matches.push(groups); + } + actual = matches.map((match) => { + const keyMap = { + 0: 'name', + 1: 'action', + 2: 'method', + 3: 'statusCode' + }; + return match.reduce( + (result, element, i) => + Object.assign(result, { [keyMap[i]]: element }), + {} + ); + }); + done(err); + } + ); + }); it('recognizes all 3 transactions', () => assert.equal(actual.length, 3)); - [ { action: 'skip', statusCode: '400' }, { action: 'fail', statusCode: '200' }, - { action: 'fail', statusCode: '500' }, // Unskipped in hooks - ].forEach((expected, i) => context(`the transaction #${i + 1}`, () => { - it(`has status code ${expected.statusCode}`, () => assert.equal(expected.statusCode, actual[i].statusCode)); - - const defaultMessage = `is ${expected.action === 'skip' ? '' : 'not '}skipped by default`; - const unskippedMessage = 'is unskipped in hooks'; - it(`${expected.statusCode === '500' ? unskippedMessage : defaultMessage}`, () => assert.equal(expected.action, actual[i].action)); - })); + { action: 'fail', statusCode: '500' } // Unskipped in hooks + ].forEach((expected, i) => + context(`the transaction #${i + 1}`, () => { + it(`has status code ${expected.statusCode}`, () => + assert.equal(expected.statusCode, actual[i].statusCode)); + + const defaultMessage = `is ${ + expected.action === 'skip' ? '' : 'not ' + }skipped by default`; + const unskippedMessage = 'is unskipped in hooks'; + it(`${ + expected.statusCode === '500' ? 
unskippedMessage : defaultMessage + }`, () => assert.equal(expected.action, actual[i].action)); + }) + ); }); describe('when using OpenAPI 2 document with hooks', () => { const reTransactionName = /hook: (.+)/g; let matches; - before(done => execCommand({ - options: { - path: './test/fixtures/multiple-responses.yaml', - hookfiles: './test/fixtures/openapi2-transaction-names.js', - }, - }, - (err) => { - let groups; - matches = []; - // eslint-disable-next-line - while (groups = reTransactionName.exec(output)) { matches.push(groups[1]); } - done(err); - })); - - it('transaction names contain status code and content type', () => assert.deepEqual(matches, [ - '/honey > GET > 200 > application/json', - '/honey > GET > 400 > application/json', - '/honey > GET > 500 > application/json', - ])); + before((done) => + execCommand( + { + options: { + path: './test/fixtures/multiple-responses.yaml', + hookfiles: './test/fixtures/openapi2-transaction-names.js' + } + }, + (err) => { + let groups; + matches = []; + // eslint-disable-next-line + while ((groups = reTransactionName.exec(output))) { + matches.push(groups[1]); + } + done(err); + } + ) + ); + + it('transaction names contain status code and content type', () => + assert.deepEqual(matches, [ + '/honey > GET > 200 > application/json', + '/honey > GET > 400 > application/json', + '/honey > GET > 500 > application/json' + ])); }); }); diff --git a/test/integration/proxy-test.js b/test/integration/proxy-test.js index eb5b4617b..233924bd9 100644 --- a/test/integration/proxy-test.js +++ b/test/integration/proxy-test.js @@ -1,11 +1,14 @@ -const http = require('http'); -const url = require('url'); -const { assert } = require('chai'); - -const { - runDredd, recordLogging, createServer, DEFAULT_SERVER_PORT, -} = require('./helpers'); -const Dredd = require('../../lib/Dredd'); +import http from 'http'; +import url from 'url'; +import { assert } from 'chai'; + +import { + runDredd, + recordLogging, + createServer, + DEFAULT_SERVER_PORT +} from './helpers'; +import Dredd from '../../lib/Dredd'; const PROXY_PORT = DEFAULT_SERVER_PORT + 1; const PROXY_URL = `http://127.0.0.1:${PROXY_PORT}`; @@ -13,30 +16,35 @@ const SERVER_HOST = `127.0.0.1:${DEFAULT_SERVER_PORT}`; const DUMMY_URL = 'http://example.com'; const REGULAR_HTTP_METHODS = ['GET', 'POST', 'PUT', 'DELETE']; - function unsetAllProxyEnvVars(env) { Object.keys(env) - .filter(envName => envName.toLowerCase().includes('proxy')) - .forEach(envName => delete env[envName]); + .filter((envName) => envName.toLowerCase().includes('proxy')) + .forEach((envName) => delete env[envName]); } // Normally, tests create Dredd instance and pass it to the 'runDredd' // helper, which captures Dredd's logging while it runs. However, in // this case we need to capture logging also during the instantiation. 
function createAndRunDredd(configuration, done) { - if (!configuration.options) { configuration.options = {}; } + if (!configuration.options) { + configuration.options = {}; + } configuration.options.color = false; configuration.options.loglevel = 'debug'; let dredd; - recordLogging((next) => { - dredd = new Dredd(configuration); - dredd.configuration.http.strictSSL = false; - next(); - }, (err, args, dreddInitLogging) => runDredd(dredd, (error, info) => { - info.logging = `${dreddInitLogging}\n${info.logging}`; - done(error, info); - })); + recordLogging( + (next) => { + dredd = new Dredd(configuration); + dredd.configuration.http.strictSSL = false; + next(); + }, + (err, args, dreddInitLogging) => + runDredd(dredd, (error, info) => { + info.logging = `${dreddInitLogging}\n${info.logging}`; + done(error, info); + }) + ); } // Creates dummy proxy server for given protocol. Records details @@ -125,44 +133,59 @@ function test(scenario) { scenario.configureDredd(configuration); createAndRunDredd(configuration, (err, info) => { - if (err) { return done(err); } + if (err) { + return done(err); + } dreddLogging = info.logging; done(); }); }); // Assertions... - it('logs the proxy settings', () => assert.include(dreddLogging, scenario.expectedLog)); - it('recommends user to read the documentation about using HTTP(S) proxies', () => assert.include(dreddLogging, '#using-https-proxy')); + it('logs the proxy settings', () => + assert.include(dreddLogging, scenario.expectedLog)); + it('recommends user to read the documentation about using HTTP(S) proxies', () => + assert.include(dreddLogging, '#using-https-proxy')); if (scenario.expectedDestination === 'proxy') { - it('does not request the server', () => assert.isFalse(serverRuntimeInfo.requested)); - it('does request the proxy', () => assert.notDeepEqual(proxyRequestInfo, {})); + it('does not request the server', () => + assert.isFalse(serverRuntimeInfo.requested)); + it('does request the proxy', () => + assert.notDeepEqual(proxyRequestInfo, {})); if (scenario.protocol === 'http') { - it('requests the proxy with regular HTTP method', () => assert.oneOf(proxyRequestInfo.method, REGULAR_HTTP_METHODS)); - it('requests the proxy, using the original URL as a path', () => assert.equal(proxyRequestInfo.url, scenario.expectedUrl)); + it('requests the proxy with regular HTTP method', () => + assert.oneOf(proxyRequestInfo.method, REGULAR_HTTP_METHODS)); + it('requests the proxy, using the original URL as a path', () => + assert.equal(proxyRequestInfo.url, scenario.expectedUrl)); return; - } if (scenario.protocol === 'https') { - it('requests the proxy with CONNECT', () => assert.equal(proxyRequestInfo.method, 'CONNECT')); + } + if (scenario.protocol === 'https') { + it('requests the proxy with CONNECT', () => + assert.equal(proxyRequestInfo.method, 'CONNECT')); it('asks the proxy to tunnel SSL connection to the original hostname', () => { - const hostname = `${url.parse(scenario.expectedUrl).hostname}:${DEFAULT_SERVER_PORT}`; + const hostname = `${ + url.parse(scenario.expectedUrl).hostname + }:${DEFAULT_SERVER_PORT}`; assert.equal(proxyRequestInfo.url, hostname); }); return; } throw new Error(`Unsupported protocol: ${scenario.protocol}`); } else if (scenario.expectedDestination === 'server') { - it('does not request the proxy', () => assert.deepEqual(proxyRequestInfo, {})); - it('does request the server', () => assert.isTrue(serverRuntimeInfo.requestedOnce)); - it('requests the server with regular HTTP method', () => 
assert.oneOf(serverRuntimeInfo.lastRequest.method, REGULAR_HTTP_METHODS)); - it('requests the server with the original path', () => assert.equal(serverRuntimeInfo.lastRequest.url, scenario.expectedUrl)); + it('does not request the proxy', () => + assert.deepEqual(proxyRequestInfo, {})); + it('does request the server', () => + assert.isTrue(serverRuntimeInfo.requestedOnce)); + it('requests the server with regular HTTP method', () => + assert.oneOf(serverRuntimeInfo.lastRequest.method, REGULAR_HTTP_METHODS)); + it('requests the server with the original path', () => + assert.equal(serverRuntimeInfo.lastRequest.url, scenario.expectedUrl)); } else { throw new Error(`Unsupported destination: ${scenario.expectedDestination}`); } } - ['http', 'https'].forEach((protocol) => { const serverUrl = `${protocol}://${SERVER_HOST}`; @@ -178,20 +201,23 @@ ${protocol}_proxy=${PROXY_URL}\ }); after(() => unsetAllProxyEnvVars(process.env)); - describe('Requesting Server Under Test', () => test({ - protocol, - configureDredd(configuration) { - configuration.loglevel = 'debug'; - configuration.server = serverUrl; - configuration.options.path = './test/fixtures/single-get.apib'; - }, - expectedLog, - expectedDestination: 'server', - expectedUrl: '/machines', - })); + describe('Requesting Server Under Test', () => + test({ + protocol, + configureDredd(configuration) { + configuration.loglevel = 'debug'; + configuration.server = serverUrl; + configuration.options.path = './test/fixtures/single-get.apib'; + }, + expectedLog, + expectedDestination: 'server', + expectedUrl: '/machines' + })); describe('Using Apiary Reporter', () => { - before(() => { process.env.APIARY_API_URL = serverUrl; }); + before(() => { + process.env.APIARY_API_URL = serverUrl; + }); after(() => delete process.env.APIARY_API_URL); test({ @@ -203,24 +229,24 @@ ${protocol}_proxy=${PROXY_URL}\ }, expectedLog, expectedDestination: 'proxy', - expectedUrl: `${serverUrl}/apis/public/tests/runs`, + expectedUrl: `${serverUrl}/apis/public/tests/runs` }); }); - describe('Downloading API Description Document', () => test({ - protocol, - configureDredd(configuration) { - configuration.server = DUMMY_URL; - configuration.options.path = `${serverUrl}/example.apib`; - }, - expectedLog, - expectedDestination: 'proxy', - expectedUrl: `${serverUrl}/example.apib`, - })); + describe('Downloading API Description Document', () => + test({ + protocol, + configureDredd(configuration) { + configuration.server = DUMMY_URL; + configuration.options.path = `${serverUrl}/example.apib`; + }, + expectedLog, + expectedDestination: 'proxy', + expectedUrl: `${serverUrl}/example.apib` + })); }); }); - describe('Respecting ‘no_proxy’ Environment Variable', () => { const serverUrl = `http://${SERVER_HOST}`; const expectedLog = `\ @@ -235,20 +261,23 @@ http_proxy=${PROXY_URL}, no_proxy=${SERVER_HOST}\ }); after(() => unsetAllProxyEnvVars(process.env)); - describe('Requesting Server Under Test', () => test({ - protocol: 'http', - configureDredd(configuration) { - configuration.loglevel = 'debug'; - configuration.server = serverUrl; - configuration.options.path = './test/fixtures/single-get.apib'; - }, - expectedLog, - expectedDestination: 'server', - expectedUrl: '/machines', - })); + describe('Requesting Server Under Test', () => + test({ + protocol: 'http', + configureDredd(configuration) { + configuration.loglevel = 'debug'; + configuration.server = serverUrl; + configuration.options.path = './test/fixtures/single-get.apib'; + }, + expectedLog, + expectedDestination: 'server', + 
expectedUrl: '/machines' + })); describe('Using Apiary Reporter', () => { - before(() => { process.env.APIARY_API_URL = serverUrl; }); + before(() => { + process.env.APIARY_API_URL = serverUrl; + }); after(() => delete process.env.APIARY_API_URL); test({ @@ -260,18 +289,19 @@ http_proxy=${PROXY_URL}, no_proxy=${SERVER_HOST}\ }, expectedLog, expectedDestination: 'server', - expectedUrl: '/apis/public/tests/runs', + expectedUrl: '/apis/public/tests/runs' }); }); - describe('Downloading API Description Document', () => test({ - protocol: 'http', - configureDredd(configuration) { - configuration.server = DUMMY_URL; - configuration.options.path = `${serverUrl}/example.apib`; - }, - expectedLog, - expectedDestination: 'server', - expectedUrl: '/example.apib', - })); + describe('Downloading API Description Document', () => + test({ + protocol: 'http', + configureDredd(configuration) { + configuration.server = DUMMY_URL; + configuration.options.path = `${serverUrl}/example.apib`; + }, + expectedLog, + expectedDestination: 'server', + expectedUrl: '/example.apib' + })); }); diff --git a/test/integration/regressions/regression-152-test.js b/test/integration/regressions/regression-152-test.js index c65e9a5ed..038c2a2df 100644 --- a/test/integration/regressions/regression-152-test.js +++ b/test/integration/regressions/regression-152-test.js @@ -1,30 +1,36 @@ -const { assert } = require('chai'); +import { assert } from 'chai' -const Dredd = require('../../../lib/Dredd'); -const { runDreddWithServer, createServer } = require('../helpers'); +import Dredd from '../../../lib/Dredd' +import { runDreddWithServer, createServer } from '../helpers' -describe('Regression: Issue #152', () => describe('Modify transaction object inside beforeAll combined with beforeEach helper', () => { - let runtimeInfo; +describe('Regression: Issue #152', () => + describe('Modify transaction object inside beforeAll combined with beforeEach helper', () => { + let runtimeInfo - before((done) => { - const app = createServer(); - app.get('/machines', (req, res) => res.json([{ type: 'bulldozer', name: 'willy' }])); + before((done) => { + const app = createServer() + app.get('/machines', (req, res) => + res.json([{ type: 'bulldozer', name: 'willy' }]) + ) - const dredd = new Dredd({ - options: { - path: './test/fixtures/single-get.apib', - require: 'coffeescript/register', - hookfiles: './test/fixtures/regression-152.coffee', - }, - }); + const dredd = new Dredd({ + options: { + path: './test/fixtures/single-get.apib', + require: 'coffeescript/register', + hookfiles: './test/fixtures/regression-152.coffee' + } + }) - runDreddWithServer(dredd, app, (...args) => { - let err; - // eslint-disable-next-line - [err, runtimeInfo] = Array.from(args); - done(err); - }); - }); + runDreddWithServer(dredd, app, (...args) => { + let err + // eslint-disable-next-line + ;[err, runtimeInfo] = Array.from(args) + done(err) + }) + }) - it('should modify the transaction with hooks', () => assert.deepEqual(Object.keys(runtimeInfo.server.requests), ['/machines?api-key=23456'])); -})); + it('should modify the transaction with hooks', () => + assert.deepEqual(Object.keys(runtimeInfo.server.requests), [ + '/machines?api-key=23456' + ])) + })) diff --git a/test/integration/regressions/regression-319-354-test.js b/test/integration/regressions/regression-319-354-test.js index e62ffec91..dad9292db 100644 --- a/test/integration/regressions/regression-319-354-test.js +++ b/test/integration/regressions/regression-319-354-test.js @@ -1,36 +1,36 @@ -const clone = 
require('clone'); -const { assert } = require('chai'); - -const { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } = require('../helpers'); +import clone from 'clone' +import { assert } from 'chai' +import { runCLIWithServer, createServer, DEFAULT_SERVER_PORT } from '../helpers' // Helper, tries to parse given HTTP body and in case it can be parsed as JSON, // it returns the resulting JS object, otherwise it returns whatever came in. function parseIfJson(body) { - if (!body) { return undefined; } + if (!body) { + return undefined + } try { - return JSON.parse(body); + return JSON.parse(body) } catch (error) { - return body; + return body } } - // This can be removed once https://github.com/apiaryio/dredd/issues/341 is done function parseDreddStdout(stdout) { // Parse individual entries (deals also with multi-line entries) - let entries = []; - let entry; + let entries = [] + let entry for (const line of stdout.split(/\r?\n/)) { - const match = line.match(/^(\w+): (.+)?$/); + const match = line.match(/^(\w+): (.+)?$/) if (match) { if (entry) { - entry.body = entry.body.trim(); - entries.push(entry); + entry.body = entry.body.trim() + entries.push(entry) } - entry = { label: match[1], body: match[2] || '' }; + entry = { label: match[1], body: match[2] || '' } } else { - entry.body += `\n${line.trim()}`; + entry.body += `\n${line.trim()}` } } @@ -40,32 +40,44 @@ function parseDreddStdout(stdout) { // fail: body: At '/name' Invalid type: null (expected string) // body: At '/shoeSize' Invalid type: string (expected number) entries = entries.filter((item, i) => { - const previousEntry = entries[i - 1]; - if ((item.label === 'body') && (previousEntry.label === 'fail')) { - previousEntry.body += `\n${item.body}`; - return false; + const previousEntry = entries[i - 1] + if (item.label === 'body' && previousEntry.label === 'fail') { + previousEntry.body += `\n${item.body}` + return false } - return true; - }); + return true + }) // Re-arrange data from entries const results = { - summary: '', failures: [], bodies: [], schemas: [], - }; + summary: '', + failures: [], + bodies: [], + schemas: [] + } for (entry of entries) { switch (entry.label) { - case 'body': results.bodies.push(parseIfJson(entry.body)); break; - case 'bodySchema': results.schemas.push(parseIfJson(entry.body)); break; - case 'complete': results.summary = entry.body; break; - case 'fail': results.failures.push(entry.body); break; - default: continue; + case 'body': + results.bodies.push(parseIfJson(entry.body)) + break + case 'bodySchema': + results.schemas.push(parseIfJson(entry.body)) + break + case 'complete': + results.summary = entry.body + break + case 'fail': + results.failures.push(entry.body) + break + default: + continue } } - return results; + return results } describe('Regression: Issues #319 and #354', () => { - let results; + let results const brickTypePayload = { id: '', @@ -75,10 +87,10 @@ describe('Regression: Issues #319 and #354', () => { producer: { address: { city: null, - street: '', - }, - }, - }; + street: '' + } + } + } const brickTypeSchema = { $schema: 'http://json-schema.org/draft-04/schema#', @@ -95,19 +107,16 @@ describe('Regression: Issues #319 and #354', () => { type: 'object', properties: { city: { - anyOf: [ - { type: 'null' }, - { type: 'string' }, - ], + anyOf: [{ type: 'null' }, { type: 'string' }] }, - street: { type: 'string' }, - }, - }, - }, - }, + street: { type: 'string' } + } + } + } + } }, - required: ['name'], - }; + required: ['name'] + } const userPayload = { id: '', @@ -115,9 
+124,9 @@ describe('Regression: Issues #319 and #354', () => { shoeSize: 42, address: { city: null, - street: '', - }, - }; + street: '' + } + } const userSchema = { $schema: 'http://json-schema.org/draft-04/schema#', @@ -130,35 +139,30 @@ describe('Regression: Issues #319 and #354', () => { type: 'object', properties: { city: { - anyOf: [ - { type: 'null' }, - { type: 'string' }, - ], + anyOf: [{ type: 'null' }, { type: 'string' }] }, - street: { type: 'string' }, - }, - }, - }, - }; + street: { type: 'string' } + } + } + } + } - const userArrayPayload = [ - userPayload, - ]; + const userArrayPayload = [userPayload] const userArraySchema = { $schema: 'http://json-schema.org/draft-04/schema#', - type: 'array', - }; + type: 'array' + } describe('Tested app is consistent with the API description', () => { before((done) => { - const app = createServer(); + const app = createServer() // Attaching endpoint for each testing scenario - app.get('/bricks/XYZ42', (req, res) => res.json(brickTypePayload)); - app.post('/bricks', (req, res) => res.json(brickTypePayload)); - app.get('/customers', (req, res) => res.json(userArrayPayload)); - app.post('/customers', (req, res) => res.json(userPayload)); + app.get('/bricks/XYZ42', (req, res) => res.json(brickTypePayload)) + app.post('/bricks', (req, res) => res.json(brickTypePayload)) + app.get('/customers', (req, res) => res.json(userArrayPayload)) + app.post('/customers', (req, res) => res.json(userPayload)) // Spinning up the Express server, running Dredd, and saving results const args = [ @@ -166,72 +170,92 @@ describe('Regression: Issues #319 and #354', () => { `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--inline-errors', '--details', - '--no-color', - ]; + '--no-color' + ] runCLIWithServer(args, app, (err, info) => { - if (info) { results = parseDreddStdout(info.dredd.stdout); } - done(err); - }); - }); + if (info) { + results = parseDreddStdout(info.dredd.stdout) + } + done(err) + }) + }) it('outputs no failures', () => { // Intentionally not testing just '.length' as this approach will output the difference - assert.deepEqual(results.failures, []); - }); - it('results in exactly four tests', () => assert.include(results.summary, '4 total')); - it('results in four passing tests', () => assert.include(results.summary, '4 passing')); + assert.deepEqual(results.failures, []) + }) + it('results in exactly four tests', () => + assert.include(results.summary, '4 total')) + it('results in four passing tests', () => + assert.include(results.summary, '4 passing')) describe('Attributes defined in resource are referenced from payload [GET /bricks/XYZ42]', () => { - it('has no request body', () => assert.isUndefined(results.bodies[0])); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[1], brickTypePayload)); - it('has correct ‘actual’ response body', () => assert.deepEqual(results.bodies[2], brickTypePayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[0], brickTypeSchema)); - }); + it('has no request body', () => assert.isUndefined(results.bodies[0])) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[1], brickTypePayload)) + it('has correct ‘actual’ response body', () => + assert.deepEqual(results.bodies[2], brickTypePayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[0], brickTypeSchema)) + }) describe('Attributes defined in resource are referenced from action [POST /bricks]', () => { - it('has correct request body', () => 
assert.deepEqual(results.bodies[3], brickTypePayload)); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[4], brickTypePayload)); - it('has correct ‘actual’ response body', () => assert.deepEqual(results.bodies[5], brickTypePayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[1], brickTypeSchema)); - }); + it('has correct request body', () => + assert.deepEqual(results.bodies[3], brickTypePayload)) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[4], brickTypePayload)) + it('has correct ‘actual’ response body', () => + assert.deepEqual(results.bodies[5], brickTypePayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[1], brickTypeSchema)) + }) describe('Attributes defined as data structure are referenced from payload [GET /customers]', () => { - it('has no request body', () => assert.isUndefined(results.bodies[6])); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[7], userArrayPayload)); - it('has correct ‘actual’ response body', () => assert.deepEqual(results.bodies[8], userArrayPayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[2], userArraySchema)); - }); + it('has no request body', () => assert.isUndefined(results.bodies[6])) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[7], userArrayPayload)) + it('has correct ‘actual’ response body', () => + assert.deepEqual(results.bodies[8], userArrayPayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[2], userArraySchema)) + }) describe('Attributes defined as data structure are referenced from action [POST /customers]', () => { - it('has correct request body', () => assert.deepEqual(results.bodies[9], userPayload)); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[10], userPayload)); - it('has correct ‘actual’ response body', () => assert.deepEqual(results.bodies[11], userPayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[3], userSchema)); - }); - }); + it('has correct request body', () => + assert.deepEqual(results.bodies[9], userPayload)) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[10], userPayload)) + it('has correct ‘actual’ response body', () => + assert.deepEqual(results.bodies[11], userPayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[3], userSchema)) + }) + }) describe('Tested app is inconsistent with the API description', () => { - const incorrectBrickTypePayload = clone(brickTypePayload); - incorrectBrickTypePayload.id = 42; - delete incorrectBrickTypePayload.name; + const incorrectBrickTypePayload = clone(brickTypePayload) + incorrectBrickTypePayload.id = 42 + delete incorrectBrickTypePayload.name - const incorrectUserPayload = clone(userPayload); - incorrectUserPayload.shoeSize = 'XL'; - incorrectUserPayload.name = null; + const incorrectUserPayload = clone(userPayload) + incorrectUserPayload.shoeSize = 'XL' + incorrectUserPayload.name = null const incorrectUserArrayPayload = { page: 1, - items: [incorrectUserPayload], - }; + items: [incorrectUserPayload] + } before((done) => { - const app = createServer(); + const app = createServer() // Attaching endpoint for each testing scenario - app.get('/bricks/XYZ42', (req, res) => res.json(incorrectBrickTypePayload)); - app.post('/bricks', (req, res) => res.json(incorrectBrickTypePayload)); - app.get('/customers', (req, res) => 
res.json(incorrectUserArrayPayload)); - app.post('/customers', (req, res) => res.json(incorrectUserPayload)); + app.get('/bricks/XYZ42', (req, res) => + res.json(incorrectBrickTypePayload) + ) + app.post('/bricks', (req, res) => res.json(incorrectBrickTypePayload)) + app.get('/customers', (req, res) => res.json(incorrectUserArrayPayload)) + app.post('/customers', (req, res) => res.json(incorrectUserPayload)) // Spinning up the Express server, running Dredd, and saving results const args = [ @@ -239,63 +263,81 @@ describe('Regression: Issues #319 and #354', () => { `http://127.0.0.1:${DEFAULT_SERVER_PORT}`, '--inline-errors', '--details', - '--no-color', - ]; + '--no-color' + ] runCLIWithServer(args, app, (err, info) => { - if (info) { results = parseDreddStdout(info.dredd.stdout); } - done(err); - }); - }); - - it('outputs failures', () => assert.isOk(results.failures.length)); - it('results in exactly four tests', () => assert.include(results.summary, '4 total')); - it('results in four failing tests', () => assert.include(results.summary, '4 failing')); + if (info) { + results = parseDreddStdout(info.dredd.stdout) + } + done(err) + }) + }) + + it('outputs failures', () => assert.isOk(results.failures.length)) + it('results in exactly four tests', () => + assert.include(results.summary, '4 total')) + it('results in four failing tests', () => + assert.include(results.summary, '4 failing')) describe('Attributes defined in resource are referenced from payload [GET /bricks/XYZ42]', () => { it('fails on missing required property and invalid type', () => { - assert.include(results.failures[0], 'GET (200) /bricks/XYZ42'); - assert.include(results.failures[1], 'Missing required property: name'); - assert.include(results.failures[1], 'Invalid type: number'); - }); - it('has no request body', () => assert.isUndefined(results.bodies[0])); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[1], brickTypePayload)); - it('has incorrect ‘actual’ response body', () => assert.deepEqual(results.bodies[2], incorrectBrickTypePayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[0], brickTypeSchema)); - }); + assert.include(results.failures[0], 'GET (200) /bricks/XYZ42') + assert.include(results.failures[1], 'Missing required property: name') + assert.include(results.failures[1], 'Invalid type: number') + }) + it('has no request body', () => assert.isUndefined(results.bodies[0])) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[1], brickTypePayload)) + it('has incorrect ‘actual’ response body', () => + assert.deepEqual(results.bodies[2], incorrectBrickTypePayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[0], brickTypeSchema)) + }) describe('Attributes defined in resource are referenced from action [POST /bricks]', () => { it('fails on missing required property and invalid type', () => { - assert.include(results.failures[2], 'POST (200) /bricks'); - assert.include(results.failures[3], 'Missing required property: name'); - assert.include(results.failures[3], 'Invalid type: number'); - }); - it('has correct request body', () => assert.deepEqual(results.bodies[3], brickTypePayload)); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[4], brickTypePayload)); - it('has incorrect ‘actual’ response body', () => assert.deepEqual(results.bodies[5], incorrectBrickTypePayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[1], brickTypeSchema)); - }); + 
assert.include(results.failures[2], 'POST (200) /bricks') + assert.include(results.failures[3], 'Missing required property: name') + assert.include(results.failures[3], 'Invalid type: number') + }) + it('has correct request body', () => + assert.deepEqual(results.bodies[3], brickTypePayload)) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[4], brickTypePayload)) + it('has incorrect ‘actual’ response body', () => + assert.deepEqual(results.bodies[5], incorrectBrickTypePayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[1], brickTypeSchema)) + }) describe('Attributes defined as data structure are referenced from payload [GET /customers]', () => { it('fails on invalid type', () => { - assert.include(results.failures[4], 'GET (200) /customers'); - assert.include(results.failures[5], 'Invalid type: object'); - }); - it('has no request body', () => assert.isUndefined(results.bodies[6])); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[7], userArrayPayload)); - it('has incorrect ‘actual’ response body', () => assert.deepEqual(results.bodies[8], incorrectUserArrayPayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[2], userArraySchema)); - }); + assert.include(results.failures[4], 'GET (200) /customers') + assert.include(results.failures[5], 'Invalid type: object') + }) + it('has no request body', () => assert.isUndefined(results.bodies[6])) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[7], userArrayPayload)) + it('has incorrect ‘actual’ response body', () => + assert.deepEqual(results.bodies[8], incorrectUserArrayPayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[2], userArraySchema)) + }) describe('Attributes defined as data structure are referenced from action [POST /customers]', () => { it('fails on invalid types', () => { - assert.include(results.failures[6], 'POST (200) /customers'); - assert.include(results.failures[7], 'Invalid type: null'); - assert.include(results.failures[7], 'Invalid type: string'); - }); - it('has correct request body', () => assert.deepEqual(results.bodies[9], userPayload)); - it('has correct ‘expected’ response body', () => assert.deepEqual(results.bodies[10], userPayload)); - it('has incorrect ‘actual’ response body', () => assert.deepEqual(results.bodies[11], incorrectUserPayload)); - it('has correct schema', () => assert.deepEqual(results.schemas[3], userSchema)); - }); - }); -}); + assert.include(results.failures[6], 'POST (200) /customers') + assert.include(results.failures[7], 'Invalid type: null') + assert.include(results.failures[7], 'Invalid type: string') + }) + it('has correct request body', () => + assert.deepEqual(results.bodies[9], userPayload)) + it('has correct ‘expected’ response body', () => + assert.deepEqual(results.bodies[10], userPayload)) + it('has incorrect ‘actual’ response body', () => + assert.deepEqual(results.bodies[11], incorrectUserPayload)) + it('has correct schema', () => + assert.deepEqual(results.schemas[3], userSchema)) + }) + }) +}) diff --git a/test/integration/regressions/regression-615-test.js b/test/integration/regressions/regression-615-test.js index 37d27b5b3..37161703d 100644 --- a/test/integration/regressions/regression-615-test.js +++ b/test/integration/regressions/regression-615-test.js @@ -1,28 +1,37 @@ -const { assert } = require('chai'); +import { assert } from 'chai' -const Dredd = require('../../../lib/Dredd'); -const { 
runDreddWithServer, createServer } = require('../helpers'); +import Dredd from '../../../lib/Dredd' +import { runDreddWithServer, createServer } from '../helpers' describe('Regression: Issue #615', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.all('/honey', (req, res) => res.status(200).type('text/plain').send('')); + const app = createServer() + app.all('/honey', (req, res) => + res + .status(200) + .type('text/plain') + .send('') + ) - const dredd = new Dredd({ options: { path: './test/fixtures/regression-615.apib' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/regression-615.apib' } + }) runDreddWithServer(dredd, app, (...args) => { - let err; + let err // eslint-disable-next-line - [err, runtimeInfo] = Array.from(args); - done(err); - }); - }); + ;[err, runtimeInfo] = Array.from(args) + done(err) + }) + }) - it('outputs no failures', () => assert.equal(runtimeInfo.dredd.stats.failures, 0)); - it('results in exactly three tests', () => assert.equal(runtimeInfo.dredd.stats.tests, 3)); + it('outputs no failures', () => + assert.equal(runtimeInfo.dredd.stats.failures, 0)) + it('results in exactly three tests', () => + assert.equal(runtimeInfo.dredd.stats.tests, 3)) it('results in three passing tests', () => { // Ensures just the 200 responses were selected, because the server returns only 200s - assert.equal(runtimeInfo.dredd.stats.passes, 3); - }); -}); + assert.equal(runtimeInfo.dredd.stats.passes, 3) + }) +}) diff --git a/test/integration/regressions/regression-893-897-test.js b/test/integration/regressions/regression-893-897-test.js index 0c9a0d00d..986c1dd95 100644 --- a/test/integration/regressions/regression-893-897-test.js +++ b/test/integration/regressions/regression-893-897-test.js @@ -1,71 +1,105 @@ -const { assert } = require('chai'); +import { assert } from 'chai' -const Dredd = require('../../../lib/Dredd'); -const { runDreddWithServer, createServer } = require('../helpers'); +import Dredd from '../../../lib/Dredd' +import { runDreddWithServer, createServer } from '../helpers' describe('Regression: Issue #893 and #897', () => { describe('when the response has no explicit status code', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/resource', (req, res) => res.json({ name: 'Honza', color: 'green' })); + const app = createServer() + app.get('/resource', (req, res) => + res.json({ name: 'Honza', color: 'green' }) + ) - const dredd = new Dredd({ options: { path: './test/fixtures/regression-893.yaml' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/regression-893.yaml' } + }) runDreddWithServer(dredd, app, (...args) => { - let err; + let err // eslint-disable-next-line - [err, runtimeInfo] = Array.from(args); - done(err); - }); - }); + ;[err, runtimeInfo] = Array.from(args) + done(err) + }) + }) - it('outputs no failures or errors', () => assert.equal(runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, 0)); - it('results in exactly one test', () => assert.equal(runtimeInfo.dredd.stats.tests, 1)); - it('results in one passing test (HTTP 200 is assumed)', () => assert.equal(runtimeInfo.dredd.stats.passes, 1)); - }); + it('outputs no failures or errors', () => + assert.equal( + runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, + 0 + )) + it('results in exactly one test', () => + assert.equal(runtimeInfo.dredd.stats.tests, 1)) + it('results in one passing test (HTTP 200 is assumed)', () => + 
assert.equal(runtimeInfo.dredd.stats.passes, 1)) + }) describe('when the response has no explicit schema and it has empty body', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/resource', (req, res) => res.json({ name: 'Honza', color: 'green' })); - app.get('/resource.csv', (req, res) => res.type('text/csv').send('name,color\nHonza,green\n')); + const app = createServer() + app.get('/resource', (req, res) => + res.json({ name: 'Honza', color: 'green' }) + ) + app.get('/resource.csv', (req, res) => + res.type('text/csv').send('name,color\nHonza,green\n') + ) - const dredd = new Dredd({ options: { path: './test/fixtures/regression-897-body.yaml' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/regression-897-body.yaml' } + }) runDreddWithServer(dredd, app, (...args) => { - let err; + let err // eslint-disable-next-line - [err, runtimeInfo] = Array.from(args); - done(err); - }); - }); + ;[err, runtimeInfo] = Array.from(args) + done(err) + }) + }) - it('outputs no failures or errors', () => assert.equal(runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, 0)); - it('results in exactly two tests', () => assert.equal(runtimeInfo.dredd.stats.tests, 2)); - it('results in two passing tests (body is not validated)', () => assert.equal(runtimeInfo.dredd.stats.passes, 2)); - }); + it('outputs no failures or errors', () => + assert.equal( + runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, + 0 + )) + it('results in exactly two tests', () => + assert.equal(runtimeInfo.dredd.stats.tests, 2)) + it('results in two passing tests (body is not validated)', () => + assert.equal(runtimeInfo.dredd.stats.passes, 2)) + }) describe('when the response has no explicit schema', () => { - let runtimeInfo; + let runtimeInfo before((done) => { - const app = createServer(); - app.get('/resource', (req, res) => res.json({ name: 'Honza', color: 'green' })); - app.get('/resource.csv', (req, res) => res.type('text/csv').send('name,color\nHonza,green\n')); + const app = createServer() + app.get('/resource', (req, res) => + res.json({ name: 'Honza', color: 'green' }) + ) + app.get('/resource.csv', (req, res) => + res.type('text/csv').send('name,color\nHonza,green\n') + ) - const dredd = new Dredd({ options: { path: './test/fixtures/regression-897-schema.yaml' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/regression-897-schema.yaml' } + }) runDreddWithServer(dredd, app, (...args) => { - let err; + let err // eslint-disable-next-line - [err, runtimeInfo] = Array.from(args); - done(err); - }); - }); + ;[err, runtimeInfo] = Array.from(args) + done(err) + }) + }) - it('outputs no failures or errors', () => assert.equal(runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, 0)); - it('results in exactly two tests', () => assert.equal(runtimeInfo.dredd.stats.tests, 2)); - it('results in two passing tests', () => assert.equal(runtimeInfo.dredd.stats.passes, 2)); - }); -}); + it('outputs no failures or errors', () => + assert.equal( + runtimeInfo.dredd.stats.failures + runtimeInfo.dredd.stats.errors, + 0 + )) + it('results in exactly two tests', () => + assert.equal(runtimeInfo.dredd.stats.tests, 2)) + it('results in two passing tests', () => + assert.equal(runtimeInfo.dredd.stats.passes, 2)) + }) +}) diff --git a/test/integration/request-test.js b/test/integration/request-test.js index 8561488bf..07d8f85f5 100644 --- a/test/integration/request-test.js +++ b/test/integration/request-test.js @@ 
-1,56 +1,74 @@ -const bodyParser = require('body-parser'); -const { assert } = require('chai'); -const fs = require('fs'); -const path = require('path'); +import * as bodyParser from 'body-parser' +import { assert } from 'chai' +import fs from 'fs' +import * as path from 'path' -const { runDreddWithServer, createServer } = require('./helpers'); -const Dredd = require('../../lib/Dredd'); +import { runDreddWithServer, createServer } from './helpers' +import Dredd from '../../lib/Dredd' -describe('Sending \'application/json\' request', () => { - let runtimeInfo; - const contentType = 'application/json'; +describe("Sending 'application/json' request", () => { + let runtimeInfo + const contentType = 'application/json' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); + const app = createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) - const dredd = new Dredd({ options: { path: './test/fixtures/request/application-json.apib' } }); + const dredd = new Dredd({ + options: { path: './test/fixtures/request/application-json.apib' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.equal(runtimeInfo.server.lastRequest.headers['content-type'], contentType)); + runtimeInfo = info + done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.equal( + runtimeInfo.server.lastRequest.headers['content-type'], + contentType + )) it('the request has the expected format', () => { - const { body } = runtimeInfo.server.lastRequest; - assert.deepEqual(JSON.parse(body), { test: 42 }); - }); + const { body } = runtimeInfo.server.lastRequest + assert.deepEqual(JSON.parse(body), { test: 42 }) + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -}); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) +}) describe("Sending 'multipart/form-data' request described in API Blueprint", () => { - let runtimeInfo; - const contentType = 'multipart/form-data'; + let runtimeInfo + const contentType = 'multipart/form-data' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); - const dredd = new Dredd({ options: { path: './test/fixtures/request/multipart-form-data.apib' } }); + const app = createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) + const dredd = new Dredd({ + options: { path: './test/fixtures/request/multipart-form-data.apib' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.include(runtimeInfo.server.lastRequest.headers['content-type'], 'multipart/form-data')); + runtimeInfo = info + 
done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.include( + runtimeInfo.server.lastRequest.headers['content-type'], + 'multipart/form-data' + )) it('the request has the expected format', () => { const lines = [ '--CUSTOM-BOUNDARY', @@ -65,33 +83,42 @@ describe("Sending 'multipart/form-data' request described in API Blueprint", () '{"test": 42}', '', '--CUSTOM-BOUNDARY--', - '', - ]; - assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')); - }); + '' + ] + assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')) + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -}); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) +}) describe("Sending 'multipart/form-data' request described in OpenAPI 2", () => { - let runtimeInfo; - const contentType = 'multipart/form-data'; + let runtimeInfo + const contentType = 'multipart/form-data' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); - const dredd = new Dredd({ options: { path: './test/fixtures/request/multipart-form-data.yaml' } }); + const app = createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) + const dredd = new Dredd({ + options: { path: './test/fixtures/request/multipart-form-data.yaml' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.include(runtimeInfo.server.lastRequest.headers['content-type'], 'multipart/form-data')); + runtimeInfo = info + done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.include( + runtimeInfo.server.lastRequest.headers['content-type'], + 'multipart/form-data' + )) it('the request has the expected format', () => { const lines = [ '--CUSTOM-BOUNDARY', @@ -104,33 +131,42 @@ describe("Sending 'multipart/form-data' request described in OpenAPI 2", () => { '{"test": 42}', '', '--CUSTOM-BOUNDARY--', - '', - ]; - assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')); - }); + '' + ] + assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')) + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -}); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) +}) describe("Sending 'multipart/form-data' request described as 'file' in OpenAPI 2", () => { - let runtimeInfo; - const contentType = 'multipart/form-data'; + let runtimeInfo + const contentType = 'multipart/form-data' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); - const dredd = new Dredd({ options: { path: './test/fixtures/request/multipart-form-data-file.yaml' } }); + const app 
= createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) + const dredd = new Dredd({ + options: { path: './test/fixtures/request/multipart-form-data-file.yaml' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.include(runtimeInfo.server.lastRequest.headers['content-type'], 'multipart/form-data')); + runtimeInfo = info + done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.include( + runtimeInfo.server.lastRequest.headers['content-type'], + 'multipart/form-data' + )) it('the request has the expected format', () => { const lines = [ '--BOUNDARY', @@ -143,165 +179,197 @@ describe("Sending 'multipart/form-data' request described as 'file' in OpenAPI 2 '{"test": 42}', '', '--BOUNDARY--', - '', - ]; - assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')); - }); + '' + ] + assert.equal(runtimeInfo.server.lastRequest.body, lines.join('\r\n')) + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -}); - -[{ - name: 'API Blueprint', - path: './test/fixtures/request/application-x-www-form-urlencoded.apib', -}, -{ - name: 'OpenAPI 2', - path: './test/fixtures/request/application-x-www-form-urlencoded.yaml', -}, + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) +}) +;[ + { + name: 'API Blueprint', + path: './test/fixtures/request/application-x-www-form-urlencoded.apib' + }, + { + name: 'OpenAPI 2', + path: './test/fixtures/request/application-x-www-form-urlencoded.yaml' + } ].forEach((apiDescription) => { describe(`Sending 'application/x-www-form-urlencoded' request described in ${apiDescription.name}`, () => { - let runtimeInfo; - const contentType = 'application/x-www-form-urlencoded'; + let runtimeInfo + const contentType = 'application/x-www-form-urlencoded' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); - const dredd = new Dredd({ options: { path: apiDescription.path } }); + const app = createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) + const dredd = new Dredd({ options: { path: apiDescription.path } }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('results in one request being delivered to the server', () => { - assert.isTrue(runtimeInfo.server.requestedOnce); - }); + assert.isTrue(runtimeInfo.server.requestedOnce) + }) it('the request has the expected Content-Type', () => { - assert.equal(runtimeInfo.server.lastRequest.headers['content-type'], contentType); - }); + assert.equal( + runtimeInfo.server.lastRequest.headers['content-type'], + contentType + ) + }) it('the request has the expected format', () => { // API Blueprint adds extra \n at the end: https://github.com/apiaryio/dredd/issues/67 - assert.equal(runtimeInfo.server.lastRequest.body.trim(), 'test=42'); - }); + 
assert.equal(runtimeInfo.server.lastRequest.body.trim(), 'test=42') + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); - }); -}); + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) + }) +}) -describe('Sending \'text/plain\' request', () => { - let runtimeInfo; - const contentType = 'text/plain'; +describe("Sending 'text/plain' request", () => { + let runtimeInfo + const contentType = 'text/plain' before((done) => { - const app = createServer({ bodyParser: bodyParser.text({ type: contentType }) }); - app.post('/data', (req, res) => res.json({ test: 'OK' })); - const dredd = new Dredd({ options: { path: './test/fixtures/request/text-plain.apib' } }); + const app = createServer({ + bodyParser: bodyParser.text({ type: contentType }) + }) + app.post('/data', (req, res) => res.json({ test: 'OK' })) + const dredd = new Dredd({ + options: { path: './test/fixtures/request/text-plain.apib' } + }) runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + runtimeInfo = info + done(err) + }) + }) it('results in one request being delivered to the server', () => { - assert.isTrue(runtimeInfo.server.requestedOnce); - }); + assert.isTrue(runtimeInfo.server.requestedOnce) + }) it('the request has the expected Content-Type', () => { - assert.equal(runtimeInfo.server.lastRequest.headers['content-type'], contentType); - }); + assert.equal( + runtimeInfo.server.lastRequest.headers['content-type'], + contentType + ) + }) it('the request has the expected format', () => { - assert.equal(runtimeInfo.server.lastRequest.body, 'test equals to 42\n'); - }); + assert.equal(runtimeInfo.server.lastRequest.body, 'test equals to 42\n') + }) it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -}); - -[ + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) +}) +;[ { name: 'API Blueprint', - path: './test/fixtures/request/application-octet-stream.apib', + path: './test/fixtures/request/application-octet-stream.apib' }, { name: 'OpenAPI 2', - path: './test/fixtures/request/application-octet-stream.yaml', - }, -].forEach(apiDescription => describe(`Sending 'application/octet-stream' request described in ${apiDescription.name}`, () => { - let runtimeInfo; - const contentType = 'application/octet-stream'; + path: './test/fixtures/request/application-octet-stream.yaml' + } +].forEach((apiDescription) => + describe(`Sending 'application/octet-stream' request described in ${apiDescription.name}`, () => { + let runtimeInfo + const contentType = 'application/octet-stream' - before((done) => { - const app = createServer({ bodyParser: bodyParser.raw({ type: contentType }) }); - app.post('/binary', (req, res) => res.json({ test: 'OK' })); - - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/request/application-octet-stream-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.equal(runtimeInfo.server.lastRequest.headers['content-type'], contentType)); - it('the request has the expected format', () => 
assert.equal( - runtimeInfo.server.lastRequest.body.toString('base64'), - Buffer.from([0xFF, 0xEF, 0xBF, 0xBE]).toString('base64') - )); - it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -})); - -[ + before((done) => { + const app = createServer({ + bodyParser: bodyParser.raw({ type: contentType }) + }) + app.post('/binary', (req, res) => res.json({ test: 'OK' })) + + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/request/application-octet-stream-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.equal( + runtimeInfo.server.lastRequest.headers['content-type'], + contentType + )) + it('the request has the expected format', () => + assert.equal( + runtimeInfo.server.lastRequest.body.toString('base64'), + Buffer.from([0xff, 0xef, 0xbf, 0xbe]).toString('base64') + )) + it('results in one passing test', () => { + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) + }) +) +;[ { name: 'API Blueprint', - path: './test/fixtures/request/image-png.apib', + path: './test/fixtures/request/image-png.apib' }, { name: 'OpenAPI 2', - path: './test/fixtures/request/image-png.yaml', - }, -].forEach(apiDescription => describe(`Sending 'image/png' request described in ${apiDescription.name}`, () => { - let runtimeInfo; - const contentType = 'image/png'; - - before((done) => { - const app = createServer({ bodyParser: bodyParser.raw({ type: contentType }) }); - app.put('/image.png', (req, res) => res.json({ test: 'OK' })); - - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/request/image-png-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); + path: './test/fixtures/request/image-png.yaml' + } +].forEach((apiDescription) => + describe(`Sending 'image/png' request described in ${apiDescription.name}`, () => { + let runtimeInfo + const contentType = 'image/png' - it('results in one request being delivered to the server', () => assert.isTrue(runtimeInfo.server.requestedOnce)); - it('the request has the expected Content-Type', () => assert.equal(runtimeInfo.server.lastRequest.headers['content-type'], contentType)); - it('the request has the expected format', () => assert.equal( - runtimeInfo.server.lastRequest.body.toString('base64'), - fs.readFileSync(path.join(__dirname, '../fixtures/image.png')).toString('base64') - )); - it('results in one passing test', () => { - assert.equal(runtimeInfo.dredd.stats.tests, 1); - assert.equal(runtimeInfo.dredd.stats.passes, 1); - }); -})); + before((done) => { + const app = createServer({ + bodyParser: bodyParser.raw({ type: contentType }) + }) + app.put('/image.png', (req, res) => res.json({ test: 'OK' })) + + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/request/image-png-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('results in one request being delivered to the server', () => + assert.isTrue(runtimeInfo.server.requestedOnce)) + it('the request has the expected Content-Type', () => + assert.equal( 
+ runtimeInfo.server.lastRequest.headers['content-type'], + contentType + )) + it('the request has the expected format', () => + assert.equal( + runtimeInfo.server.lastRequest.body.toString('base64'), + fs + .readFileSync(path.join(__dirname, '../fixtures/image.png')) + .toString('base64') + )) + it('results in one passing test', () => { + assert.equal(runtimeInfo.dredd.stats.tests, 1) + assert.equal(runtimeInfo.dredd.stats.passes, 1) + }) + }) +) diff --git a/test/integration/require-test.js b/test/integration/require-test.js index 152a53808..01ae0244f 100644 --- a/test/integration/require-test.js +++ b/test/integration/require-test.js @@ -1,85 +1,87 @@ -const { assert } = require('chai'); - -const Dredd = require('../../lib/Dredd'); -const { runDredd } = require('./helpers'); +import { assert } from 'chai' +import Dredd from '../../lib/Dredd' +import { runDredd } from './helpers' describe('Requiring user-provided modules (e.g. language compilers)', () => { describe('when provided with a local module', () => { - let dreddRuntimeInfo; + let dreddRuntimeInfo before((done) => { - delete global.__requiredModule; + delete global.__requiredModule const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib', - require: './test/fixtures/requiredModule', - }, - }); + require: './test/fixtures/requiredModule' + } + }) runDredd(dredd, (err, info) => { - dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes no error to the callback', () => { - assert.isNotOk(dreddRuntimeInfo.err); - }); + assert.isNotOk(dreddRuntimeInfo.err) + }) it('requires the module', () => { - assert.isTrue(global.__requiredModule); - }); - }); + assert.isTrue(global.__requiredModule) + }) + }) describe('when provided with an installed module', () => { - let dreddRuntimeInfo; + let dreddRuntimeInfo before((done) => { const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib', hookfiles: './test/fixtures/hooks-log.coffee', - require: 'coffeescript/register', - }, - }); + require: 'coffeescript/register' + } + }) runDredd(dredd, (err, info) => { - dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes no error to the callback', () => { - assert.isNotOk(dreddRuntimeInfo.err); - }); + assert.isNotOk(dreddRuntimeInfo.err) + }) it('requires the module', () => { - assert.include(dreddRuntimeInfo.logging, 'using hooks.log to debug'); - }); - }); + assert.include(dreddRuntimeInfo.logging, 'using hooks.log to debug') + }) + }) describe('when provided with a non-existing module', () => { - let dreddRuntimeInfo; + let dreddRuntimeInfo before((done) => { const dredd = new Dredd({ options: { path: './test/fixtures/single-get.apib', - require: 'no-such-module', - }, - }); + require: 'no-such-module' + } + }) runDredd(dredd, (err, info) => { - dreddRuntimeInfo = info; - done(err); - }); - }); + dreddRuntimeInfo = info + done(err) + }) + }) it('passes error to the callback', () => { - assert.instanceOf(dreddRuntimeInfo.err, Error); - }); + assert.instanceOf(dreddRuntimeInfo.err, Error) + }) it('the error is a native MODULE_NOT_FOUND error', () => { - assert.equal(dreddRuntimeInfo.err.code, 'MODULE_NOT_FOUND'); - }); + assert.equal(dreddRuntimeInfo.err.code, 'MODULE_NOT_FOUND') + }) it('the error message is descriptive', () => { - assert.include(dreddRuntimeInfo.err.message, 'Cannot find module \'no-such-module\''); - }); - }); -}); + assert.include( + dreddRuntimeInfo.err.message, + "Cannot find module 
'no-such-module'" + ) + }) + }) +}) diff --git a/test/integration/response-test.js b/test/integration/response-test.js index e47fd6f4e..437ec23e1 100644 --- a/test/integration/response-test.js +++ b/test/integration/response-test.js @@ -1,324 +1,391 @@ -const { assert } = require('chai'); -const path = require('path'); - -const { runDreddWithServer, createServer } = require('./helpers'); -const Dredd = require('../../lib/Dredd'); - -[{ - name: 'API Blueprint', - path: './test/fixtures/response/empty-body-empty-schema.apib', -}, -{ - name: 'OpenAPI 2', - path: './test/fixtures/response/empty-body-empty-schema.yaml', -}, -].forEach(apiDescription => describe(`Specifying neither response body nor schema in the ${apiDescription.name}`, () => { - describe('when the server returns non-empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.json({ test: 'OK' })); - app.get('/resource.csv', (req, res) => res.type('text/csv').send('test,OK\n')); - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })); - }); - - describe('when the server returns empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.type('json').send()); - app.get('/resource.csv', (req, res) => res.type('text/csv').send()); - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })); - }); -})); - -[{ - name: 'API Blueprint', - path: './test/fixtures/response/empty-body.apib', -}, -{ - name: 'OpenAPI 2', - path: './test/fixtures/response/empty-body.yaml', -}, -].forEach(apiDescription => describe(`Specifying no response body in the ${apiDescription.name}, but specifying a schema`, () => { - describe('when the server returns a response not valid according to the schema', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource', (req, res) => res.json({ name: 123 })); - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the response as invalid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, failures: 1 })); - it('prints JSON Schema validation error', () => assert.include(runtimeInfo.dredd.logging, 'At \'/name\' Invalid type: number (expected string)')); - }); - - describe('when the server returns a response valid according to the schema', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource', (req, res) => res.json({ name: 'test' })); - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the response as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })); - }); -})); - -[{ - name: 'API Blueprint', - path: './test/fixtures/response/empty-body-empty-schema.apib', -}, -{ - name: 'OpenAPI 2', - path: 
'./test/fixtures/response/empty-body-empty-schema.yaml', -}, -].forEach(apiDescription => describe(`Specifying no response body in the ${apiDescription.name} and having hooks ensuring empty response`, () => { - describe('when the server returns a non-empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.json({ test: 'OK' })); - app.get('/resource.csv', (req, res) => res.type('text/csv').send('test,OK\n')); - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/empty-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as invalid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, failures: 2 })); - it('prints the error message from hooks', () => assert.include(runtimeInfo.dredd.logging, 'The response body must be empty')); - }); - - describe('when the server returns an empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.send()); - app.get('/resource.csv', (req, res) => res.type('text/csv').send()); - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/empty-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })); - }); -})); - -[{ - name: 'API Blueprint', - path: './test/fixtures/response/empty-body-empty-schema.apib', -}, -{ - name: 'OpenAPI 2', - path: './test/fixtures/response/empty-body-empty-schema.yaml', -}, -].forEach(apiDescription => describe(`Specifying no response body in the ${apiDescription.name} and having hooks ensuring empty response`, () => { - describe('when the server returns non-empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.json({ test: 'OK' })); - app.get('/resource.csv', (req, res) => res.type('text/csv').send('test,OK\n')); - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/empty-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as invalid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, failures: 2 })); - it('prints the error message from hooks', () => assert.include(runtimeInfo.dredd.logging, 'The response body must be empty')); - }); - - describe('when the server returns empty responses', () => { - let runtimeInfo; - - before((done) => { - const app = createServer(); - app.get('/resource.json', (req, res) => res.send()); - app.get('/resource.csv', (req, res) => res.type('text/csv').send()); - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/empty-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the responses as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })); - }); -})); - -[{ - name: 'API Blueprint', - path: './test/fixtures/response/204-205-body.apib', -}, -{ - name: 'OpenAPI 2', - path: 
'./test/fixtures/response/204-205-body.yaml', -}, -].forEach(apiDescription => describe(`Working with HTTP 204 and 205 responses in the ${apiDescription.name}`, () => { - describe('when the actual response is non-empty', () => { - let runtimeInfo; - - before((done) => { - // It's not trivial to create an actual server sending HTTP 204 or 205 - // with non-empty body, because it's against specs. That's why we're - // returning HTTP 200 here and in the assertions we're making sure - // the failures are there only because of non-matching status codes. - const app = createServer(); - app.get('*', (req, res) => res.type('text/plain').send('test\n')); - - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates all the responses as invalid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 4, failures: 4 })); - it('prints four warnings for each of the responses', () => assert.equal(runtimeInfo.dredd.logging.match( - /HTTP 204 and 205 responses must not include a message body/g - ).length, 4)); - it('prints four failures for each non-matching status code', () => assert.equal(runtimeInfo.dredd.logging.match( - /fail: statusCode: Expected status code '\d+', but got '200'./g - ).length, 4)); - it('does not print any failures regarding response bodies', () => assert.isNull(runtimeInfo.dredd.logging.match(/fail: body:/g))); - }); - - describe('when the actual response is empty', () => { - let runtimeInfo; - - before((done) => { - // It's not trivial to create an actual server sending HTTP 204 or 205 - // sending a Content-Type header, because it's against specs. That's - // why we're returning HTTP 200 here and in the assertions we're making - // sure the extra failures are there only because of non-matching status - // codes. 
- const app = createServer(); - app.get('*', (req, res) => res.type('text/plain').send()); - - const dredd = new Dredd({ options: { path: apiDescription.path } }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates all the responses as invalid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 4, failures: 4 })); - it('prints two warnings for each of the non-empty expectations', () => assert.equal(runtimeInfo.dredd.logging.match( - /HTTP 204 and 205 responses must not include a message body/g - ).length, 2)); - it('prints two failures for each non-matching body (and status code)', () => assert.equal(runtimeInfo.dredd.logging.match( - /fail: body: Actual and expected data do not match.\nstatusCode: Expected status code '\d+', but got '200'./g - ).length, 2)); - it('prints two failures for each non-matching status code', () => assert.equal(runtimeInfo.dredd.logging.match( - /fail: statusCode: Expected status code '\d+', but got '200'./g - ).length, 2)); - }); -})); - -[ +import { assert } from 'chai' +import * as path from 'path' + +import { runDreddWithServer, createServer } from './helpers' +import Dredd from '../../lib/Dredd' + +const files = [ + { + name: 'API Blueprint', + path: './test/fixtures/response/empty-body-empty-schema.apib' + }, + { + name: 'OpenAPI 2', + path: './test/fixtures/response/empty-body-empty-schema.yaml' + } +] + +files.forEach((apiDescription) => + describe(`Specifying neither response body nor schema in the ${apiDescription.name}`, () => { + describe('when the server returns non-empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.json({ test: 'OK' })) + app.get('/resource.csv', (req, res) => + res.type('text/csv').send('test,OK\n') + ) + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })) + }) + + describe('when the server returns empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.type('json').send()) + app.get('/resource.csv', (req, res) => res.type('text/csv').send()) + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })) + }) + }) +) +;[ + { + name: 'API Blueprint', + path: './test/fixtures/response/empty-body.apib' + }, + { + name: 'OpenAPI 2', + path: './test/fixtures/response/empty-body.yaml' + } +].forEach((apiDescription) => + describe(`Specifying no response body in the ${apiDescription.name}, but specifying a schema`, () => { + describe('when the server returns a response not valid according to the schema', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource', (req, res) => res.json({ name: 123 })) + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the response as invalid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, failures: 1 })) + it('prints JSON Schema 
validation error', () => + assert.include( + runtimeInfo.dredd.logging, + "At '/name' Invalid type: number (expected string)" + )) + }) + + describe('when the server returns a response valid according to the schema', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource', (req, res) => res.json({ name: 'test' })) + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the response as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })) + }) + }) +) +;[ + { + name: 'API Blueprint', + path: './test/fixtures/response/empty-body-empty-schema.apib' + }, + { + name: 'OpenAPI 2', + path: './test/fixtures/response/empty-body-empty-schema.yaml' + } +].forEach((apiDescription) => + describe(`Specifying no response body in the ${apiDescription.name} and having hooks ensuring empty response`, () => { + describe('when the server returns a non-empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.json({ test: 'OK' })) + app.get('/resource.csv', (req, res) => + res.type('text/csv').send('test,OK\n') + ) + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/empty-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as invalid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, failures: 2 })) + it('prints the error message from hooks', () => + assert.include( + runtimeInfo.dredd.logging, + 'The response body must be empty' + )) + }) + + describe('when the server returns an empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.send()) + app.get('/resource.csv', (req, res) => res.type('text/csv').send()) + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/empty-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })) + }) + }) +) +;[ { name: 'API Blueprint', - path: './test/fixtures/response/binary.apib', + path: './test/fixtures/response/empty-body-empty-schema.apib' }, { name: 'OpenAPI 2', - path: './test/fixtures/response/binary.yaml', + path: './test/fixtures/response/empty-body-empty-schema.yaml' + } +].forEach((apiDescription) => + describe(`Specifying no response body in the ${apiDescription.name} and having hooks ensuring empty response`, () => { + describe('when the server returns non-empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.json({ test: 'OK' })) + app.get('/resource.csv', (req, res) => + res.type('text/csv').send('test,OK\n') + ) + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/empty-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as invalid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, failures: 2 })) + it('prints the error message from hooks', () => + 
assert.include( + runtimeInfo.dredd.logging, + 'The response body must be empty' + )) + }) + + describe('when the server returns empty responses', () => { + let runtimeInfo + + before((done) => { + const app = createServer() + app.get('/resource.json', (req, res) => res.send()) + app.get('/resource.csv', (req, res) => res.type('text/csv').send()) + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/empty-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the responses as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 2, passes: 2 })) + }) + }) +) +;[ + { + name: 'API Blueprint', + path: './test/fixtures/response/204-205-body.apib' }, -].forEach(apiDescription => describe(`Working with binary responses in the ${apiDescription.name}`, () => { - const imagePath = path.join(__dirname, '../fixtures/image.png'); - const app = createServer(); - app.get('/image.png', (req, res) => res.type('image/png').sendFile(imagePath)); - - describe('when the body is described as empty and there are hooks to remove the real body', () => { - let runtimeInfo; - - before((done) => { - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/binary-ignore-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the response as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })); - }); - - describe('when the body is described as empty and there are hooks to assert the real body', () => { - let runtimeInfo; - - before((done) => { - const dredd = new Dredd({ - options: { - path: apiDescription.path, - hookfiles: './test/fixtures/response/binary-assert-body-hooks.js', - }, - }); - runDreddWithServer(dredd, app, (err, info) => { - runtimeInfo = info; - done(err); - }); - }); - - it('evaluates the response as valid', () => assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })); - }); -})); + { + name: 'OpenAPI 2', + path: './test/fixtures/response/204-205-body.yaml' + } +].forEach((apiDescription) => + describe(`Working with HTTP 204 and 205 responses in the ${apiDescription.name}`, () => { + describe('when the actual response is non-empty', () => { + let runtimeInfo + + before((done) => { + // It's not trivial to create an actual server sending HTTP 204 or 205 + // with non-empty body, because it's against specs. That's why we're + // returning HTTP 200 here and in the assertions we're making sure + // the failures are there only because of non-matching status codes. 
+ const app = createServer() + app.get('*', (req, res) => res.type('text/plain').send('test\n')) + + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates all the responses as invalid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 4, failures: 4 })) + it('prints four warnings for each of the responses', () => + assert.equal( + runtimeInfo.dredd.logging.match( + /HTTP 204 and 205 responses must not include a message body/g + ).length, + 4 + )) + it('prints four failures for each non-matching status code', () => + assert.equal( + runtimeInfo.dredd.logging.match( + /fail: statusCode: Expected status code '\d+', but got '200'./g + ).length, + 4 + )) + it('does not print any failures regarding response bodies', () => + assert.isNull(runtimeInfo.dredd.logging.match(/fail: body:/g))) + }) + + describe('when the actual response is empty', () => { + let runtimeInfo + + before((done) => { + // It's not trivial to create an actual server sending HTTP 204 or 205 + // sending a Content-Type header, because it's against specs. That's + // why we're returning HTTP 200 here and in the assertions we're making + // sure the extra failures are there only because of non-matching status + // codes. + const app = createServer() + app.get('*', (req, res) => res.type('text/plain').send()) + + const dredd = new Dredd({ options: { path: apiDescription.path } }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates all the responses as invalid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 4, failures: 4 })) + it('prints two warnings for each of the non-empty expectations', () => + assert.equal( + runtimeInfo.dredd.logging.match( + /HTTP 204 and 205 responses must not include a message body/g + ).length, + 2 + )) + it('prints two failures for each non-matching body (and status code)', () => + assert.equal( + runtimeInfo.dredd.logging.match( + /fail: body: Actual and expected data do not match.\nstatusCode: Expected status code '\d+', but got '200'./g + ).length, + 2 + )) + it('prints two failures for each non-matching status code', () => + assert.equal( + runtimeInfo.dredd.logging.match( + /fail: statusCode: Expected status code '\d+', but got '200'./g + ).length, + 2 + )) + }) + }) +) +;[ + { + name: 'API Blueprint', + path: './test/fixtures/response/binary.apib' + }, + { + name: 'OpenAPI 2', + path: './test/fixtures/response/binary.yaml' + } +].forEach((apiDescription) => + describe(`Working with binary responses in the ${apiDescription.name}`, () => { + const imagePath = path.join(__dirname, '../fixtures/image.png') + const app = createServer() + app.get('/image.png', (req, res) => + res.type('image/png').sendFile(imagePath) + ) + + describe('when the body is described as empty and there are hooks to remove the real body', () => { + let runtimeInfo + + before((done) => { + const dredd = new Dredd({ + options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/binary-ignore-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the response as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })) + }) + + describe('when the body is described as empty and there are hooks to assert the real body', () => { + let runtimeInfo + + before((done) => { + const dredd = new Dredd({ + 
options: { + path: apiDescription.path, + hookfiles: './test/fixtures/response/binary-assert-body-hooks.js' + } + }) + runDreddWithServer(dredd, app, (err, info) => { + runtimeInfo = info + done(err) + }) + }) + + it('evaluates the response as valid', () => + assert.deepInclude(runtimeInfo.dredd.stats, { tests: 1, passes: 1 })) + }) + }) +) diff --git a/test/integration/sanitation-test.js b/test/integration/sanitation-test.js index dc3979214..cafe52762 100644 --- a/test/integration/sanitation-test.js +++ b/test/integration/sanitation-test.js @@ -1,37 +1,53 @@ -const clone = require('clone'); -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import clone from 'clone' +import { assert } from 'chai' +import { EventEmitter } from 'events' -const { runDredd, createServer, runDreddWithServer } = require('./helpers'); -const Dredd = require('../../lib/Dredd'); +import { runDredd, createServer, runDreddWithServer } from './helpers' +import Dredd from '../../lib/Dredd' describe('Sanitation of Reported Data', () => { // Sample sensitive data (this value is used in API Blueprint fixtures as well) - const sensitiveKey = 'token'; - const sensitiveHeaderName = 'authorization'; - const sensitiveValue = '5229c6e8e4b0bd7dbb07e29c'; + const sensitiveKey = 'token' + const sensitiveHeaderName = 'authorization' + const sensitiveValue = '5229c6e8e4b0bd7dbb07e29c' // Create an EventEmitter to record events sent to reporters function createEventEmitter(events) { - const emitter = new EventEmitter(); + const emitter = new EventEmitter() // Dredd emits 'test *' events and reporters listen on them. To test whether // sensitive data will or won't make it to reporters, we need to capture all // the emitted events. We're using 'clone' to prevent propagation of subsequent // modifications of the 'test' object (Dredd can change the data after they're // reported and by reference they would change also here in the 'events' array). 
- emitter.on('test start', test => events.push({ name: 'test start', test: clone(test) })); - emitter.on('test pass', test => events.push({ name: 'test pass', test: clone(test) })); - emitter.on('test skip', test => events.push({ name: 'test skip', test: clone(test) })); - emitter.on('test fail', test => events.push({ name: 'test fail', test: clone(test) })); - emitter.on('test error', (err, test) => events.push({ name: 'test error', test: clone(test), err })); + emitter.on('test start', (test) => + events.push({ name: 'test start', test: clone(test) }) + ) + emitter.on('test pass', (test) => + events.push({ name: 'test pass', test: clone(test) }) + ) + emitter.on('test skip', (test) => + events.push({ name: 'test skip', test: clone(test) }) + ) + emitter.on('test fail', (test) => + events.push({ name: 'test fail', test: clone(test) }) + ) + emitter.on('test error', (err, test) => + events.push({ name: 'test error', test: clone(test), err }) + ) // 'start' and 'end' events are asynchronous and they do not carry any data // significant for following scenarios - emitter.on('start', (apiDescriptions, cb) => { events.push({ name: 'start' }); return cb(); }); - emitter.on('end', (cb) => { events.push({ name: 'end' }); return cb(); }); - - return emitter; + emitter.on('start', (apiDescriptions, cb) => { + events.push({ name: 'start' }) + return cb() + }) + emitter.on('end', (cb) => { + events.push({ name: 'end' }) + return cb() + }) + + return emitter } // Helper for preparing Dredd instance with our custom emitter @@ -40,578 +56,715 @@ describe('Sanitation of Reported Data', () => { emitter: createEventEmitter(events), options: { path: `./test/fixtures/sanitation/${fixtureName}.apib`, - hookfiles: `./test/fixtures/sanitation/${fixtureName}.js`, - }, - }); + hookfiles: `./test/fixtures/sanitation/${fixtureName}.js` + } + }) } // Helper for preparing the server under test function createServerFromResponse(response) { - const app = createServer(); - app.put('/resource', (req, res) => res.json(response)); - return app; + const app = createServer() + app.put('/resource', (req, res) => res.json(response)) + return app } describe('Sanitation of the Entire Request Body', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'entire-request-body'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be string → failing test + const dredd = createDreddFromFixture(events, 'entire-request-body') + const app = createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); - it('emitted test data does not contain request body', () => assert.equal(events[2].test.request.body, '')); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 
'start', + 'test start', + 'test fail', + 'end' + ])) + it('emitted test data does not contain request body', () => + assert.equal(events[2].test.request.body, '')) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of the Entire Response Body', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'entire-response-body'); - const app = createServerFromResponse({ token: 123 }); // 'token' should be string → failing test + const dredd = createDreddFromFixture(events, 'entire-response-body') + const app = createServerFromResponse({ token: 123 }) // 'token' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does not contain response body', () => { - assert.equal(events[2].test.actual.body, ''); - assert.equal(events[2].test.expected.body, ''); - }); + assert.equal(events[2].test.actual.body, '') + assert.equal(events[2].test.expected.body, '') + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of a Request Body Attribute', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'request-body-attribute'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be string → failing test + const dredd = createDreddFromFixture(events, 'request-body-attribute') + const app = 
createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does not contain confidential body attribute', () => { - const attrs = Object.keys(JSON.parse(events[2].test.request.body)); - assert.deepEqual(attrs, ['name']); - }); + const attrs = Object.keys(JSON.parse(events[2].test.request.body)) + assert.deepEqual(attrs, ['name']) + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of a Response Body Attribute', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'response-body-attribute'); - const app = createServerFromResponse({ token: 123, name: 'Bob' }); // 'token' should be string → failing test + const dredd = createDreddFromFixture(events, 'response-body-attribute') + const app = createServerFromResponse({ token: 123, name: 'Bob' }) // 'token' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does not contain confidential body attribute', () => { - let attrs = Object.keys(JSON.parse(events[2].test.actual.body)); - assert.deepEqual(attrs, ['name']); + let attrs = Object.keys(JSON.parse(events[2].test.actual.body)) + 
assert.deepEqual(attrs, ['name']) - attrs = Object.keys(JSON.parse(events[2].test.expected.body)); - assert.deepEqual(attrs, ['name']); - }); + attrs = Object.keys(JSON.parse(events[2].test.expected.body)) + assert.deepEqual(attrs, ['name']) + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of Plain Text Response Body by Pattern Matching', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'plain-text-response-body'); - const app = createServerFromResponse(`${sensitiveKey}=42${sensitiveValue}`); // should be without '42' → failing test + const dredd = createDreddFromFixture(events, 'plain-text-response-body') + const app = createServerFromResponse( + `${sensitiveKey}=42${sensitiveValue}` + ) // should be without '42' → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does contain the sensitive data censored', () => { - assert.include(events[2].test.actual.body, '--- CENSORED ---'); - assert.include(events[2].test.expected.body, '--- CENSORED ---'); - }); - it('sensitive data cannot be found anywhere in the emitted test data', () => assert.notInclude(JSON.stringify(events), sensitiveValue)); - it('sensitive data cannot be found anywhere in Dredd output', () => assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)); - }); + assert.include(events[2].test.actual.body, '--- CENSORED ---') + assert.include(events[2].test.expected.body, '--- CENSORED ---') + }) + it('sensitive data cannot be found anywhere in the emitted test data', () => + assert.notInclude(JSON.stringify(events), sensitiveValue)) + it('sensitive data cannot be found anywhere in Dredd output', () => + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)) + }) describe('Sanitation of Request Headers', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'request-headers'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be 
string → failing test + const dredd = createDreddFromFixture(events, 'request-headers') + const app = createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does not contain confidential header', () => { - const names = Object.keys(events[2].test.request.headers).map(name => name.toLowerCase()); - assert.notInclude(names, sensitiveHeaderName); - }); + const names = Object.keys(events[2].test.request.headers).map((name) => + name.toLowerCase() + ) + assert.notInclude(names, sensitiveHeaderName) + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events).toLowerCase(); - assert.notInclude(test, sensitiveHeaderName); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events).toLowerCase() + assert.notInclude(test, sensitiveHeaderName) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - const logging = dreddRuntimeInfo.logging.toLowerCase(); - assert.notInclude(logging, sensitiveHeaderName); - assert.notInclude(logging, sensitiveValue); - }); - }); + const logging = dreddRuntimeInfo.logging.toLowerCase() + assert.notInclude(logging, sensitiveHeaderName) + assert.notInclude(logging, sensitiveValue) + }) + }) describe('Sanitation of Response Headers', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'response-headers'); - const app = createServerFromResponse({ name: 'Bob' }); // Authorization header is missing → failing test + const dredd = createDreddFromFixture(events, 'response-headers') + const app = createServerFromResponse({ name: 'Bob' }) // Authorization header is missing → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test data does 
not contain confidential header', () => { - let names = Object.keys(events[2].test.actual.headers).map(name => name.toLowerCase()); - assert.notInclude(names, sensitiveHeaderName); - - names = Object.keys(events[2].test.expected.headers).map(name => name.toLowerCase()); - assert.notInclude(names, sensitiveHeaderName); - }); + let names = Object.keys(events[2].test.actual.headers).map((name) => + name.toLowerCase() + ) + assert.notInclude(names, sensitiveHeaderName) + + names = Object.keys(events[2].test.expected.headers).map((name) => + name.toLowerCase() + ) + assert.notInclude(names, sensitiveHeaderName) + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events).toLowerCase(); - assert.notInclude(test, sensitiveHeaderName); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events).toLowerCase() + assert.notInclude(test, sensitiveHeaderName) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - const logging = dreddRuntimeInfo.logging.toLowerCase(); - assert.notInclude(logging, sensitiveHeaderName); - assert.notInclude(logging, sensitiveValue); - }); - }); + const logging = dreddRuntimeInfo.logging.toLowerCase() + assert.notInclude(logging, sensitiveHeaderName) + assert.notInclude(logging, sensitiveValue) + }) + }) describe('Sanitation of URI Parameters by Pattern Matching', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'uri-parameters'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be string → failing test + const dredd = createDreddFromFixture(events, 'uri-parameters') + const app = createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); - it('emitted test data does contain the sensitive data censored', () => assert.include(events[2].test.request.uri, 'CENSORED')); - it('sensitive data cannot be found anywhere in the emitted test data', () => assert.notInclude(JSON.stringify(events), sensitiveValue)); - it('sensitive data cannot be found anywhere in Dredd output', () => assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)); - }); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) + it('emitted test data does contain the sensitive data censored', () => + assert.include(events[2].test.request.uri, 'CENSORED')) + it('sensitive data cannot be found anywhere in the emitted test data', () => + assert.notInclude(JSON.stringify(events), sensitiveValue)) + it('sensitive data cannot be found anywhere in Dredd output', () => + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)) + }) 
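// For context: the fixtures these sanitation tests load ('uri-parameters',
// 'any-content-pattern-matching', ...) are Dredd hookfiles. A minimal,
// hypothetical sketch of such a hookfile (not part of this diff) that
// censors a sensitive value in the emitted test data before it can reach
// a reporter or Dredd's own logging could look roughly like this:

const hooks = require('hooks') // Dredd's hooks API, available inside hookfiles

// assumption: the fixture knows which literal value counts as sensitive
const SENSITIVE_VALUE = 'supersecret'

hooks.afterEach((transaction, done) => {
  // Mutate properties of transaction.test in place; as the comment below
  // notes, assigning a whole new object to transaction.test is ignored.
  transaction.test.request.uri = transaction.test.request.uri
    .split(SENSITIVE_VALUE)
    .join('--- CENSORED ---')
  done()
})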
// This fails because it's not possible to do 'transaction.test = myOwnTestObject;' // at the moment, Dredd ignores the new object. describe('Sanitation of Any Content by Pattern Matching', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'any-content-pattern-matching'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be string → failing test + const dredd = createDreddFromFixture( + events, + 'any-content-pattern-matching' + ) + const app = createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); - it('emitted test data does contain the sensitive data censored', () => assert.include(JSON.stringify(events), 'CENSORED')); - it('sensitive data cannot be found anywhere in the emitted test data', () => assert.notInclude(JSON.stringify(events), sensitiveValue)); - it('sensitive data cannot be found anywhere in Dredd output', () => assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)); - }); - - describe('Ultimate \'afterEach\' Guard Using Pattern Matching', () => { - const events = []; - let dreddRuntimeInfo; + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) + it('emitted test data does contain the sensitive data censored', () => + assert.include(JSON.stringify(events), 'CENSORED')) + it('sensitive data cannot be found anywhere in the emitted test data', () => + assert.notInclude(JSON.stringify(events), sensitiveValue)) + it('sensitive data cannot be found anywhere in Dredd output', () => + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)) + }) + + describe("Ultimate 'afterEach' Guard Using Pattern Matching", () => { + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'any-content-guard-pattern-matching'); - const app = createServerFromResponse({ name: 123 }); // 'name' should be string → failing test + const dredd = createDreddFromFixture( + events, + 'any-content-guard-pattern-matching' + ) + const app = createServerFromResponse({ name: 123 }) // 'name' should be string → failing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + 
assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveValue); - }); - it('sensitive data cannot be found anywhere in Dredd output', () => assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)); - it('custom error message is printed', () => assert.include(dreddRuntimeInfo.logging, 'Sensitive data would be sent to Dredd reporter')); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveValue) + }) + it('sensitive data cannot be found anywhere in Dredd output', () => + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)) + it('custom error message is printed', () => + assert.include( + dreddRuntimeInfo.logging, + 'Sensitive data would be sent to Dredd reporter' + )) + }) describe('Sanitation of Test Data of Passing Transaction', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-passing'); - const app = createServerFromResponse({ name: 'Bob' }); // Passing test + const dredd = createDreddFromFixture(events, 'transaction-passing') + const app = createServerFromResponse({ name: 'Bob' }) // Passing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one passing test', () => { - assert.equal(dreddRuntimeInfo.stats.passes, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test pass', 'end', - ])); - it('emitted test data does not contain request body', () => assert.equal(events[2].test.request.body, '')); + assert.equal(dreddRuntimeInfo.stats.passes, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test pass', + 'end' + ])) + it('emitted test data does not contain request body', () => + assert.equal(events[2].test.request.body, '')) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) - describe('Sanitation of Test Data When Transaction Is Marked as Failed in \'before\' Hook', () => { - const events = []; - let dreddRuntimeInfo; + describe("Sanitation of Test Data When Transaction Is Marked as Failed in 'before' Hook", () => { + const events = 
[] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-marked-failed-before'); + const dredd = createDreddFromFixture( + events, + 'transaction-marked-failed-before' + ) runDredd(dredd, (...args) => { - let err; - // eslint-disable-next-line - [err, dreddRuntimeInfo] = Array.from(args); - done(err); - }); - }); + let err + // eslint-disable-next-line + ;[err, dreddRuntimeInfo] = Array.from(args) + done(err) + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) it('emitted test is failed', () => { - assert.equal(events[2].test.status, 'fail'); - assert.include(events[2].test.errors[0].message.toLowerCase(), 'fail'); - }); - it('emitted test data results contain just \'errors\' section', () => assert.containsAllKeys(events[2].test, ['errors'])); + assert.equal(events[2].test.status, 'fail') + assert.include(events[2].test.errors[0].message.toLowerCase(), 'fail') + }) + it("emitted test data results contain just 'errors' section", () => + assert.containsAllKeys(events[2].test, ['errors'])) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) - describe('Sanitation of Test Data When Transaction Is Marked as Failed in \'after\' Hook', () => { - const events = []; - let dreddRuntimeInfo; + describe("Sanitation of Test Data When Transaction Is Marked as Failed in 'after' Hook", () => { + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-marked-failed-after'); - const app = createServerFromResponse({ name: 'Bob' }); // Passing test + const dredd = createDreddFromFixture( + events, + 'transaction-marked-failed-after' + ) + const app = createServerFromResponse({ name: 'Bob' }) // Passing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test fail', 'end', - ])); - it('emitted test data does not contain request body', () => 
assert.equal(events[2].test.request.body, '')); + assert.equal(dreddRuntimeInfo.stats.failures, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test fail', + 'end' + ])) + it('emitted test data does not contain request body', () => + assert.equal(events[2].test.request.body, '')) it('emitted test is failed', () => { - assert.equal(events[2].test.status, 'fail'); - assert.include(events[2].test.errors[0].message.toLowerCase(), 'fail'); - }); + assert.equal(events[2].test.status, 'fail') + assert.include(events[2].test.errors[0].message.toLowerCase(), 'fail') + }) it('emitted test data results contain all regular sections', () => { - assert.containsAllKeys(events[2].test, ['errors']); - assert.hasAllKeys(events[2].test.results, ['valid', 'fields']); - assert.hasAllKeys(events[2].test.results.fields, ['statusCode', 'headers', 'body']); - }); + assert.containsAllKeys(events[2].test, ['errors']) + assert.hasAllKeys(events[2].test.results, ['valid', 'fields']) + assert.hasAllKeys(events[2].test.results.fields, [ + 'statusCode', + 'headers', + 'body' + ]) + }) it('sensitive data cannot be found anywhere in the emitted test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of Test Data When Transaction Is Marked as Skipped', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-marked-skipped'); + const dredd = createDreddFromFixture(events, 'transaction-marked-skipped') runDredd(dredd, (...args) => { - let err; - // eslint-disable-next-line - [err, dreddRuntimeInfo] = Array.from(args); - done(err); - }); - }); + let err + // eslint-disable-next-line + ;[err, dreddRuntimeInfo] = Array.from(args) + done(err) + }) + }) it('results in one skipped test', () => { - assert.equal(dreddRuntimeInfo.stats.skipped, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test skip', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.skipped, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test skip', + 'end' + ])) it('emitted test is skipped', () => { - assert.equal(events[2].test.status, 'skip'); - assert.containsAllKeys(events[2].test, ['errors']); - assert.include(events[2].test.errors[0].message.toLowerCase(), 'skip'); - }); + assert.equal(events[2].test.status, 'skip') + assert.containsAllKeys(events[2].test, ['errors']) + assert.include(events[2].test.errors[0].message.toLowerCase(), 'skip') + }) it('sensitive data cannot be found anywhere in the emitted 
test data', () => { - const test = JSON.stringify(events); - assert.notInclude(test, sensitiveKey); - assert.notInclude(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.notInclude(test, sensitiveKey) + assert.notInclude(test, sensitiveValue) + }) it('sensitive data cannot be found anywhere in Dredd output', () => { - assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey); - assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey) + assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of Test Data of Transaction With Erroring Hooks', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-erroring-hooks'); - const app = createServerFromResponse({ name: 'Bob' }); // passing test + const dredd = createDreddFromFixture(events, 'transaction-erroring-hooks') + const app = createServerFromResponse({ name: 'Bob' }) // passing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one erroring test', () => { - assert.equal(dreddRuntimeInfo.stats.errors, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => assert.deepEqual((Array.from(events).map(event => event.name)), [ - 'start', 'test start', 'test error', 'end', - ])); + assert.equal(dreddRuntimeInfo.stats.errors, 1) + assert.equal(dreddRuntimeInfo.stats.tests, 1) + }) + it('emits expected events in expected order', () => + assert.deepEqual(Array.from(events).map((event) => event.name), [ + 'start', + 'test start', + 'test error', + 'end' + ])) it('sensitive data leak to emitted test data', () => { - const test = JSON.stringify(events); - assert.include(test, sensitiveKey); - assert.include(test, sensitiveValue); - }); + const test = JSON.stringify(events) + assert.include(test, sensitiveKey) + assert.include(test, sensitiveValue) + }) it('sensitive data leak to Dredd output', () => { - assert.include(dreddRuntimeInfo.logging, sensitiveKey); - assert.include(dreddRuntimeInfo.logging, sensitiveValue); - }); - }); + assert.include(dreddRuntimeInfo.logging, sensitiveKey) + assert.include(dreddRuntimeInfo.logging, sensitiveValue) + }) + }) describe('Sanitation of Test Data of Transaction With Secured Erroring Hooks', () => { - const events = []; - let dreddRuntimeInfo; + const events = [] + let dreddRuntimeInfo before((done) => { - const dredd = createDreddFromFixture(events, 'transaction-secured-erroring-hooks'); - const app = createServerFromResponse({ name: 'Bob' }); // passing test + const dredd = createDreddFromFixture( + events, + 'transaction-secured-erroring-hooks' + ) + const app = createServerFromResponse({ name: 'Bob' }) // passing test runDreddWithServer(dredd, app, (err, runtimeInfo) => { - if (err) { return done(err); } - dreddRuntimeInfo = runtimeInfo.dredd; - done(); - }); - }); + if (err) { + return done(err) + } + dreddRuntimeInfo = runtimeInfo.dredd + done() + }) + }) it('results in one failed test', () => { - assert.equal(dreddRuntimeInfo.stats.failures, 1); - assert.equal(dreddRuntimeInfo.stats.tests, 1); - }); - it('emits expected events in expected order', () => 
assert.deepEqual((Array.from(events).map(event => event.name)), [
-      'start', 'test start', 'test fail', 'end',
-    ]));
+      assert.equal(dreddRuntimeInfo.stats.failures, 1)
+      assert.equal(dreddRuntimeInfo.stats.tests, 1)
+    })
+    it('emits expected events in expected order', () =>
+      assert.deepEqual(Array.from(events).map((event) => event.name), [
+        'start',
+        'test start',
+        'test fail',
+        'end'
+      ]))
     it('sensitive data cannot be found anywhere in the emitted test data', () => {
-      const test = JSON.stringify(events);
-      assert.notInclude(test, sensitiveKey);
-      assert.notInclude(test, sensitiveValue);
-    });
+      const test = JSON.stringify(events)
+      assert.notInclude(test, sensitiveKey)
+      assert.notInclude(test, sensitiveValue)
+    })
     it('sensitive data cannot be found anywhere in Dredd output', () => {
-      assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey);
-      assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue);
-    });
-    it('custom error message is printed', () => assert.include(dreddRuntimeInfo.logging, 'Unexpected exception in hooks'));
-  });
-});
+      assert.notInclude(dreddRuntimeInfo.logging, sensitiveKey)
+      assert.notInclude(dreddRuntimeInfo.logging, sensitiveValue)
+    })
+    it('custom error message is printed', () =>
+      assert.include(dreddRuntimeInfo.logging, 'Unexpected exception in hooks'))
+  })
+})
diff --git a/test/mocha.opts b/test/mocha.opts
index eb637f4c3..a3df12bd6 100644
--- a/test/mocha.opts
+++ b/test/mocha.opts
@@ -1,2 +1,3 @@
 --timeout=120000
 --recursive
+--require ./test/ts-node.js
diff --git a/test/ts-node.js b/test/ts-node.js
new file mode 100644
index 000000000..8b4766854
--- /dev/null
+++ b/test/ts-node.js
@@ -0,0 +1,7 @@
+require('ts-node').register({
+  // This path is relative to the CWD of "mocha" process,
+  // which is, usually, the root directory of the repo.
+ project: './test/tsconfig.json', + transpileOnly: true, + files: ['../global.d.ts'] +}); diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 000000000..102f42c8d --- /dev/null +++ b/test/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "module": "commonjs", + "target": "esnext", + "allowSyntheticDefaultImports": true, + "allowJs": true, + "noEmit": true + }, + "include": ["./**/*.ts"] +} diff --git a/test/unit/CLI-test.js b/test/unit/CLI-test.js index b446bc818..fe5d7891f 100644 --- a/test/unit/CLI-test.js +++ b/test/unit/CLI-test.js @@ -1,14 +1,17 @@ -const crossSpawnStub = require('cross-spawn'); -const express = require('express'); -const fsStub = require('fs'); -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); -const { assert } = require('chai'); - -const configUtilsStub = require('../../lib/configUtils'); -const loggerStub = require('../../lib/logger'); -const options = require('../../lib/options'); -const packageData = require('../../package.json'); +import crossSpawnStub from 'cross-spawn'; +import express from 'express'; +import fsStub from 'fs'; +import { noCallThru } from 'proxyquire'; + +import sinon from 'sinon'; +import { assert } from 'chai'; + +import * as configUtilsStub from '../../lib/configUtils'; +import loggerStub from '../../lib/logger'; +import options from '../../options'; +import * as packageData from '../../package.json'; + +const proxyquire = noCallThru(); const PORT = 9876; @@ -18,18 +21,18 @@ let stderr = ''; let stdout = ''; const addHooksStub = proxyquire('../../lib/addHooks', { - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const transactionRunner = proxyquire('../../lib/TransactionRunner', { './addHooks': addHooksStub, - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const DreddStub = proxyquire('../../lib/Dredd', { './TransactionRunner': transactionRunner, - './logger': loggerStub, -}); + './logger': loggerStub +}).default; const initStub = sinon.stub().callsFake((config, save, callback) => { save(config); @@ -43,45 +46,52 @@ const CLIStub = proxyquire('../../lib/CLI', { './init': initStub, './configUtils': configUtilsStub, fs: fsStub, - 'cross-spawn': crossSpawnStub, -}); + 'cross-spawn': crossSpawnStub +}).default; function execCommand(custom = {}, cb) { stdout = ''; stderr = ''; let finished = false; - (new CLIStub({ - custom, - }, ((code) => { - if (!finished) { - finished = true; - exitStatus = code || 0; - return cb(); + new CLIStub( + { + custom + }, + (code) => { + if (!finished) { + finished = true; + exitStatus = code || 0; + return cb(); + } } - })).run()); + ).run(); } describe('CLI class', () => { before(() => { - ['warn', 'error', 'debug'].forEach((method) => { - sinon - .stub(loggerStub, method) - .callsFake((chunk) => { stderr += `\n${method}: ${chunk}`; }); + const logLevels = ['warn', 'error', 'debug']; + logLevels.forEach((method) => { + sinon.stub(loggerStub, method).callsFake((chunk) => { + stderr += `\n${method}: ${chunk}`; + }); + }); + sinon.stub(loggerStub, 'log').callsFake((chunk) => { + stdout += chunk; }); - sinon - .stub(loggerStub, 'log') - .callsFake((chunk) => { stdout += chunk; }); }); after(() => { - ['warn', 'error', 'debug', 'log'].forEach((method) => { + const logLevels = ['warn', 'error', 'debug', 'log']; + logLevels.forEach((method) => { loggerStub[method].restore(); }); }); describe('when initialized without "new" keyword', () => { let dc = null; - before(() => { dc 
= new CLIStub(); }); + before(() => { + dc = new CLIStub(); + }); it('sets finished to false', () => assert.isFalse(dc.finished)); @@ -100,7 +110,6 @@ describe('CLI class', () => { it('returns an instanceof CLI', () => assert.instanceOf(dc, CLIStub)); }); - describe('when initialized with options containing exit callback', () => { let dc = null; let hasCalledExit; @@ -109,7 +118,7 @@ describe('CLI class', () => { dc = new CLIStub({ exit() { hasCalledExit = true; - }, + } }); dc.run(); }); @@ -120,16 +129,18 @@ describe('CLI class', () => { assert.isArray(dc.argv._); }); - it('should set finished to true (keeps false)', () => assert.isTrue(dc.finished)); + it('should set finished to true (keeps false)', () => + assert.isTrue(dc.finished)); - it('ends with an error message about missing blueprint-file', () => assert.include(stderr, 'Must specify path to API description document.')); + it('ends with an error message about missing blueprint-file', () => + assert.include(stderr, 'Must specify path to API description document.')); - it('ends with an error message about missing api endpoint.', () => assert.include(stderr, 'Must specify URL of the tested API instance.')); + it('ends with an error message about missing api endpoint.', () => + assert.include(stderr, 'Must specify URL of the tested API instance.')); it('calls exit callback', () => assert.isNotNull(hasCalledExit)); }); - describe('run', () => { let dc; let initConfigSpy; @@ -141,8 +152,8 @@ describe('CLI class', () => { exit() {}, custom: { argv: ['./file.apib', 'http://127.0.0.1:3000'], - env: { NO_KEY: 'NO_VAL' }, - }, + env: { NO_KEY: 'NO_VAL' } + } }); sinon.stub(dc, 'initDredd').callsFake((configuration) => { @@ -207,13 +218,13 @@ describe('CLI class', () => { argv: [ './test/fixtures/single-get.apib', `http://127.0.0.1:${PORT}`, - '--path=./test/fixtures/single-get.apib', - ], + '--path=./test/fixtures/single-get.apib' + ] }, exit(code) { exitStatus = code; server.close(); - }, + } }); const server = app.listen(PORT, () => dc.run()); @@ -222,43 +233,60 @@ describe('CLI class', () => { }); describe('with server returning good things', () => { - before(() => { returnGood = true; }); + before(() => { + returnGood = true; + }); it('returns exit code 0', () => assert.equal(exitStatus, 0)); it('propagates configuration options to Dredd class', () => { - assert.equal(dc.dreddInstance.configuration.path[0], './test/fixtures/single-get.apib'); - assert.equal(dc.dreddInstance.configuration.endpoint, `http://127.0.0.1:${PORT}`); + assert.equal( + dc.dreddInstance.configuration.path[0], + './test/fixtures/single-get.apib' + ); + assert.equal( + dc.dreddInstance.configuration.endpoint, + `http://127.0.0.1:${PORT}` + ); }); }); describe('with server returning wrong things', () => { - before(() => { returnGood = false; }); + before(() => { + returnGood = false; + }); it('returns exit code 1', () => assert.equal(exitStatus, 1)); it('propagates configuration options to Dredd class', () => { - assert.equal(dc.dreddInstance.configuration.path[0], './test/fixtures/single-get.apib'); - assert.equal(dc.dreddInstance.configuration.endpoint, `http://127.0.0.1:${PORT}`); + assert.equal( + dc.dreddInstance.configuration.path[0], + './test/fixtures/single-get.apib' + ); + assert.equal( + dc.dreddInstance.configuration.endpoint, + `http://127.0.0.1:${PORT}` + ); }); }); }); - describe('when called w/ OR wo/ exiting arguments', () => { describe('--help', () => { - before(done => execCommand({ argv: ['--help'] }, done)); + before((done) => execCommand({ 
argv: ['--help'] }, done)); it('prints out some really nice help text with all options descriptions', () => { assert.include(stderr, 'Usage:'); assert.include(stderr, 'Example:'); assert.include(stderr, '[OPTIONS]'); - Array.from(Object.keys(options)).forEach(optionKey => assert.include(stderr, optionKey)); + Array.from(Object.keys(options)).forEach((optionKey) => + assert.include(stderr, optionKey) + ); }); }); describe('--version', () => { - before(done => execCommand({ argv: ['--version'] }, done)); + before((done) => execCommand({ argv: ['--version'] }, done)); it('prints out version', () => { assert.include(stdout, `${packageData.name} v${packageData.version}`); @@ -276,32 +304,40 @@ describe('CLI class', () => { }); it('should run interactive config', () => assert.isTrue(initStub.called)); - it('should save configuration', () => assert.isTrue(configUtilsStub.save.called)); + it('should save configuration', () => + assert.isTrue(configUtilsStub.save.called)); }); describe('without argv', () => { - before(done => execCommand({ argv: [] }, done)); + before((done) => execCommand({ argv: [] }, done)); - it('prints out an error message', () => assert.include(stderr, 'Error: Must specify')); + it('prints out an error message', () => + assert.include(stderr, 'Error: Must specify')); }); }); describe('when using --server', () => { before((done) => { sinon.stub(crossSpawnStub, 'spawn').callsFake(); - sinon.stub(transactionRunner.prototype, 'executeAllTransactions').callsFake((transactions, hooks, cb) => cb()); - execCommand({ - argv: [ - './test/fixtures/single-get.apib', - `http://127.0.0.1:${PORT}`, - '--server', - 'foo/bar', - ], - }, done); + sinon + .stub(transactionRunner.prototype, 'executeAllTransactions') + .callsFake((transactions, hooks, cb) => cb()); + execCommand( + { + argv: [ + './test/fixtures/single-get.apib', + `http://127.0.0.1:${PORT}`, + '--server', + 'foo/bar' + ] + }, + done + ); }); after(() => crossSpawnStub.spawn.restore()); - it('should run child process', () => assert.isTrue(crossSpawnStub.spawn.called)); + it('should run child process', () => + assert.isTrue(crossSpawnStub.spawn.called)); }); }); diff --git a/test/unit/Hooks-test.js b/test/unit/Hooks-test.js index 512df5843..d234df347 100644 --- a/test/unit/Hooks-test.js +++ b/test/unit/Hooks-test.js @@ -1,149 +1,156 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); +import sinon from 'sinon' +import { assert } from 'chai' -const Hooks = require('../../lib/Hooks'); +import Hooks from '../../lib/Hooks' describe('Hooks', () => { describe('constructor', () => { it('should not add @logs or @logger when constructor options are empty', () => { - const hooks = new Hooks(); - assert.isUndefined(hooks.logs); - assert.isUndefined(hooks.logger); - }); + const hooks = new Hooks() + assert.isUndefined(hooks.logs) + assert.isUndefined(hooks.logger) + }) it('should add @logs and @logger from passed options', () => { const options = { logs: [{ content: 'message1' }, { content: 'message2' }], logger: { hook() {}, - error() {}, - }, - }; + error() {} + } + } - const hooks = new Hooks(options); - assert.strictEqual(hooks.logs, options.logs); - assert.strictEqual(hooks.logger, options.logger); - }); - }); + const hooks = new Hooks(options) + assert.strictEqual(hooks.logs, options.logs) + assert.strictEqual(hooks.logger, options.logger) + }) + }) describe('#log', () => { - let options = null; + let options = null before(() => { options = { logs: [{ content: 'message1' }, { content: 'message2' }], logger: { hook() 
{}, - error() {}, - }, - }; - sinon.spy(options.logger, 'hook'); - sinon.spy(options.logger, 'error'); - }); + error() {} + } + } + sinon.spy(options.logger, 'hook') + sinon.spy(options.logger, 'error') + }) after(() => { - options.logger.hook.restore(); - options.logger.error.restore(); - }); + options.logger.hook.restore() + options.logger.error.restore() + }) it('should call @logger.hook when hooks.log is called with 1 argument', () => { - const hooks = new Hooks(options); - hooks.log('messageX'); - assert.isTrue(options.logger.hook.called); - assert.isFalse(options.logger.error.called); - assert.property(hooks.logs[2], 'timestamp'); - assert.propertyVal(hooks.logs[0], 'content', 'message1'); - assert.propertyVal(hooks.logs[1], 'content', 'message2'); - assert.propertyVal(hooks.logs[2], 'content', 'messageX'); - }); - }); + const hooks = new Hooks(options) + hooks.log('messageX') + assert.isTrue(options.logger.hook.called) + assert.isFalse(options.logger.error.called) + assert.property(hooks.logs[2], 'timestamp') + assert.propertyVal(hooks.logs[0], 'content', 'message1') + assert.propertyVal(hooks.logs[1], 'content', 'message2') + assert.propertyVal(hooks.logs[2], 'content', 'messageX') + }) + }) describe('#before', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.before('beforeHook', () => ''); - }); + hooks = new Hooks() + hooks.before('beforeHook', () => '') + }) - it('should add to hook collection', () => assert.property(hooks.beforeHooks, 'beforeHook')); - }); + it('should add to hook collection', () => + assert.property(hooks.beforeHooks, 'beforeHook')) + }) describe('#beforeValidation', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.beforeValidation('beforeValidationHook', () => ''); - }); + hooks = new Hooks() + hooks.beforeValidation('beforeValidationHook', () => '') + }) - it('should add to hook collection', () => assert.property(hooks.beforeValidationHooks, 'beforeValidationHook')); - }); + it('should add to hook collection', () => + assert.property(hooks.beforeValidationHooks, 'beforeValidationHook')) + }) describe('#after', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.after('afterHook', () => ''); - }); + hooks = new Hooks() + hooks.after('afterHook', () => '') + }) - it('should add to hook collection', () => assert.property(hooks.afterHooks, 'afterHook')); - }); + it('should add to hook collection', () => + assert.property(hooks.afterHooks, 'afterHook')) + }) describe('#beforeAll', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.beforeAll(() => ''); - }); + hooks = new Hooks() + hooks.beforeAll(() => '') + }) - it('should add to hook collection', () => assert.lengthOf(hooks.beforeAllHooks, 1)); - }); + it('should add to hook collection', () => + assert.lengthOf(hooks.beforeAllHooks, 1)) + }) describe('#afterAll', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.afterAll(() => ''); - }); + hooks = new Hooks() + hooks.afterAll(() => '') + }) - it('should add to hook collection', () => assert.lengthOf(hooks.afterAllHooks, 1)); - }); + it('should add to hook collection', () => + assert.lengthOf(hooks.afterAllHooks, 1)) + }) describe('#beforeEach', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.beforeEach(() => ''); - }); + hooks = new Hooks() + hooks.beforeEach(() => '') + }) - it('should add to hook collection', () => assert.lengthOf(hooks.beforeEachHooks, 1)); - }); + it('should add to 
hook collection', () => + assert.lengthOf(hooks.beforeEachHooks, 1)) + }) describe('#beforeEachValidation', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.beforeEachValidation(() => ''); - }); - - it('should add to hook collection', () => assert.lengthOf(hooks.beforeEachValidationHooks, 1)); - }); + hooks = new Hooks() + hooks.beforeEachValidation(() => '') + }) + it('should add to hook collection', () => + assert.lengthOf(hooks.beforeEachValidationHooks, 1)) + }) describe('#afterEach', () => { - let hooks; + let hooks before(() => { - hooks = new Hooks(); - hooks.afterEach(() => ''); - }); - - it('should add to hook collection', () => assert.lengthOf(hooks.afterEachHooks, 1)); - }); -}); + hooks = new Hooks() + hooks.afterEach(() => '') + }) + + it('should add to hook collection', () => + assert.lengthOf(hooks.afterEachHooks, 1)) + }) +}) diff --git a/test/unit/HooksWorkerClient-test.js b/test/unit/HooksWorkerClient-test.js index 5dd275e39..e1a94b3a4 100644 --- a/test/unit/HooksWorkerClient-test.js +++ b/test/unit/HooksWorkerClient-test.js @@ -1,27 +1,32 @@ -const clone = require('clone'); -const crossSpawnStub = require('cross-spawn'); -const net = require('net'); -const path = require('path'); -const proxyquire = require('proxyquire'); -const sinon = require('sinon'); -const { assert } = require('chai'); -const { EventEmitter } = require('events'); - -const whichStub = require('../../lib/which'); -const loggerStub = require('../../lib/logger'); - -const Hooks = require('../../lib/Hooks'); -const commandLineOptions = require('../../lib/options'); +import clone from 'clone'; +import crossSpawnStub from 'cross-spawn'; +import net from 'net'; +import path from 'path'; +import proxyquire from 'proxyquire'; +import sinon from 'sinon'; +import { assert } from 'chai'; +import { EventEmitter } from 'events'; + +import whichStub from '../../lib/which'; +import loggerStub from '../../lib/logger'; + +import Hooks from '../../lib/Hooks'; +import * as commandLineOptions from '../../options'; +import TransactionRunner from '../../lib/TransactionRunner'; function measureExecutionDurationMs(fn) { const time = process.hrtime(); fn(); const timeDiff = process.hrtime(time); // timeDiff = [seconds, nanoseconds] - return ((timeDiff[0] * 1000) + (timeDiff[1] * 1e-6)); + return timeDiff[0] * 1000 + timeDiff[1] * 1e-6; } const COFFEE_BIN = 'node_modules/.bin/coffee'; -const MIN_COMMAND_EXECUTION_DURATION_MS = 2 * measureExecutionDurationMs(() => crossSpawnStub.sync(COFFEE_BIN, ['test/fixtures/scripts/exit-0.coffee'])); +const MIN_COMMAND_EXECUTION_DURATION_MS = + 2 * + measureExecutionDurationMs(() => + crossSpawnStub.sync(COFFEE_BIN, ['test/fixtures/scripts/exit-0.coffee']) + ); const PORT = 61321; let runner; @@ -30,16 +35,14 @@ const logLevels = ['error', 'warn', 'debug']; const HooksWorkerClient = proxyquire('../../lib/HooksWorkerClient', { 'cross-spawn': crossSpawnStub, './which': whichStub, - './logger': loggerStub, -}); - -const TransactionRunner = require('../../lib/TransactionRunner'); + './logger': loggerStub +}).default; let hooksWorkerClient; function loadWorkerClient(callback) { hooksWorkerClient = new HooksWorkerClient(runner); - hooksWorkerClient.start(error => callback(error)); + hooksWorkerClient.start((error) => callback(error)); } describe('Hooks worker client', () => { @@ -51,24 +54,32 @@ describe('Hooks worker client', () => { runner.hooks = new Hooks({ logs: [], logger: console }); runner.hooks.configuration = {}; - Array.from(logLevels).forEach(level => 
sinon.stub(loggerStub, level).callsFake((msg1, msg2) => { - let text = msg1; - if (msg2) { text += ` ${msg2}`; } + Array.from(logLevels).forEach((level) => + sinon.stub(loggerStub, level).callsFake((msg1, msg2) => { + let text = msg1; + if (msg2) { + text += ` ${msg2}`; + } - // Uncomment to enable logging for debug - // console.log text - logs.push(text); - })); + // Uncomment to enable logging for debug + // console.log text + logs.push(text); + }) + ); }); afterEach(() => { - Array.from(logLevels).forEach(level => loggerStub[level].restore()); + Array.from(logLevels).forEach((level) => loggerStub[level].restore()); }); describe('when methods dealing with connection to the handler are stubbed', () => { beforeEach(() => { - sinon.stub(HooksWorkerClient.prototype, 'disconnectFromHandler').callsFake(() => { }); - sinon.stub(HooksWorkerClient.prototype, 'connectToHandler').callsFake(cb => cb()); + sinon + .stub(HooksWorkerClient.prototype, 'disconnectFromHandler') + .callsFake(() => {}); + sinon + .stub(HooksWorkerClient.prototype, 'connectToHandler') + .callsFake((cb) => cb()); }); afterEach(() => { @@ -79,7 +90,9 @@ describe('Hooks worker client', () => { it('should pipe spawned process stdout to the Dredd process stdout', (done) => { runner.hooks.configuration.language = `${COFFEE_BIN} test/fixtures/scripts/stdout.coffee`; loadWorkerClient((workerError) => { - if (workerError) { return done(workerError); } + if (workerError) { + return done(workerError); + } // The handler sometimes doesn't write to stdout or stderr until it // finishes, so we need to manually stop it. However, it could happen @@ -89,8 +102,13 @@ describe('Hooks worker client', () => { if (data.toString() !== 'exiting\n') { process.nextTick(() => { hooksWorkerClient.stop((stopError) => { - if (stopError) { return done(stopError); } - assert.include(logs, 'Hooks handler stdout: standard output text\n'); + if (stopError) { + return done(stopError); + } + assert.include( + logs, + 'Hooks handler stdout: standard output text\n' + ); done(); }); }); @@ -102,7 +120,9 @@ describe('Hooks worker client', () => { it('should pipe spawned process stderr to the Dredd process stderr', (done) => { runner.hooks.configuration.language = `${COFFEE_BIN} test/fixtures/scripts/stderr.coffee`; loadWorkerClient((workerError) => { - if (workerError) { return done(workerError); } + if (workerError) { + return done(workerError); + } // The handler sometimes doesn't write to stdout or stderr until it // finishes, so we need to manually stop it. 
However, it could happen @@ -112,8 +132,13 @@ describe('Hooks worker client', () => { if (data.toString() !== 'exiting\n') { process.nextTick(() => { hooksWorkerClient.stop((stopError) => { - if (stopError) { return done(stopError); } - assert.include(logs, 'Hooks handler stderr: error output text\n'); + if (stopError) { + return done(stopError); + } + assert.include( + logs, + 'Hooks handler stderr: error output text\n' + ); done(); }); }); @@ -122,56 +147,76 @@ describe('Hooks worker client', () => { }); }); - it('should not set the error on worker if process gets intentionally killed by Dredd ' - + 'because it can be killed after all hooks execution if SIGTERM isn\'t handled', (done) => { - runner.hooks.configuration.language = `${COFFEE_BIN} test/fixtures/scripts/endless-ignore-term.coffee`; - loadWorkerClient((workerError) => { - if (workerError) { return done(workerError); } + it( + 'should not set the error on worker if process gets intentionally killed by Dredd ' + + "because it can be killed after all hooks execution if SIGTERM isn't handled", + (done) => { + runner.hooks.configuration.language = `${COFFEE_BIN} test/fixtures/scripts/endless-ignore-term.coffee`; + loadWorkerClient((workerError) => { + if (workerError) { + return done(workerError); + } - // The handler sometimes doesn't write to stdout or stderr until it - // finishes, so we need to manually stop it. However, it could happen - // we'll stop it before it actually manages to do what we test here, so - // we add some timeout here. - setTimeout(() => hooksWorkerClient.stop((stopError) => { - if (stopError) { return done(stopError); } - assert.isNull(runner.hookHandlerError); - done(); - }), - MIN_COMMAND_EXECUTION_DURATION_MS); - }); - }); + // The handler sometimes doesn't write to stdout or stderr until it + // finishes, so we need to manually stop it. However, it could happen + // we'll stop it before it actually manages to do what we test here, so + // we add some timeout here. + setTimeout( + () => + hooksWorkerClient.stop((stopError) => { + if (stopError) { + return done(stopError); + } + + assert.isNull(runner.hookHandlerError); + done(); + }), + MIN_COMMAND_EXECUTION_DURATION_MS + ); + }); + } + ); it('should include the status in the error if spawned process ends with non-zero exit status', (done) => { - runner.hooks.configuration.language = 'node test/fixtures/scripts/exit-3.js'; + runner.hooks.configuration.language = + 'node test/fixtures/scripts/exit-3.js'; loadWorkerClient((workerError) => { - if (workerError) { return done(workerError); } + if (workerError) { + return done(workerError); + } // The handler sometimes doesn't write to stdout or stderr until it // finishes, so we need to manually stop it. However, it could happen // we'll stop it before it actually manages to do what we test here, so // we add some timeout here. 
- setTimeout(() => hooksWorkerClient.stop((stopError) => { - if (stopError) { return done(stopError); } - assert.isOk(runner.hookHandlerError); - assert.include(runner.hookHandlerError.message, '3'); - done(); - }), - MIN_COMMAND_EXECUTION_DURATION_MS); + setTimeout( + () => + hooksWorkerClient.stop((stopError) => { + if (stopError) { + return done(stopError); + } + assert.isOk(runner.hookHandlerError); + assert.include(runner.hookHandlerError.message, '3'); + done(); + }), + MIN_COMMAND_EXECUTION_DURATION_MS + ); }); }); describe('when --language=nodejs option is given', () => { beforeEach(() => { runner.hooks.configuration = { - language: 'nodejs', + language: 'nodejs' }; }); - it('should write a hint that native hooks should be used', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'native Node.js hooks instead'); - done(); - })); + it('should write a hint that native hooks should be used', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include(err.message, 'native Node.js hooks instead'); + done(); + })); }); describe('when --language=ruby option is given and the worker is installed', () => { @@ -185,11 +230,13 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'ruby', - hookfiles: 'somefile.rb', + hookfiles: 'somefile.rb' }; sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -201,26 +248,34 @@ describe('Hooks worker client', () => { HooksWorkerClient.prototype.terminateHandler.restore(); }); - it('should spawn the server process with command "dredd-hooks-ruby"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the server process with command "dredd-hooks-ruby"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], 'dredd-hooks-ruby'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + 'dredd-hooks-ruby' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'somefile.rb'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'somefile.rb' + ); + done(); + }); + })); }); describe('when --language=ruby option is given and the worker is not installed', () => { @@ -229,18 +284,18 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'ruby', - hookfiles: 'somefile.rb', + hookfiles: 'somefile.rb' }; }); afterEach(() => whichStub.which.restore()); - - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'gem install dredd_hooks'); - done(); - })); + it('should write a 
hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include(err.message, 'gem install dredd_hooks'); + done(); + })); }); describe('when --language=python option is given and the worker is installed', () => { @@ -254,11 +309,13 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'python', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -270,26 +327,34 @@ describe('Hooks worker client', () => { HooksWorkerClient.prototype.terminateHandler.restore(); }); - it('should spawn the server process with command "dredd-hooks-python"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the server process with command "dredd-hooks-python"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], 'dredd-hooks-python'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + 'dredd-hooks-python' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'somefile.py'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'somefile.py' + ); + done(); + }); + })); }); describe('when --language=python option is given and the worker is not installed', () => { @@ -298,17 +363,18 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'python', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; }); afterEach(() => whichStub.which.restore()); - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'pip install dredd_hooks'); - done(); - })); + it('should write a hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include(err.message, 'pip install dredd_hooks'); + done(); + })); }); describe('when --language=php option is given and the worker is installed', () => { @@ -322,11 +388,13 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'php', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -338,26 +406,34 @@ describe('Hooks worker client', () => { HooksWorkerClient.prototype.terminateHandler.restore(); }); - it('should spawn the server process with command "dredd-hooks-php"', done => loadWorkerClient((err) 
=> { - assert.isUndefined(err); + it('should spawn the server process with command "dredd-hooks-php"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], 'dredd-hooks-php'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + 'dredd-hooks-php' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'somefile.py'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'somefile.py' + ); + done(); + }); + })); }); describe('when --language=php option is given and the worker is not installed', () => { @@ -366,17 +442,21 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'php', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; }); afterEach(() => whichStub.which.restore()); - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'composer require ddelnano/dredd-hooks-php --dev'); - done(); - })); + it('should write a hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include( + err.message, + 'composer require ddelnano/dredd-hooks-php --dev' + ); + done(); + })); }); describe('when --language=go option is given and the worker is not installed', () => { @@ -392,7 +472,7 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'go', - hookfiles: 'gobinary', + hookfiles: 'gobinary' }; }); afterEach(() => { @@ -401,11 +481,15 @@ describe('Hooks worker client', () => { process.env.GOPATH = goPath; }); - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'go get github.com/snikch/goodman/cmd/goodman'); - done(); - })); + it('should write a hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include( + err.message, + 'go get github.com/snikch/goodman/cmd/goodman' + ); + done(); + })); }); describe('when --language=go option is given and the worker is installed', () => { @@ -427,12 +511,14 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'go', - hookfiles: 'gobinary', + hookfiles: 'gobinary' }; sinon.stub(whichStub, 'which').callsFake(() => true); - return sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + return sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -446,26 +532,34 @@ describe('Hooks worker client', () => { process.env.GOPATH = goPath; }); - it('should spawn the server process with command "$GOBIN/goodman"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the server process with command "$GOBIN/goodman"', (done) => + 
loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], path.join(dummyPath, 'goodman')); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + path.join(dummyPath, 'goodman') + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'gobinary'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'gobinary' + ); + done(); + }); + })); }); describe('when --language=rust option is given and the worker is not installed', () => { @@ -474,16 +568,17 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'rust', - hookfiles: 'rustbinary', + hookfiles: 'rustbinary' }; }); afterEach(() => whichStub.which.restore()); - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'cargo install dredd-hooks'); - done(); - })); + it('should write a hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include(err.message, 'cargo install dredd-hooks'); + done(); + })); }); describe('when --language=rust option is given and the worker is installed', () => { @@ -497,11 +592,13 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'rust', - hookfiles: 'rustbinary', + hookfiles: 'rustbinary' }; sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -513,26 +610,34 @@ describe('Hooks worker client', () => { HooksWorkerClient.prototype.terminateHandler.restore(); }); - it('should spawn the server process with command "dredd-hooks-rust"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the server process with command "dredd-hooks-rust"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], 'dredd-hooks-rust'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + 'dredd-hooks-rust' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'rustbinary'); - done(); - }); - })); 
+ hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'rustbinary' + ); + done(); + }); + })); }); describe('when --language=perl option is given and the worker is installed', () => { @@ -546,11 +651,13 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'perl', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); }); afterEach(() => { @@ -562,26 +669,34 @@ describe('Hooks worker client', () => { HooksWorkerClient.prototype.terminateHandler.restore(); }); - it('should spawn the server process with command "dredd-hooks-perl"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the server process with command "dredd-hooks-perl"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], 'dredd-hooks-perl'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + 'dredd-hooks-perl' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'somefile.py'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'somefile.py' + ); + done(); + }); + })); }); describe('when --language=perl option is given and the worker is not installed', () => { @@ -590,17 +705,18 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: 'perl', - hookfiles: 'somefile.py', + hookfiles: 'somefile.py' }; }); afterEach(() => whichStub.which.restore()); - it('should write a hint how to install', done => loadWorkerClient((err) => { - assert.isOk(err); - assert.include(err.message, 'cpanm Dredd::Hooks'); - done(); - })); + it('should write a hint how to install', (done) => + loadWorkerClient((err) => { + assert.isOk(err); + assert.include(err.message, 'cpanm Dredd::Hooks'); + done(); + })); }); describe('when --language=./any/other-command is given', () => { @@ -614,10 +730,12 @@ describe('Hooks worker client', () => { runner.hooks.configuration = { language: './my-fancy-command', - hookfiles: 'someotherfile', + hookfiles: 'someotherfile' }; - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); sinon.stub(whichStub, 'which').callsFake(() => true); }); @@ -630,41 +748,52 @@ describe('Hooks worker client', () => { whichStub.which.restore(); }); - it('should spawn the server process with command "./my-fancy-command"', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should spawn the 
server process with command "./my-fancy-command"', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.isTrue(crossSpawnStub.spawn.called); - assert.equal(crossSpawnStub.spawn.getCall(0).args[0], './my-fancy-command'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.isTrue(crossSpawnStub.spawn.called); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[0], + './my-fancy-command' + ); + done(); + }); + })); - it('should pass --hookfiles option as an array of arguments', done => loadWorkerClient((err) => { - assert.isUndefined(err); + it('should pass --hookfiles option as an array of arguments', (done) => + loadWorkerClient((err) => { + assert.isUndefined(err); - hooksWorkerClient.stop((error) => { - assert.isUndefined(error); - assert.equal(crossSpawnStub.spawn.getCall(0).args[1][0], 'someotherfile'); - done(); - }); - })); + hooksWorkerClient.stop((error) => { + assert.isUndefined(error); + assert.equal( + crossSpawnStub.spawn.getCall(0).args[1][0], + 'someotherfile' + ); + done(); + }); + })); }); describe('after loading', () => { beforeEach((done) => { runner.hooks.configuration = { language: 'ruby', - hookfiles: 'somefile.rb', + hookfiles: 'somefile.rb' }; - sinon.stub(HooksWorkerClient.prototype, 'spawnHandler').callsFake(callback => callback()); + sinon + .stub(HooksWorkerClient.prototype, 'spawnHandler') + .callsFake((callback) => callback()); sinon.stub(whichStub, 'which').callsFake(() => true); - sinon.stub(HooksWorkerClient.prototype, 'terminateHandler').callsFake(callback => callback()); - + sinon + .stub(HooksWorkerClient.prototype, 'terminateHandler') + .callsFake((callback) => callback()); loadWorkerClient((err) => { assert.isUndefined(err); @@ -676,7 +805,6 @@ describe('Hooks worker client', () => { }); }); - afterEach(() => { runner.hooks.configuration = undefined; @@ -690,7 +818,7 @@ describe('Hooks worker client', () => { 'beforeEachValidation', 'afterEach', 'beforeAll', - 'afterAll', + 'afterAll' ]; Array.from(eventTypes).forEach((eventType) => { @@ -734,7 +862,6 @@ describe('Hooks worker client', () => { afterEach(() => server.close()); - it('should connect to the server', (done) => { runner.hooks.configuration.language = `${COFFEE_BIN} test/fixtures/scripts/exit-0.coffee`; @@ -754,7 +881,7 @@ describe('Hooks worker client', () => { 'beforeEachValidation', 'afterEach', 'beforeAll', - 'afterAll', + 'afterAll' ]; Array.from(eventTypes).forEach((eventType) => { @@ -781,7 +908,7 @@ describe('Hooks worker client', () => { }); } - afterEach(done => hooksWorkerClient.stop(done)); + afterEach((done) => hooksWorkerClient.stop(done)); it('should send JSON to the socket ending with delimiter character', (done) => { assert.include(receivedData, '\n'); @@ -789,23 +916,23 @@ describe('Hooks worker client', () => { done(); }); - describe('sent object', () => { let receivedObject; - beforeEach(() => { receivedObject = JSON.parse(receivedData.replace('\n', '')); }); + beforeEach(() => { + receivedObject = JSON.parse(receivedData.replace('\n', '')); + }); - const keys = [ - 'data', - 'event', - 'uuid', - ]; + const keys = ['data', 'event', 'uuid']; Array.from(keys).forEach((key) => { - it(`should contain key ${key}`, () => { assert.property(receivedObject, key); }); + it(`should contain key ${key}`, () => { + assert.property(receivedObject, key); + }); }); - it(`key event should have value ${eventType}`, () => assert.equal(receivedObject.event, 
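Editorial summary, not part of the diff: the per-language blocks above each pin down one spawn command and one install hint. Collected in one place, the pairs these specs assert are:

```javascript
// Editorial summary of the expectations asserted by the specs above
const hooksHandlerExpectations = {
  ruby: { installHint: 'gem install dredd_hooks' },
  python: { command: 'dredd-hooks-python', installHint: 'pip install dredd_hooks' },
  php: {
    command: 'dredd-hooks-php',
    installHint: 'composer require ddelnano/dredd-hooks-php --dev'
  },
  go: {
    command: '$GOBIN/goodman',
    installHint: 'go get github.com/snikch/goodman/cmd/goodman'
  },
  rust: { command: 'dredd-hooks-rust', installHint: 'cargo install dredd-hooks' },
  perl: { command: 'dredd-hooks-perl', installHint: 'cpanm Dredd::Hooks' }
  // anything else (e.g. './my-fancy-command') is treated as a custom command
  // and spawned as-is, with the hookfiles passed through as arguments
}
```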
eventType)); + it(`key event should have value ${eventType}`, () => + assert.equal(receivedObject.event, eventType)); if (eventType.indexOf('All') > -1) { it('key data should contain array of transaction objects', () => { @@ -827,8 +954,11 @@ describe('Hooks worker client', () => { let transaction = { name: 'API > Hello > World', request: { - method: 'POST', uri: '/message', headers: {}, body: 'Hello World!', - }, + method: 'POST', + uri: '/message', + headers: {}, + body: 'Hello World!' + } }; return [ @@ -836,7 +966,7 @@ describe('Hooks worker client', () => { 'beforeEach', 'beforeEachValidation', 'afterEach', - 'afterAll', + 'afterAll' ].forEach((eventName) => { let getFirstTransaction; let transactionData; @@ -845,12 +975,12 @@ describe('Hooks worker client', () => { // as a parameter transactionData = clone([transaction]); // eslint-disable-next-line - getFirstTransaction = transactionData => transactionData[0]; + getFirstTransaction = (transactionData) => transactionData[0]; } else { // all the other hooks recieve a single transaction as a parameter transactionData = clone(transaction); // eslint-disable-next-line - getFirstTransaction = transactionData => transactionData; + getFirstTransaction = (transactionData) => transactionData; } describe(`when '${eventName}' function is triggered and the hooks handler replies`, () => { @@ -899,7 +1029,9 @@ describe('Hooks worker client', () => { // -- 2 --, runs hooks worker client, starts to send transaction(s), // thus triggers the 'connection' event above loadWorkerClient((err) => { - if (err) { return done(err); } + if (err) { + return done(err); + } runner.hooks[`${eventName}Hooks`][0](transactionData, () => {}); }); }); @@ -917,38 +1049,39 @@ describe('Hooks worker client', () => { }); describe("'hooks-worker-*' configuration options", () => { - const scenarios = [{ - property: 'timeout', - option: 'hooks-worker-timeout', - }, - { - property: 'connectTimeout', - option: 'hooks-worker-connect-timeout', - }, - { - property: 'connectRetry', - option: 'hooks-worker-connect-retry', - }, - { - property: 'afterConnectWait', - option: 'hooks-worker-after-connect-wait', - }, - { - property: 'termTimeout', - option: 'hooks-worker-term-timeout', - }, - { - property: 'termRetry', - option: 'hooks-worker-term-retry', - }, - { - property: 'handlerHost', - option: 'hooks-worker-handler-host', - }, - { - property: 'handlerPort', - option: 'hooks-worker-handler-port', - }, + const scenarios = [ + { + property: 'timeout', + option: 'hooks-worker-timeout' + }, + { + property: 'connectTimeout', + option: 'hooks-worker-connect-timeout' + }, + { + property: 'connectRetry', + option: 'hooks-worker-connect-retry' + }, + { + property: 'afterConnectWait', + option: 'hooks-worker-after-connect-wait' + }, + { + property: 'termTimeout', + option: 'hooks-worker-term-timeout' + }, + { + property: 'termRetry', + option: 'hooks-worker-term-retry' + }, + { + property: 'handlerHost', + option: 'hooks-worker-handler-host' + }, + { + property: 'handlerPort', + option: 'hooks-worker-handler-port' + } ]; Array.from(scenarios).forEach((scenario) => { diff --git a/test/unit/addHooks-test.js b/test/unit/addHooks-test.js index 56d4fa3fd..64c523813 100644 --- a/test/unit/addHooks-test.js +++ b/test/unit/addHooks-test.js @@ -1,100 +1,95 @@ -const path = require('path'); -const { assert } = require('chai'); +import path from 'path' +import { assert } from 'chai' -const Hooks = require('../../lib/Hooks'); -const addHooks = require('../../lib/addHooks'); - - -const WORKING_DIRECTORY = 
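Editorial sketch, not part of the diff: the "sent object" specs above fix the wire format between Dredd and a hooks handler as newline-delimited JSON objects carrying `uuid`, `event` and `data` keys, where `data` is a single transaction (or an array of transactions for the `*All` events). A hypothetical handler-side parse loop consistent with those assertions:

```javascript
// Editorial sketch of the message shape asserted above: newline-delimited JSON
import net from 'net'

const server = net.createServer((socket) => {
  let buffer = ''
  socket.on('data', (chunk) => {
    buffer += chunk.toString()
    let index
    while ((index = buffer.indexOf('\n')) !== -1) {
      const message = JSON.parse(buffer.slice(0, index))
      buffer = buffer.slice(index + 1)
      // message.event is e.g. 'beforeEach'; message.data is one transaction,
      // or an array of transactions for the beforeAll/afterAll events
      console.log(message.uuid, message.event, message.data)
    }
  })
})
```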
path.join(__dirname, '..', 'fixtures'); +import Hooks from '../../lib/Hooks' +import addHooks from '../../lib/addHooks' +const WORKING_DIRECTORY = path.join(__dirname, '..', 'fixtures') function createTransactionRunner() { return { configuration: { - custom: { cwd: WORKING_DIRECTORY }, - }, - }; + custom: { cwd: WORKING_DIRECTORY } + } + } } - describe('addHooks()', () => { it('sets transactionRunner.hooks', (done) => { - const transactionRunner = createTransactionRunner(); + const transactionRunner = createTransactionRunner() addHooks(transactionRunner, [], (err) => { - assert.instanceOf(transactionRunner.hooks, Hooks); - done(err); - }); - }); + assert.instanceOf(transactionRunner.hooks, Hooks) + done(err) + }) + }) it('sets transactionRunner.hooks.transactions', (done) => { - const transactionRunner = createTransactionRunner(); - const transaction1 = { name: 'My API > /resource/{id} > GET' }; - const transaction2 = { name: 'My API > /resources > POST' }; + const transactionRunner = createTransactionRunner() + const transaction1 = { name: 'My API > /resource/{id} > GET' } + const transaction2 = { name: 'My API > /resources > POST' } addHooks(transactionRunner, [transaction1, transaction2], (err) => { assert.deepEqual(transactionRunner.hooks.transactions, { 'My API > /resource/{id} > GET': transaction1, - 'My API > /resources > POST': transaction2, - }); - done(err); - }); - }); + 'My API > /resources > POST': transaction2 + }) + done(err) + }) + }) it('sets transactionRunner.configuation.hookfiles', (done) => { - const transactionRunner = createTransactionRunner(); + const transactionRunner = createTransactionRunner() transactionRunner.configuration.hookfiles = [ './hooks-glob/f*/*.js', - './hooks.js', - ]; + './hooks.js' + ] addHooks(transactionRunner, [], (err) => { assert.deepEqual(transactionRunner.configuration.hookfiles, [ path.join(WORKING_DIRECTORY, 'hooks-glob/foo/a.js'), path.join(WORKING_DIRECTORY, 'hooks.js'), path.join(WORKING_DIRECTORY, 'hooks-glob/foo/o.js'), - path.join(WORKING_DIRECTORY, 'hooks-glob/foo/y.js'), - ]); - done(err); - }); - }); + path.join(WORKING_DIRECTORY, 'hooks-glob/foo/y.js') + ]) + done(err) + }) + }) it('propagates errors when resolving hookfiles is not possible', (done) => { - const transactionRunner = createTransactionRunner(); + const transactionRunner = createTransactionRunner() transactionRunner.configuration.hookfiles = [ - './__non-existing-directory__/non-existing-file.js', - ]; + './__non-existing-directory__/non-existing-file.js' + ] addHooks(transactionRunner, [], (err) => { - assert.instanceOf(err, Error); - assert.match(err.message, /non-existing-file\.js/); - done(); - }); - }); + assert.instanceOf(err, Error) + assert.match(err.message, /non-existing-file\.js/) + done() + }) + }) it('sets transactionRunner.hooks.configuation', (done) => { - const transactionRunner = createTransactionRunner(); - transactionRunner.configuration.hookfiles = [ - './hooks.js', - ]; + const transactionRunner = createTransactionRunner() + transactionRunner.configuration.hookfiles = ['./hooks.js'] addHooks(transactionRunner, [], (err) => { assert.deepEqual( transactionRunner.hooks.configuration, transactionRunner.configuration - ); - done(err); - }); - }); + ) + done(err) + }) + }) it('skips hooks loading when there are no hookfiles', (done) => { - const transactionRunner = createTransactionRunner(); - transactionRunner.configuration.hookfiles = []; - transactionRunner.configuration.language = 'python'; + const transactionRunner = createTransactionRunner() + 
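Editorial sketch, not part of the diff: the rewritten addHooks specs above double as usage documentation. Condensed, the behaviour they verify looks like this:

```javascript
// Editorial sketch of the behaviour exercised by the specs above
import addHooks from '../../lib/addHooks'

const transactionRunner = {
  configuration: {
    custom: { cwd: '/path/to/project' }, // hookfile globs are resolved against this cwd
    hookfiles: ['./hooks-glob/f*/*.js', './hooks.js']
  }
}
const transactions = [{ name: 'My API > /resources > POST' }]

addHooks(transactionRunner, transactions, (err) => {
  if (err) throw err
  // transactionRunner.hooks is a Hooks instance,
  // hooks.transactions is keyed by transaction name,
  // and configuration.hookfiles now holds resolved absolute paths
  console.log(transactionRunner.hooks.transactions['My API > /resources > POST'])
})
```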
transactionRunner.configuration.hookfiles = [] + transactionRunner.configuration.language = 'python' addHooks(transactionRunner, [], (err) => { - assert.isUndefined(transactionRunner.hooks.configuration); - done(err); - }); - }); -}); + assert.isUndefined(transactionRunner.hooks.configuration) + done(err) + }) + }) +}) diff --git a/test/unit/annotationToLoggerInfo-test.js b/test/unit/annotationToLoggerInfo-test.js index 4ed21a85a..a69282a10 100644 --- a/test/unit/annotationToLoggerInfo-test.js +++ b/test/unit/annotationToLoggerInfo-test.js @@ -1,145 +1,154 @@ -const { assert } = require('chai'); - -const annotationToLoggerInfo = require('../../lib/annotationToLoggerInfo'); +import { assert } from 'chai' +import annotationToLoggerInfo from '../../lib/annotationToLoggerInfo' const PARSE_ANNOTATION_FIXTURE = { type: 'error', message: 'Ouch!', component: 'apiDescriptionParser', - location: [[1, 2], [3, 4]], -}; + location: [[1, 2], [3, 4]] +} const COMPILE_ANNOTATION_FIXTURE = { type: 'error', message: 'Ouch!', component: 'uriTemplateExpansion', - origin: { apiName: 'Broken API', resourceName: 'Things', actionName: 'Retrieve Things' }, -}; - + origin: { + apiName: 'Broken API', + resourceName: 'Things', + actionName: 'Retrieve Things' + } +} describe('annotationToLoggerInfo()', () => { describe('annotation.type', () => { it('chooses error logging level for error annotation type', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - type: 'error', - }); - assert.equal(loggerInfo.level, 'error'); - }); + type: 'error' + }) + assert.equal(loggerInfo.level, 'error') + }) it('chooses warn logging level for warning annotation type', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - type: 'warning', - }); - assert.equal(loggerInfo.level, 'warn'); - }); + type: 'warning' + }) + assert.equal(loggerInfo.level, 'warn') + }) it('throws for invalid annotation type', () => { - assert.throws(() => annotationToLoggerInfo('apiary.apib', { - ...PARSE_ANNOTATION_FIXTURE, - type: 'gargamel', - }), 'gargamel'); - }); + assert.throws( + () => + annotationToLoggerInfo('apiary.apib', { + ...PARSE_ANNOTATION_FIXTURE, + type: 'gargamel' + }), + 'gargamel' + ) + }) it('propagates the type to the message for parse annotation', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - type: 'warning', - }); - assert.match(loggerInfo.message, /^API description [\s\S]+ warning in/); - }); + type: 'warning' + }) + assert.match(loggerInfo.message, /^API description [\s\S]+ warning in/) + }) it('propagates the type to the message for compile annotation', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...COMPILE_ANNOTATION_FIXTURE, - type: 'warning', - }); - assert.match(loggerInfo.message, /^API description [\s\S]+ warning in/); - }); - }); + type: 'warning' + }) + assert.match(loggerInfo.message, /^API description [\s\S]+ warning in/) + }) + }) describe('annotation.component', () => { it('formats apiDescriptionParser', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - component: 'apiDescriptionParser', - }); - assert.match(loggerInfo.message, /^API description parser error/); - }); + component: 'apiDescriptionParser' + }) + assert.match(loggerInfo.message, /^API description parser error/) + }) it('formats parametersValidation', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...COMPILE_ANNOTATION_FIXTURE, - 
component: 'parametersValidation', - }); - assert.match(loggerInfo.message, /^API description URI parameters validation error/); - }); + component: 'parametersValidation' + }) + assert.match( + loggerInfo.message, + /^API description URI parameters validation error/ + ) + }) it('formats uriTemplateExpansion', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...COMPILE_ANNOTATION_FIXTURE, - component: 'uriTemplateExpansion', - }); - assert.match(loggerInfo.message, /^API description URI template expansion error/); - }); + component: 'uriTemplateExpansion' + }) + assert.match( + loggerInfo.message, + /^API description URI template expansion error/ + ) + }) it('formats unexpected component with a generic name', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...COMPILE_ANNOTATION_FIXTURE, - component: 'gargamel', - }); - assert.match(loggerInfo.message, /^API description error/); - }); - }); + component: 'gargamel' + }) + assert.match(loggerInfo.message, /^API description error/) + }) + }) describe('annotation.origin', () => { it('uses transaction name as a location hint for compile annotations', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...COMPILE_ANNOTATION_FIXTURE, - component: 'parametersValidation', - }); + component: 'parametersValidation' + }) assert.include( loggerInfo.message, 'error in apiary.apib (Broken API > Things > Retrieve Things): Ouch!' - ); - }); - }); + ) + }) + }) describe('annotation.location', () => { it('formats location for parse annotations', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - location: [[1, 2], [3, 4]], - }); + location: [[1, 2], [3, 4]] + }) assert.include( loggerInfo.message, 'error in apiary.apib:1 (from line 1 column 2 to line 3 column 4): Ouch!' - ); - }); + ) + }) it('formats location without end line if it is the same as the start line', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - location: [[1, 2], [1, 4]], - }); + location: [[1, 2], [1, 4]] + }) assert.include( loggerInfo.message, 'error in apiary.apib:1 (from line 1 column 2 to column 4): Ouch!' - ); - }); + ) + }) it('formats location without range if the start and the end are the same', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - location: [[1, 2], [1, 2]], - }); + location: [[1, 2], [1, 2]] + }) assert.include( loggerInfo.message, 'error in apiary.apib:1 (line 1 column 2): Ouch!' - ); - }); + ) + }) it('formats missing location', () => { const loggerInfo = annotationToLoggerInfo('apiary.apib', { ...PARSE_ANNOTATION_FIXTURE, - location: null, - }); - assert.include( - loggerInfo.message, - 'error in apiary.apib: Ouch!' 
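Editorial example, not part of the diff: combining the assertions above, a warning from the API description parser produces logger info along the following lines. The exact string is inferred from the individual assertions, so treat it as illustrative:

```javascript
// Editorial example, inferred from the assertions above
import annotationToLoggerInfo from '../../lib/annotationToLoggerInfo'

const loggerInfo = annotationToLoggerInfo('apiary.apib', {
  type: 'warning', // 'error' maps to level 'error', 'warning' to level 'warn'
  message: 'Ouch!',
  component: 'apiDescriptionParser',
  location: [[1, 2], [3, 4]]
})

// loggerInfo.level === 'warn'
// loggerInfo.message reads roughly:
// 'API description parser warning in apiary.apib:1 (from line 1 column 2 to line 3 column 4): Ouch!'
```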
- ); - }); - }); -}); + location: null + }) + assert.include(loggerInfo.message, 'error in apiary.apib: Ouch!') + }) + }) +}) diff --git a/test/unit/configUtils-test.js b/test/unit/configUtils-test.js index 96182bc70..1db6e2079 100644 --- a/test/unit/configUtils-test.js +++ b/test/unit/configUtils-test.js @@ -1,14 +1,14 @@ -const clone = require('clone'); -const fsStub = require('fs'); -const proxyquire = require('proxyquire'); -const sinon = require('sinon'); -const yamlStub = require('js-yaml'); -const { assert } = require('chai'); +import clone from 'clone' +import fsStub from 'fs' +import proxyquire from 'proxyquire' +import sinon from 'sinon' +import * as yamlStub from 'js-yaml' +import { assert } from 'chai' const configUtils = proxyquire('../../lib/configUtils', { fs: fsStub, - 'js-yaml': yamlStub, -}); + 'js-yaml': yamlStub +}) const argvData = { _: ['blueprint', 'endpoint'], @@ -53,91 +53,96 @@ const argvData = { l: 'warning', path: [], p: [], - $0: 'node ./bin/dredd', -}; + $0: 'node ./bin/dredd' +} describe('configUtils', () => { - let argv = null; - beforeEach(() => { argv = clone(argvData); }); + let argv = null + beforeEach(() => { + argv = clone(argvData) + }) - it('it should export an object', () => assert.isObject(configUtils)); + it('it should export an object', () => assert.isObject(configUtils)) describe('save(args, path)', () => { beforeEach(() => { - sinon.stub(fsStub, 'writeFileSync'); - sinon.spy(yamlStub, 'dump'); - }); + sinon.stub(fsStub, 'writeFileSync') + sinon.spy(yamlStub, 'dump') + }) afterEach(() => { - fsStub.writeFileSync.restore(); - yamlStub.dump.restore(); - }); + fsStub.writeFileSync.restore() + yamlStub.dump.restore() + }) - it('should be a defined function', () => assert.isFunction(configUtils.save)); + it('should be a defined function', () => + assert.isFunction(configUtils.save)) it('should add endpoint key', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.property(yamlStub.safeLoad(args[1]), 'endpoint'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.property(yamlStub.safeLoad(args[1]), 'endpoint') + }) it('should add blueprint key', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.property(yamlStub.safeLoad(args[1]), 'blueprint'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.property(yamlStub.safeLoad(args[1]), 'blueprint') + }) it('should remove aliases', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.notProperty(yamlStub.safeLoad(args[1]), 'p'); - assert.notProperty(yamlStub.safeLoad(args[1]), 'q'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.notProperty(yamlStub.safeLoad(args[1]), 'p') + assert.notProperty(yamlStub.safeLoad(args[1]), 'q') + }) it('should remove _', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.notProperty(yamlStub.safeLoad(args[1]), '_'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.notProperty(yamlStub.safeLoad(args[1]), '_') + }) it('should remove $0', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - 
assert.notProperty(yamlStub.safeLoad(args[1]), '_'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.notProperty(yamlStub.safeLoad(args[1]), '_') + }) it('should save an object', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.notProperty(yamlStub.safeLoad(args[1]), '_'); - }); + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.notProperty(yamlStub.safeLoad(args[1]), '_') + }) it('should call YAML.dump', () => { - configUtils.save(argv); - assert.isOk(yamlStub.dump.called); - }); - - describe('when path is not given', () => it('should save to ./dredd.yml', () => { - configUtils.save(argv); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.include(args[0], 'dredd.yml'); - })); - - describe('when path is given', () => it('should save to that path', () => { - const path = 'some-other-location.yml '; - configUtils.save(argv, path); - const call = fsStub.writeFileSync.getCall(0); - const { args } = call; - assert.include(args[0], path); - })); - }); + configUtils.save(argv) + assert.isOk(yamlStub.dump.called) + }) + + describe('when path is not given', () => + it('should save to ./dredd.yml', () => { + configUtils.save(argv) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.include(args[0], 'dredd.yml') + })) + + describe('when path is given', () => + it('should save to that path', () => { + const path = 'some-other-location.yml ' + configUtils.save(argv, path) + const call = fsStub.writeFileSync.getCall(0) + const { args } = call + assert.include(args[0], path) + })) + }) describe('load(path)', () => { const yamlData = `\ @@ -163,65 +168,70 @@ loglevel: info path: [] blueprint: blueprint endpoint: endpoint\ -`; - - beforeEach(() => sinon.stub(fsStub, 'readFileSync').callsFake(() => yamlData)); - - afterEach(() => fsStub.readFileSync.restore()); - - it('should be a defined function', () => assert.isFunction(configUtils.load)); - - describe('if no path is given', () => it('should load from ./dredd.yml', () => { - configUtils.load(); - const call = fsStub.readFileSync.getCall(0); - const { args } = call; - assert.include(args[0], 'dredd.yml'); - })); - - describe('when path is given', () => it('should load from that path', () => { - const path = 'some-other-location.yml '; - configUtils.load(path); - const call = fsStub.readFileSync.getCall(0); - const { args } = call; - assert.include(args[0], path); - })); +` + + beforeEach(() => + sinon.stub(fsStub, 'readFileSync').callsFake(() => yamlData) + ) + + afterEach(() => fsStub.readFileSync.restore()) + + it('should be a defined function', () => + assert.isFunction(configUtils.load)) + + describe('if no path is given', () => + it('should load from ./dredd.yml', () => { + configUtils.load() + const call = fsStub.readFileSync.getCall(0) + const { args } = call + assert.include(args[0], 'dredd.yml') + })) + + describe('when path is given', () => + it('should load from that path', () => { + const path = 'some-other-location.yml ' + configUtils.load(path) + const call = fsStub.readFileSync.getCall(0) + const { args } = call + assert.include(args[0], path) + })) it('should move blueprint and enpoint to an array under _ key', () => { - const output = configUtils.load(); - assert.isArray(output._); - assert.equal(output._[0], 'blueprint'); - assert.equal(output._[1], 'endpoint'); - }); + const output = 
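Editorial usage sketch, not part of the diff, reusing the `configUtils` binding proxyquired at the top of this test file:

```javascript
// Editorial usage sketch for the save() behaviour asserted above
configUtils.save(argv) // writes ./dredd.yml by default
configUtils.save(argv, 'some-other-location.yml') // or an explicit path

// The persisted YAML gains 'blueprint' and 'endpoint' keys (taken from argv._)
// and loses the single-letter aliases, '_' and '$0' before js-yaml dumps it.
```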
configUtils.load() + assert.isArray(output._) + assert.equal(output._[0], 'blueprint') + assert.equal(output._[1], 'endpoint') + }) it('should remove blueprint and endpoint keys', () => { - const output = configUtils.load(); - assert.notProperty(output, 'blueprint'); - assert.notProperty(output, 'endpoint'); - }); + const output = configUtils.load() + assert.notProperty(output, 'blueprint') + assert.notProperty(output, 'endpoint') + }) it('should return an object', () => { - const output = configUtils.load(); - assert.isObject(output); - }); - }); + const output = configUtils.load() + assert.isObject(output) + }) + }) describe('parseCustom(arrayOfCustoms)', () => { const custom = [ 'customOpt:itsValue:can:contain:delimiters', - 'customOpt2:itsValue', - ]; + 'customOpt2:itsValue' + ] it('shold return an object', () => { - assert.isObject(configUtils.parseCustom(custom)); - }); + assert.isObject(configUtils.parseCustom(custom)) + }) it('should split values by first ":"', () => { - const output = configUtils.parseCustom(custom); - - assert.property(output, 'customOpt'); - assert.property(output, 'customOpt2'); - assert.equal(output.customOpt, 'itsValue:can:contain:delimiters'); - assert.equal(output.customOpt2, 'itsValue'); - }); - }); -}); + const output = configUtils.parseCustom(custom) + + assert.property(output, 'customOpt') + assert.property(output, 'customOpt2') + assert.equal(output.customOpt, 'itsValue:can:contain:delimiters') + assert.equal(output.customOpt2, 'itsValue') + }) + }) +}) diff --git a/test/unit/configuration-test.js b/test/unit/configuration-test.js index 3a69668fe..f12b0ca33 100644 --- a/test/unit/configuration-test.js +++ b/test/unit/configuration-test.js @@ -1,469 +1,472 @@ -const clone = require('clone'); -const { assert } = require('chai'); +import clone from 'clone' +import { assert } from 'chai' -const configuration = require('../../lib/configuration'); -const normalizeConfig = require('../../lib/configuration/normalizeConfig'); -const validateConfig = require('../../lib/configuration/validateConfig'); -const logger = require('../../lib/logger'); -const reporterOutputLogger = require('../../lib/reporters/reporterOutputLogger'); +import { applyLoggingOptions } from '../../lib/configuration' +import normalizeConfig from '../../lib/configuration/normalizeConfig' +import validateConfig from '../../lib/configuration/validateConfig' +import logger from '../../lib/logger' +import reporterOutputLogger from '../../lib/reporters/reporterOutputLogger' -const defaultLoggerConsole = clone(logger.transports.console); -const defaultReporterOutputLoggerConsole = clone(reporterOutputLogger.transports.console); +const defaultLoggerConsole = clone(logger.transports.console) +const defaultReporterOutputLoggerConsole = clone( + reporterOutputLogger.transports.console +) function resetLoggerConsoles() { - logger.transports.console = defaultLoggerConsole; - reporterOutputLogger.transports.console = defaultReporterOutputLoggerConsole; + logger.transports.console = defaultLoggerConsole + reporterOutputLogger.transports.console = defaultReporterOutputLoggerConsole } -describe('configuration.applyLoggingOptions()', () => { - beforeEach(resetLoggerConsoles); - afterEach(resetLoggerConsoles); +describe('applyLoggingOptions()', () => { + beforeEach(resetLoggerConsoles) + afterEach(resetLoggerConsoles) describe('with color not set', () => { beforeEach(() => { - configuration.applyLoggingOptions({}); - }); + applyLoggingOptions({}) + }) it('the application logger should be set to colorize', () 
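Editorial example, not part of the diff, of the configUtils behaviour asserted above: `load()` folds blueprint/endpoint back into the positional `_` array, and `parseCustom()` splits each entry on the first `:` only:

```javascript
// Editorial example of the configUtils behaviour asserted above
const loaded = configUtils.load() // reads ./dredd.yml unless a path is given
// loaded._ === ['blueprint', 'endpoint']; the 'blueprint'/'endpoint' keys are removed

const output = configUtils.parseCustom([
  'customOpt:itsValue:can:contain:delimiters',
  'customOpt2:itsValue'
])
// output.customOpt === 'itsValue:can:contain:delimiters'
// output.customOpt2 === 'itsValue'
```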
=> { - assert.isTrue(logger.transports.console.colorize); - }); + assert.isTrue(logger.transports.console.colorize) + }) it('the application output should be set to colorize', () => { - assert.isTrue(reporterOutputLogger.transports.console.colorize); - }); - }); + assert.isTrue(reporterOutputLogger.transports.console.colorize) + }) + }) describe('with color set to true', () => { beforeEach(() => { - configuration.applyLoggingOptions({ color: true }); - }); + applyLoggingOptions({ color: true }) + }) it('the application logger should be set to colorize', () => { - assert.isTrue(logger.transports.console.colorize); - }); + assert.isTrue(logger.transports.console.colorize) + }) it('the application output should be set to colorize', () => { - assert.isTrue(reporterOutputLogger.transports.console.colorize); - }); - }); + assert.isTrue(reporterOutputLogger.transports.console.colorize) + }) + }) describe('with color set to false', () => { beforeEach(() => { - configuration.applyLoggingOptions({ color: false }); - }); + applyLoggingOptions({ color: false }) + }) it('the application logger should be set not to colorize', () => { - assert.isFalse(logger.transports.console.colorize); - }); + assert.isFalse(logger.transports.console.colorize) + }) it('the application output should be set not to colorize', () => { - assert.isFalse(reporterOutputLogger.transports.console.colorize); - }); - }); + assert.isFalse(reporterOutputLogger.transports.console.colorize) + }) + }) describe('with loglevel not set', () => { beforeEach(() => { - configuration.applyLoggingOptions({}); - }); + applyLoggingOptions({}) + }) it('the application logger level is set to warn', () => { - assert.equal(logger.transports.console.level, 'warn'); - }); + assert.equal(logger.transports.console.level, 'warn') + }) it('the application output logger is not influenced', () => { - assert.isFalse(reporterOutputLogger.transports.console.silent); - assert.equal(reporterOutputLogger.transports.console.level, 'info'); - }); - }); + assert.isFalse(reporterOutputLogger.transports.console.silent) + assert.equal(reporterOutputLogger.transports.console.level, 'info') + }) + }) describe('with loglevel set to a valid value', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'error' }); - }); + applyLoggingOptions({ loglevel: 'error' }) + }) it('the application logger level is set', () => { - assert.equal(logger.transports.console.level, 'error'); - }); + assert.equal(logger.transports.console.level, 'error') + }) it('the application output logger is not influenced', () => { - assert.isFalse(reporterOutputLogger.transports.console.silent); - assert.equal(reporterOutputLogger.transports.console.level, 'info'); - }); - }); + assert.isFalse(reporterOutputLogger.transports.console.silent) + assert.equal(reporterOutputLogger.transports.console.level, 'info') + }) + }) describe('with loglevel set to a valid value using uppercase', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'ERROR' }); - }); + applyLoggingOptions({ loglevel: 'ERROR' }) + }) it('the value is understood', () => { - assert.equal(logger.transports.console.level, 'error'); - }); - }); + assert.equal(logger.transports.console.level, 'error') + }) + }) describe('with loglevel set to an invalid value', () => { it('throws an exception', () => { assert.throws(() => { - configuration.applyLoggingOptions({ loglevel: 'verbose' }); - }, /verbose.+unsupported/i); - }); - }); + applyLoggingOptions({ loglevel: 'verbose' }) + }, 
/verbose.+unsupported/i) + }) + }) describe('with loglevel set to silent', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'silent' }); - }); + applyLoggingOptions({ loglevel: 'silent' }) + }) it('the application logger gets silenced', () => { - assert.isTrue(logger.transports.console.silent); - }); + assert.isTrue(logger.transports.console.silent) + }) it('the application output logger is not influenced', () => { - assert.isFalse(reporterOutputLogger.transports.console.silent); - assert.equal(reporterOutputLogger.transports.console.level, 'info'); - }); - }); + assert.isFalse(reporterOutputLogger.transports.console.silent) + assert.equal(reporterOutputLogger.transports.console.level, 'info') + }) + }) describe('with loglevel set to warning', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'warning' }); - }); + applyLoggingOptions({ loglevel: 'warning' }) + }) it('the value is understood as warn', () => { - assert.equal(logger.transports.console.level, 'warn'); - }); - }); + assert.equal(logger.transports.console.level, 'warn') + }) + }) describe('with loglevel set to warn', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'warn' }); - }); + applyLoggingOptions({ loglevel: 'warn' }) + }) it('the application logger level is set to warn', () => { - assert.equal(logger.transports.console.level, 'warn'); - }); + assert.equal(logger.transports.console.level, 'warn') + }) it('the application logger is not set to add timestamps', () => { - assert.isFalse(logger.transports.console.timestamp); - }); - }); + assert.isFalse(logger.transports.console.timestamp) + }) + }) describe('with loglevel set to error', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'error' }); - }); + applyLoggingOptions({ loglevel: 'error' }) + }) it('the application logger level is set to error', () => { - assert.equal(logger.transports.console.level, 'error'); - }); + assert.equal(logger.transports.console.level, 'error') + }) it('the application logger is not set to add timestamps', () => { - assert.isFalse(logger.transports.console.timestamp); - }); - }); + assert.isFalse(logger.transports.console.timestamp) + }) + }) describe('with loglevel set to debug', () => { beforeEach(() => { - configuration.applyLoggingOptions({ loglevel: 'debug' }); - }); + applyLoggingOptions({ loglevel: 'debug' }) + }) it('the application logger level is set to debug', () => { - assert.equal(logger.transports.console.level, 'debug'); - }); + assert.equal(logger.transports.console.level, 'debug') + }) it('the application logger is set to add timestamps', () => { - assert.isTrue(logger.transports.console.timestamp); - }); - }); -}); - + assert.isTrue(logger.transports.console.timestamp) + }) + }) +}) describe('configuration', () => { describe("with -c set to string 'true'", () => { - const config = { c: 'true' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { c: 'true' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 'c'); - }); + assert.notProperty(normalizedConfig, 'c') + }) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe("with --color set to string 'true'", 
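Editorial summary, not part of the diff, of the applyLoggingOptions() behaviour the specs above pin down:

```javascript
// Editorial summary of the behaviour asserted above
import { applyLoggingOptions } from '../../lib/configuration'

applyLoggingOptions({})                      // colors on, application loglevel 'warn'
applyLoggingOptions({ color: false })        // disables colors on both loggers
applyLoggingOptions({ loglevel: 'ERROR' })   // case-insensitive, sets 'error'
applyLoggingOptions({ loglevel: 'warning' }) // understood as 'warn'
applyLoggingOptions({ loglevel: 'silent' })  // silences the application logger only
applyLoggingOptions({ loglevel: 'debug' })   // 'debug' level plus timestamps
// applyLoggingOptions({ loglevel: 'verbose' }) throws: unsupported log level
```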
() => { - const config = { color: 'true' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { color: 'true' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to color set to boolean true', () => { - assert.propertyVal(normalizedConfig, 'color', true); - }); + assert.propertyVal(normalizedConfig, 'color', true) + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe("with --color set to string 'false'", () => { - const config = { color: 'false' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { color: 'false' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to color set to boolean false', () => { - assert.propertyVal(normalizedConfig, 'color', false); - }); + assert.propertyVal(normalizedConfig, 'color', false) + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --color set to true', () => { - const config = { color: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { color: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to color set to boolean true', () => { - assert.propertyVal(normalizedConfig, 'color', true); - }); + assert.propertyVal(normalizedConfig, 'color', true) + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --color set to false', () => { - const config = { color: false }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { color: false } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to color set to boolean false', () => { - assert.propertyVal(normalizedConfig, 'color', false); - }); + assert.propertyVal(normalizedConfig, 'color', false) + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --level/-l set to a supported value', () => { - const config = { l: 'debug', level: 'debug' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { l: 'debug', level: 'debug' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to loglevel set to the value', () => { - assert.propertyVal(normalizedConfig, 'loglevel', 'debug'); - assert.notProperty(normalizedConfig, 'l'); - assert.notProperty(normalizedConfig, 'level'); - }); + assert.propertyVal(normalizedConfig, 'loglevel', 'debug') + 
assert.notProperty(normalizedConfig, 'l') + assert.notProperty(normalizedConfig, 'level') + }) it('produces one warnings', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --level/-l set to a consolidated value', () => { - const config = { l: 'verbose', level: 'verbose' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { l: 'verbose', level: 'verbose' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to loglevel set to a corresponding value', () => { - assert.propertyVal(normalizedConfig, 'loglevel', 'debug'); - assert.notProperty(normalizedConfig, 'l'); - assert.notProperty(normalizedConfig, 'level'); - }); + assert.propertyVal(normalizedConfig, 'loglevel', 'debug') + assert.notProperty(normalizedConfig, 'l') + assert.notProperty(normalizedConfig, 'level') + }) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --level/-l set to a removed value', () => { - const config = { l: 'complete', level: 'complete' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { l: 'complete', level: 'complete' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to loglevel set to the default value', () => { - assert.propertyVal(normalizedConfig, 'loglevel', 'warn'); - assert.notProperty(normalizedConfig, 'l'); - assert.notProperty(normalizedConfig, 'level'); - }); + assert.propertyVal(normalizedConfig, 'loglevel', 'warn') + assert.notProperty(normalizedConfig, 'l') + assert.notProperty(normalizedConfig, 'level') + }) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe("with -l set to 'silent'", () => { - const config = { l: 'silent' }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { l: 'silent' } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets coerced to loglevel set to silent', () => { - assert.propertyVal(normalizedConfig, 'loglevel', 'silent'); - assert.notProperty(normalizedConfig, 'l'); - assert.notProperty(normalizedConfig, 'level'); - }); + assert.propertyVal(normalizedConfig, 'loglevel', 'silent') + assert.notProperty(normalizedConfig, 'l') + assert.notProperty(normalizedConfig, 'level') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with --timestamp set', () => { - const config = { timestamp: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { timestamp: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets 
removed', () => { - assert.notProperty(normalizedConfig, 'timestamp'); - }); + assert.notProperty(normalizedConfig, 'timestamp') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with -t set', () => { - const config = { t: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { t: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 't'); - }); + assert.notProperty(normalizedConfig, 't') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with --silent set', () => { - const config = { silent: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { silent: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 'silent'); - }); + assert.notProperty(normalizedConfig, 'silent') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with -q set', () => { - const config = { q: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { q: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 'q'); - }); + assert.notProperty(normalizedConfig, 'q') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with --sandbox/-b set', () => { - const config = { sandbox: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { sandbox: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 'sandbox'); - }); + assert.notProperty(normalizedConfig, 'sandbox') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with -b set', () => { - const config = { b: true }; - const normalizedConfig = normalizeConfig(config); - const { warnings, errors } = validateConfig(config); + const config = { b: true } + const normalizedConfig = normalizeConfig(config) + const { warnings, errors } = validateConfig(config) it('gets removed', () => { - assert.notProperty(normalizedConfig, 'b'); - }); + assert.notProperty(normalizedConfig, 'b') + }) it('produces no warnings', () => { - assert.lengthOf(warnings, 0); - }); + 
assert.lengthOf(warnings, 0) + }) it('produces one error', () => { - assert.lengthOf(errors, 1); - }); - }); + assert.lengthOf(errors, 1) + }) + }) describe('with data set to { filename: apiDescription }', () => { - const config = { data: { 'filename.api': 'FORMAT: 1A\n# Sample API\n' } }; - const { warnings, errors } = validateConfig(config); + const config = { data: { 'filename.api': 'FORMAT: 1A\n# Sample API\n' } } + const { warnings, errors } = validateConfig(config) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with data set to { filename: { filename, raw: apiDescription } }', () => { const config = { data: { 'filename.api': { raw: 'FORMAT: 1A\n# Sample API\n', - filename: 'filename.api', - }, - }, - }; - const { warnings, errors } = validateConfig(config); + filename: 'filename.api' + } + } + } + const { warnings, errors } = validateConfig(config) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); + assert.lengthOf(errors, 0) + }) + }) describe('with both data and apiDescriptions set', () => { const config = { data: { 'filename.api': 'FORMAT: 1A\n# Sample API v1\n' }, - apiDescriptions: [{ - location: 'configuration.apiDescriptions[0]', - content: 'FORMAT: 1A\n# Sample API v2\n', - }], - }; - const { warnings, errors } = validateConfig(config); + apiDescriptions: [ + { + location: 'configuration.apiDescriptions[0]', + content: 'FORMAT: 1A\n# Sample API v2\n' + } + ] + } + const { warnings, errors } = validateConfig(config) it('produces one warning', () => { - assert.lengthOf(warnings, 1); - }); + assert.lengthOf(warnings, 1) + }) it('produces no errors', () => { - assert.lengthOf(errors, 0); - }); - }); -}); + assert.lengthOf(errors, 0) + }) + }) +}) diff --git a/test/unit/configuration/normalizeConfig-test.js b/test/unit/configuration/normalizeConfig-test.js index 8873ea837..2a8ef85c8 100644 --- a/test/unit/configuration/normalizeConfig-test.js +++ b/test/unit/configuration/normalizeConfig-test.js @@ -1,5 +1,5 @@ -const { assert } = require('chai'); -const { +import { assert } from 'chai' +import { coerceToArray, coerceToBoolean, removeUnsupportedOptions, @@ -7,8 +7,8 @@ const { coerceApiDescriptions, coerceDeprecatedDataOption, coerceDeprecatedLevelOption, - coerceUserOption, -} = require('../../../lib/configuration/normalizeConfig'); + coerceUserOption +} from '../../../lib/configuration/normalizeConfig' describe('normalizeConfig()', () => { describe('removeUnsupportedOptions()', () => { @@ -20,185 +20,233 @@ describe('normalizeConfig()', () => { timestamp: true, blueprintPath: './foo.apib', b: true, - sandbox: true, - }); - - ['q', 'silent', 't', 'timestamp', 'blueprintPath', 'b', 'sandbox'].forEach((optionName) => { + sandbox: true + }) + + ;[ + 'q', + 'silent', + 't', + 'timestamp', + 'blueprintPath', + 'b', + 'sandbox' + ].forEach((optionName) => { it(`removes "${optionName}"`, () => { - assert.notProperty(result, optionName); - }); - }); - }); - }); + assert.notProperty(result, optionName) + }) + }) + }) + }) describe('coercion', () => { describe('coerceToArray', () => { it('when given null', () => { - assert.deepEqual(coerceToArray(null), []); - }); + assert.deepEqual(coerceToArray(null), []) + }) it('when given a string', () => { - 
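Editorial sketch, not part of the diff, of how the two helpers exercised above work together; the exact shape of the normalized object is illustrative only:

```javascript
// Editorial sketch of the normalize/validate pair used throughout the specs above
import normalizeConfig from '../../lib/configuration/normalizeConfig'
import validateConfig from '../../lib/configuration/validateConfig'

const config = { color: 'false', l: 'verbose', timestamp: true }

const normalizedConfig = normalizeConfig(config)
// 'color' is coerced to a boolean, 'l: verbose' becomes loglevel 'debug',
// and removed options such as 'timestamp' are dropped

const { warnings, errors } = validateConfig(config)
// deprecated options ('l') end up in warnings, removed ones ('timestamp') in errors
```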
assert.deepEqual(coerceToArray('foo'), ['foo']); - }); + assert.deepEqual(coerceToArray('foo'), ['foo']) + }) it('when given an array', () => { - assert.deepEqual(coerceToArray(['foo', 'bar']), ['foo', 'bar']); - }); - }); + assert.deepEqual(coerceToArray(['foo', 'bar']), ['foo', 'bar']) + }) + }) describe('coerceToBoolean', () => { it('when given a boolean', () => { - assert.equal(coerceToBoolean(true), true); - assert.equal(coerceToBoolean(false), false); - }); + assert.equal(coerceToBoolean(true), true) + assert.equal(coerceToBoolean(false), false) + }) describe('when given a string', () => { it('that equals "true"', () => { - assert.equal(coerceToBoolean('true'), true); - }); + assert.equal(coerceToBoolean('true'), true) + }) it('that equals "false"', () => { - assert.equal(coerceToBoolean('false'), false); - }); + assert.equal(coerceToBoolean('false'), false) + }) it('that has a random value', () => { - assert.equal(coerceToBoolean('foo'), true); - }); + assert.equal(coerceToBoolean('foo'), true) + }) it('that is empty', () => { - assert.equal(coerceToBoolean(''), false); - }); - }); - }); + assert.equal(coerceToBoolean(''), false) + }) + }) + }) describe('c (color alias)', () => { - const result = coerceColorOption({ c: false }); + const result = coerceColorOption({ c: false }) it('coerces to boolean "color" option', () => { - assert.propertyVal(result, 'color', false); - }); + assert.propertyVal(result, 'color', false) + }) it('removes "c" option', () => { - assert.notProperty(result, 'c'); - }); - }); + assert.notProperty(result, 'c') + }) + }) describe('apiDescriptions', () => { describe('when given a string', () => { - const result = coerceApiDescriptions('foo'); + const result = coerceApiDescriptions('foo') it('coerces into list of descriptions', () => { assert.deepEqual(result, [ { location: 'configuration.apiDescriptions[0]', - content: 'foo', - }, - ]); - }); - }); + content: 'foo' + } + ]) + }) + }) describe('when given a list', () => { - const result = coerceApiDescriptions(['foo', 'bar']); + const result = coerceApiDescriptions(['foo', 'bar']) it('coerces into list of descriptions', () => { assert.deepEqual(result, [ { location: 'configuration.apiDescriptions[0]', content: 'foo' }, - { location: 'configuration.apiDescriptions[1]', content: 'bar' }, - ]); - }); - }); - }); + { location: 'configuration.apiDescriptions[1]', content: 'bar' } + ]) + }) + }) + }) describe('user', () => { - const result = coerceUserOption({ user: 'apiary' }); + const result = coerceUserOption({ user: 'apiary' }) it('coerces to base64 encoded "header"', () => { - assert.deepEqual(result.header, ['Authorization: Basic YXBpYXJ5']); - }); + assert.deepEqual(result.header, ['Authorization: Basic YXBpYXJ5']) + }) it('removes "user" option', () => { - assert.notProperty(result, 'user'); - }); - }); - }); + assert.notProperty(result, 'user') + }) + }) + }) describe('coercion of deprecated options', () => { describe('level', () => { describe('coerces to "debug"', () => { it('when given "silly"', () => { - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'silly' }), 'loglevel', 'debug'); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'silly' }), + 'loglevel', + 'debug' + ) + }) it('when given "verbose"', () => { - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'verbose' }), 'loglevel', 'debug'); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'verbose' }), + 'loglevel', + 'debug' + ) + }) it('when given "debug"', () => { - 
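Editorial cheat-sheet, not part of the diff, for the coercion helpers exercised above:

```javascript
// Editorial cheat-sheet for the coercion helpers exercised above
import {
  coerceToArray,
  coerceToBoolean,
  coerceColorOption,
  coerceUserOption
} from '../../../lib/configuration/normalizeConfig'

coerceToArray(null)                  // []
coerceToArray('foo')                 // ['foo']
coerceToBoolean('false')             // false
coerceToBoolean('')                  // false
coerceToBoolean('anything else')     // true
coerceColorOption({ c: false })      // { color: false }
coerceUserOption({ user: 'apiary' }) // { header: ['Authorization: Basic YXBpYXJ5'] }
```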
assert.propertyVal(coerceDeprecatedLevelOption({ l: 'debug' }), 'loglevel', 'debug'); - }); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'debug' }), + 'loglevel', + 'debug' + ) + }) + }) describe('coerces to "error"', () => { it('when given "error"', () => { - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'error' }), 'loglevel', 'error'); - }); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'error' }), + 'loglevel', + 'error' + ) + }) + }) describe('coerces to "silent"', () => { it('when given "silent"', () => { - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'silent' }), 'loglevel', 'silent'); - }); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'silent' }), + 'loglevel', + 'silent' + ) + }) + }) describe('coerces to "warn"', () => { it('when given falsy value', () => { - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'warn' }), 'loglevel', 'warn'); - assert.propertyVal(coerceDeprecatedLevelOption({ l: 'foobar' }), 'loglevel', 'warn'); - assert.propertyVal(coerceDeprecatedLevelOption({ l: false }), 'loglevel', 'warn'); - assert.propertyVal(coerceDeprecatedLevelOption({ l: undefined }), 'loglevel', 'warn'); - assert.propertyVal(coerceDeprecatedLevelOption({ l: null }), 'loglevel', 'warn'); - }); - }); - }); + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'warn' }), + 'loglevel', + 'warn' + ) + assert.propertyVal( + coerceDeprecatedLevelOption({ l: 'foobar' }), + 'loglevel', + 'warn' + ) + assert.propertyVal( + coerceDeprecatedLevelOption({ l: false }), + 'loglevel', + 'warn' + ) + assert.propertyVal( + coerceDeprecatedLevelOption({ l: undefined }), + 'loglevel', + 'warn' + ) + assert.propertyVal( + coerceDeprecatedLevelOption({ l: null }), + 'loglevel', + 'warn' + ) + }) + }) + }) describe('data', () => { describe('coerces to "apiDescriptions"', () => { it('when given { filename: apiDescription } format', () => { const result = coerceDeprecatedDataOption({ data: { - 'filename.api': 'FORMAT: 1A\n# Sample API\n', - }, - }); + 'filename.api': 'FORMAT: 1A\n# Sample API\n' + } + }) assert.deepEqual(result, { apiDescriptions: [ { location: 'filename.api', - content: 'FORMAT: 1A\n# Sample API\n', - }, - ], - }); - }); + content: 'FORMAT: 1A\n# Sample API\n' + } + ] + }) + }) it('when given { filename, raw: apiDescription } format', () => { const result = coerceDeprecatedDataOption({ data: { 'filename.api': { raw: 'FORMAT: 1A\n# Sample API\n', - filename: 'filename.api', - }, - }, - }); + filename: 'filename.api' + } + } + }) assert.deepEqual(result, { apiDescriptions: [ { location: 'filename.api', - content: 'FORMAT: 1A\n# Sample API\n', - }, - ], - }); - }); + content: 'FORMAT: 1A\n# Sample API\n' + } + ] + }) + }) it('with both "data" and "apiDescriptions"', () => { const result = coerceDeprecatedDataOption({ @@ -206,25 +254,25 @@ describe('normalizeConfig()', () => { apiDescriptions: [ { location: 'configuration.apiDescriptions[0]', - content: 'FORMAT: 1A\n# Sample API v1\n', - }, - ], - }); + content: 'FORMAT: 1A\n# Sample API v1\n' + } + ] + }) assert.deepEqual(result, { apiDescriptions: [ { location: 'configuration.apiDescriptions[0]', - content: 'FORMAT: 1A\n# Sample API v1\n', + content: 'FORMAT: 1A\n# Sample API v1\n' }, { location: 'filename.api', - content: 'FORMAT: 1A\n# Sample API v2\n', - }, - ], - }); - }); - }); - }); - }); -}); + content: 'FORMAT: 1A\n# Sample API v2\n' + } + ] + }) + }) + }) + }) + }) +}) diff --git a/test/unit/configureReporters-test.js b/test/unit/configureReporters-test.js index 
124993c7b..03b7d5dd9 100644 --- a/test/unit/configureReporters-test.js +++ b/test/unit/configureReporters-test.js @@ -3,23 +3,33 @@ */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. -const { EventEmitter } = require('events'); - -const { assert } = require('chai'); -const sinon = require('sinon'); -const proxyquire = require('proxyquire').noCallThru(); - -const loggerStub = require('../../lib/logger'); -const BaseReporterStub = sinon.spy(require('../../lib/reporters/BaseReporter')); -const XUnitReporterStub = sinon.spy(require('../../lib/reporters/XUnitReporter')); -const CliReporterStub = sinon.spy(require('../../lib/reporters/CLIReporter')); -const DotReporterStub = sinon.spy(require('../../lib/reporters/DotReporter')); -const NyanCatReporterStub = sinon.spy(require('../../lib/reporters/NyanReporter')); -const HtmlReporterStub = sinon.spy(require('../../lib/reporters/HTMLReporter')); -const MarkdownReporterStub = sinon.spy(require('../../lib/reporters/MarkdownReporter')); -const ApiaryReporterStub = sinon.spy(require('../../lib/reporters/ApiaryReporter')); - -const emitterStub = new EventEmitter(); +import { EventEmitter } from 'events' + +import { assert } from 'chai' +import sinon from 'sinon' +import { noCallThru } from 'proxyquire' + +import loggerStub from '../../lib/logger' +import BaseReporter from '../../lib/reporters/BaseReporter' +import XUnitReporter from '../../lib/reporters/XUnitReporter' +import CLIReporter from '../../lib/reporters/CLIReporter' +import DotReporter from '../../lib/reporters/DotReporter' +import NyanReporter from '../../lib/reporters/NyanReporter' +import HTMLReporter from '../../lib/reporters/HTMLReporter' +import MarkdownReporter from '../../lib/reporters/MarkdownReporter' +import ApiaryReporter from '../../lib/reporters/ApiaryReporter' + +const proxyquire = noCallThru() +const BaseReporterStub = sinon.spy(BaseReporter) +const XUnitReporterStub = sinon.spy(XUnitReporter) +const CliReporterStub = sinon.spy(CLIReporter) +const DotReporterStub = sinon.spy(DotReporter) +const NyanCatReporterStub = sinon.spy(NyanReporter) +const HtmlReporterStub = sinon.spy(HTMLReporter) +const MarkdownReporterStub = sinon.spy(MarkdownReporter) +const ApiaryReporterStub = sinon.spy(ApiaryReporter) + +const emitterStub = new EventEmitter() const configureReporters = proxyquire('../../lib/configureReporters', { './logger': loggerStub, @@ -30,171 +40,217 @@ const configureReporters = proxyquire('../../lib/configureReporters', { './reporters/NyanReporter': NyanCatReporterStub, './reporters/HTMLReporter': HtmlReporterStub, './reporters/MarkdownReporter': MarkdownReporterStub, - './reporters/ApiaryReporter': ApiaryReporterStub, -}); + './reporters/ApiaryReporter': ApiaryReporterStub +}).default function resetStubs() { - emitterStub.removeAllListeners(); - BaseReporterStub.resetHistory(); - CliReporterStub.resetHistory(); - XUnitReporterStub.resetHistory(); - DotReporterStub.resetHistory(); - NyanCatReporterStub.resetHistory(); - HtmlReporterStub.resetHistory(); - MarkdownReporterStub.resetHistory(); - return ApiaryReporterStub.resetHistory(); + emitterStub.removeAllListeners() + BaseReporterStub.resetHistory() + CliReporterStub.resetHistory() + XUnitReporterStub.resetHistory() + DotReporterStub.resetHistory() + NyanCatReporterStub.resetHistory() + HtmlReporterStub.resetHistory() + MarkdownReporterStub.resetHistory() + return ApiaryReporterStub.resetHistory() } - describe('configureReporters()', () => { const configuration = { emitter: 
emitterStub, reporter: [], output: [], - 'inline-errors': false, - }; + 'inline-errors': false + } - before(() => loggerStub.transports.console.silent = true); + before(() => (loggerStub.transports.console.silent = true)) - after(() => loggerStub.transports.console.silent = false); + after(() => (loggerStub.transports.console.silent = false)) describe('when there are no reporters', () => { - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should only add a CLIReporter', (done) => { - configureReporters(configuration, {}, null); - assert.isOk(CliReporterStub.called); - return done(); - }); + configureReporters(configuration, {}, null) + assert.isOk(CliReporterStub.called) + return done() + }) describe('when silent', () => { - before(() => configuration.loglevel = 'silent'); + before(() => (configuration.loglevel = 'silent')) - after(() => configuration.loglevel = 'silent'); + after(() => (configuration.loglevel = 'silent')) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should still add reporters', (done) => { - configureReporters(configuration, {}, null); - assert.ok(CliReporterStub.called); - return done(); - }); - }); - }); + configureReporters(configuration, {}, null) + assert.ok(CliReporterStub.called) + return done() + }) + }) + }) describe('when there are only cli-based reporters', () => { - before(() => configuration.reporter = ['dot', 'nyan']); + before(() => (configuration.reporter = ['dot', 'nyan'])) - after(() => configuration.reporter = []); + after(() => (configuration.reporter = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should add a cli-based reporter', (done) => { - configureReporters(configuration, {}, null); - assert.isOk(DotReporterStub.called); - return done(); - }); + configureReporters(configuration, {}, null) + assert.isOk(DotReporterStub.called) + return done() + }) it('should not add more than one cli-based reporters', (done) => { - configureReporters(configuration, {}, null); - assert.notOk(CliReporterStub.called); - return done(); - }); - }); - + configureReporters(configuration, {}, null) + assert.notOk(CliReporterStub.called) + return done() + }) + }) describe('when there are only file-based reporters', () => { - before(() => configuration.reporter = ['xunit', 'markdown']); + before(() => (configuration.reporter = ['xunit', 'markdown'])) - after(() => configuration.reporter = []); + after(() => (configuration.reporter = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should add a CLIReporter', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(CliReporterStub.called); - return done(); - }); + configureReporters(configuration, {}, () => {}) + assert.isOk(CliReporterStub.called) + return done() + }) describe('when the number of outputs is greater than or equals the number of reporters', () => { - before(() => configuration.output = ['file1', 'file2', 'file3']); + before(() => (configuration.output = ['file1', 'file2', 'file3'])) - after(() => configuration.output = []); + after(() => (configuration.output = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should use the output paths in the order provided', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(XUnitReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file1')); - assert.isOk(MarkdownReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file2')); - return done(); - }); - }); + 
configureReporters(configuration, {}, () => {}) + assert.isOk( + XUnitReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file1' + ) + ) + assert.isOk( + MarkdownReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file2' + ) + ) + return done() + }) + }) describe('when the number of outputs is less than the number of reporters', () => { - before(() => configuration.output = ['file1']); + before(() => (configuration.output = ['file1'])) - after(() => configuration.output = []); + after(() => (configuration.output = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should use the default output paths for the additional reporters', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(XUnitReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file1')); - assert.isOk(MarkdownReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, undefined)); - return done(); - }); - }); - }); + configureReporters(configuration, {}, () => {}) + assert.isOk( + XUnitReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file1' + ) + ) + assert.isOk( + MarkdownReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + undefined + ) + ) + return done() + }) + }) + }) describe('when there are both cli-based and file-based reporters', () => { - before(() => configuration.reporter = ['nyan', 'markdown', 'html']); + before(() => (configuration.reporter = ['nyan', 'markdown', 'html'])) - after(() => configuration.reporter = []); + after(() => (configuration.reporter = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should add a cli-based reporter', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(NyanCatReporterStub.called); - return done(); - }); + configureReporters(configuration, {}, () => {}) + assert.isOk(NyanCatReporterStub.called) + return done() + }) it('should not add more than one cli-based reporters', (done) => { - configureReporters(configuration, {}, () => {}); - assert.notOk(CliReporterStub.called); - assert.notOk(DotReporterStub.called); - return done(); - }); + configureReporters(configuration, {}, () => {}) + assert.notOk(CliReporterStub.called) + assert.notOk(DotReporterStub.called) + return done() + }) describe('when the number of outputs is greather than or equals the number of file-based reporters', () => { - before(() => configuration.output = ['file1', 'file2']); + before(() => (configuration.output = ['file1', 'file2'])) - after(() => configuration.output = []); + after(() => (configuration.output = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should use the output paths in the order provided', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(MarkdownReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file1')); - assert.isOk(HtmlReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file2')); - return done(); - }); - }); + configureReporters(configuration, {}, () => {}) + assert.isOk( + MarkdownReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file1' + ) + ) + assert.isOk( + HtmlReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file2' + ) + ) + return done() + }) + }) describe('when the number of outputs is less than the number of file-based reporters', () => { - before(() => configuration.output = ['file1']); + before(() => (configuration.output = ['file1'])) - after(() => 
configuration.output = []); + after(() => (configuration.output = [])) - beforeEach(() => resetStubs()); + beforeEach(() => resetStubs()) it('should use the default output paths for the additional reporters', (done) => { - configureReporters(configuration, {}, () => {}); - assert.isOk(MarkdownReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, 'file1')); - assert.isOk(HtmlReporterStub.calledWith(emitterStub, { fileBasedReporters: 2 }, undefined)); - return done(); - }); - }); - }); -}); + configureReporters(configuration, {}, () => {}) + assert.isOk( + MarkdownReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + 'file1' + ) + ) + assert.isOk( + HtmlReporterStub.calledWith( + emitterStub, + { fileBasedReporters: 2 }, + undefined + ) + ) + return done() + }) + }) + }) +}) diff --git a/test/unit/getGoBinary-test.js b/test/unit/getGoBinary-test.js index f334468f1..86eb9025e 100644 --- a/test/unit/getGoBinary-test.js +++ b/test/unit/getGoBinary-test.js @@ -1,97 +1,119 @@ -const childProcess = require('child_process'); -const path = require('path'); -const sinon = require('sinon'); -const { assert } = require('chai'); +import childProcess from 'child_process' +import path from 'path' +import sinon from 'sinon' +import { assert } from 'chai' -const getGoBinary = require('../../lib/getGoBinary'); +import getGoBinary from '../../lib/getGoBinary' describe('getGoBinary()', () => { - let goBin; - let goPath; + let goBin + let goPath beforeEach(() => { - goBin = process.env.GOBIN; - delete process.env.GOBIN; - goPath = process.env.GOPATH; - delete process.env.GOPATH; - }); + goBin = process.env.GOBIN + delete process.env.GOBIN + goPath = process.env.GOPATH + delete process.env.GOPATH + }) afterEach(() => { - process.env.GOBIN = goBin; - process.env.GOPATH = goPath; - }); + process.env.GOBIN = goBin + process.env.GOPATH = goPath + }) describe('when $GOBIN is set', () => { - let callbackArgs; + let callbackArgs beforeEach((done) => { - process.env.GOBIN = path.join('dummy', 'gobin', 'path'); + process.env.GOBIN = path.join('dummy', 'gobin', 'path') getGoBinary((...args) => { - callbackArgs = args; - done(); - }); - }); - - it('resolves as $GOBIN', () => assert.deepEqual(callbackArgs, [null, path.join('dummy', 'gobin', 'path')])); - }); + callbackArgs = args + done() + }) + }) + + it('resolves as $GOBIN', () => + assert.deepEqual(callbackArgs, [ + null, + path.join('dummy', 'gobin', 'path') + ])) + }) describe('when $GOPATH is set', () => { - let callbackArgs; + let callbackArgs beforeEach((done) => { - process.env.GOPATH = path.join('dummy', 'gopath', 'path'); + process.env.GOPATH = path.join('dummy', 'gopath', 'path') getGoBinary((...args) => { - callbackArgs = args; - done(); - }); - }); - - it('resolves as $GOPATH + /bin', () => assert.deepEqual(callbackArgs, [null, path.join('dummy', 'gopath', 'path', 'bin')])); - }); + callbackArgs = args + done() + }) + }) + + it('resolves as $GOPATH + /bin', () => + assert.deepEqual(callbackArgs, [ + null, + path.join('dummy', 'gopath', 'path', 'bin') + ])) + }) describe('when both $GOBIN and $GOPATH are set', () => { - let callbackArgs; + let callbackArgs beforeEach((done) => { - process.env.GOBIN = path.join('dummy', 'gobin', 'path'); - process.env.GOPATH = path.join('dummy', 'gopath', 'path'); + process.env.GOBIN = path.join('dummy', 'gobin', 'path') + process.env.GOPATH = path.join('dummy', 'gopath', 'path') getGoBinary((...args) => { - callbackArgs = args; - done(); - }); - }); - - it('resolves as $GOBIN', () => 
assert.deepEqual(callbackArgs, [null, path.join('dummy', 'gobin', 'path')])); - }); + callbackArgs = args + done() + }) + }) + + it('resolves as $GOBIN', () => + assert.deepEqual(callbackArgs, [ + null, + path.join('dummy', 'gobin', 'path') + ])) + }) describe('when neither $GOBIN nor $GOPATH are set', () => { - let callbackArgs; + let callbackArgs beforeEach((done) => { - sinon.stub(childProcess, 'exec').callsFake((command, callback) => callback(null, path.join('dummy', 'gopath', 'path'))); + sinon + .stub(childProcess, 'exec') + .callsFake((command, callback) => + callback(null, path.join('dummy', 'gopath', 'path')) + ) getGoBinary((...args) => { - callbackArgs = args; - done(); - }); - }); - after(() => childProcess.exec.restore()); - - it('calls \'go env GOPATH\' + /bin', () => assert.deepEqual(callbackArgs, [null, path.join('dummy', 'gopath', 'path', 'bin')])); - }); - - describe('when \'go env GOPATH\' fails', () => { - const error = new Error('Ouch!'); - let callbackArgs; + callbackArgs = args + done() + }) + }) + after(() => childProcess.exec.restore()) + + it("calls 'go env GOPATH' + /bin", () => + assert.deepEqual(callbackArgs, [ + null, + path.join('dummy', 'gopath', 'path', 'bin') + ])) + }) + + describe("when 'go env GOPATH' fails", () => { + const error = new Error('Ouch!') + let callbackArgs beforeEach((done) => { - sinon.stub(childProcess, 'exec').callsFake((command, callback) => callback(error)); + sinon + .stub(childProcess, 'exec') + .callsFake((command, callback) => callback(error)) getGoBinary((...args) => { - callbackArgs = args; - done(); - }); - }); - after(() => childProcess.exec.restore()); - - it('propagates the error', () => assert.deepEqual(callbackArgs, [error])); - }); -}); + callbackArgs = args + done() + }) + }) + after(() => childProcess.exec.restore()) + + it('propagates the error', () => assert.deepEqual(callbackArgs, [error])) + }) +}) diff --git a/test/unit/getProxySettings-test.js b/test/unit/getProxySettings-test.js index 9a506b3b6..40c96a900 100644 --- a/test/unit/getProxySettings-test.js +++ b/test/unit/getProxySettings-test.js @@ -1,59 +1,62 @@ -const { assert } = require('chai'); - -const getProxySettings = require('../../lib/getProxySettings'); +import { assert } from 'chai' +import getProxySettings from '../../lib/getProxySettings' describe('getProxySettings()', () => { it('detects HTTP_PROXY', () => { - assert.deepEqual(getProxySettings({ - SHELL: '/bin/bash', - USER: 'honza', - HTTP_PROXY: 'http://proxy.example.com:8080', - }), [ - 'HTTP_PROXY=http://proxy.example.com:8080', - ]); - }); + assert.deepEqual( + getProxySettings({ + SHELL: '/bin/bash', + USER: 'honza', + HTTP_PROXY: 'http://proxy.example.com:8080' + }), + ['HTTP_PROXY=http://proxy.example.com:8080'] + ) + }) it('detects HTTPS_PROXY', () => { - assert.deepEqual(getProxySettings({ - SHELL: '/bin/bash', - USER: 'honza', - HTTPS_PROXY: 'https://proxy.example.com:8080', - }), [ - 'HTTPS_PROXY=https://proxy.example.com:8080', - ]); - }); + assert.deepEqual( + getProxySettings({ + SHELL: '/bin/bash', + USER: 'honza', + HTTPS_PROXY: 'https://proxy.example.com:8080' + }), + ['HTTPS_PROXY=https://proxy.example.com:8080'] + ) + }) it('detects NO_PROXY', () => { - assert.deepEqual(getProxySettings({ - SHELL: '/bin/bash', - USER: 'honza', - NO_PROXY: '*', - }), [ - 'NO_PROXY=*', - ]); - }); + assert.deepEqual( + getProxySettings({ + SHELL: '/bin/bash', + USER: 'honza', + NO_PROXY: '*' + }), + ['NO_PROXY=*'] + ) + }) it('detects both lower and upper case', () => { - 
assert.deepEqual(getProxySettings({ - SHELL: '/bin/bash', - USER: 'honza', - http_proxy: 'http://proxy.example.com:8080', - NO_PROXY: '*', - }), [ - 'http_proxy=http://proxy.example.com:8080', - 'NO_PROXY=*', - ]); - }); + assert.deepEqual( + getProxySettings({ + SHELL: '/bin/bash', + USER: 'honza', + http_proxy: 'http://proxy.example.com:8080', + NO_PROXY: '*' + }), + ['http_proxy=http://proxy.example.com:8080', 'NO_PROXY=*'] + ) + }) it('skips environment variables set to empty strings', () => { - assert.deepEqual(getProxySettings({ - SHELL: '/bin/bash', - USER: 'honza', - http_proxy: 'http://proxy.example.com:8080', - NO_PROXY: '', - }), [ - 'http_proxy=http://proxy.example.com:8080', - ]); - }); -}); + assert.deepEqual( + getProxySettings({ + SHELL: '/bin/bash', + USER: 'honza', + http_proxy: 'http://proxy.example.com:8080', + NO_PROXY: '' + }), + ['http_proxy=http://proxy.example.com:8080'] + ) + }) +}) diff --git a/test/unit/hooksLog-test.js b/test/unit/hooksLog-test.js index 4d84fc933..854db06dc 100644 --- a/test/unit/hooksLog-test.js +++ b/test/unit/hooksLog-test.js @@ -1,79 +1,92 @@ -const clone = require('clone'); -const sinon = require('sinon'); -const util = require('util'); +import clone from 'clone' +import sinon from 'sinon' +import util from 'util' -const { assert } = require('chai'); +import { assert } from 'chai' -const hooksLog = require('../../lib/hooksLog'); -const reporterOutputLoggerStub = require('../../lib/reporters/reporterOutputLogger'); +import hooksLog from '../../lib/hooksLog' +import reporterOutputLoggerStub from '../../lib/reporters/reporterOutputLogger' describe('hooksLog()', () => { - const exampleLogs = [ - { content: 'some text' }, - ]; + const exampleLogs = [{ content: 'some text' }] before(() => { - sinon.stub(reporterOutputLoggerStub, 'hook').callsFake(() => { }); - }); + sinon.stub(reporterOutputLoggerStub, 'hook').callsFake(() => {}) + }) after(() => { - reporterOutputLoggerStub.hook.restore(); - }); + reporterOutputLoggerStub.hook.restore() + }) it('should print using util.format only when content is an object type', () => { - const data = hooksLog(clone(exampleLogs), reporterOutputLoggerStub, { hello: 'object world' }); - assert.equal(reporterOutputLoggerStub.hook.callCount, 1); - assert.deepEqual(reporterOutputLoggerStub.hook.getCall(0).args[0], { hello: 'object world' }); - assert.lengthOf(data, 2); - assert.isObject(data[1]); - assert.property(data[1], 'content'); - assert.property(data[1], 'timestamp'); - assert.isString(data[1].content); - assert.strictEqual(data[1].content, util.format({ hello: 'object world' })); - }); + const data = hooksLog(clone(exampleLogs), reporterOutputLoggerStub, { + hello: 'object world' + }) + assert.equal(reporterOutputLoggerStub.hook.callCount, 1) + assert.deepEqual(reporterOutputLoggerStub.hook.getCall(0).args[0], { + hello: 'object world' + }) + assert.lengthOf(data, 2) + assert.isObject(data[1]) + assert.property(data[1], 'content') + assert.property(data[1], 'timestamp') + assert.isString(data[1].content) + assert.strictEqual(data[1].content, util.format({ hello: 'object world' })) + }) describe('functionality', () => { beforeEach(() => { - reporterOutputLoggerStub.hook.resetHistory(); - }); + reporterOutputLoggerStub.hook.resetHistory() + }) it('should push message to the passed array and return the new array', () => { - const originLogs = []; - const data = hooksLog(originLogs, reporterOutputLoggerStub, 'one message'); - assert.isArray(data); - assert.lengthOf(data, 1); - assert.strictEqual(data, 
originLogs); - assert.deepEqual(data, originLogs); - assert.propertyVal(data[0], 'content', 'one message'); - }); + const originLogs = [] + const data = hooksLog(originLogs, reporterOutputLoggerStub, 'one message') + assert.isArray(data) + assert.lengthOf(data, 1) + assert.strictEqual(data, originLogs) + assert.deepEqual(data, originLogs) + assert.propertyVal(data[0], 'content', 'one message') + }) it('should push message to undefined logs and return new array instead', () => { - const originLogs = undefined; - const data = hooksLog(originLogs, reporterOutputLoggerStub, 'another message'); - assert.isArray(data); - assert.lengthOf(data, 1); - assert.isUndefined(originLogs); - assert.notDeepEqual(data, originLogs); - assert.propertyVal(data[0], 'content', 'another message'); - }); + const originLogs = undefined + const data = hooksLog( + originLogs, + reporterOutputLoggerStub, + 'another message' + ) + assert.isArray(data) + assert.lengthOf(data, 1) + assert.isUndefined(originLogs) + assert.notDeepEqual(data, originLogs) + assert.propertyVal(data[0], 'content', 'another message') + }) it('should append message to an existing logs array', () => { - const originLogs = clone(exampleLogs); - const data = hooksLog(originLogs, reporterOutputLoggerStub, 'some other idea'); - assert.isArray(data); - assert.lengthOf(data, 2); - assert.deepEqual(data, originLogs); - assert.deepEqual(data[0], exampleLogs[0]); - assert.propertyVal(data[1], 'content', 'some other idea'); - }); + const originLogs = clone(exampleLogs) + const data = hooksLog( + originLogs, + reporterOutputLoggerStub, + 'some other idea' + ) + assert.isArray(data) + assert.lengthOf(data, 2) + assert.deepEqual(data, originLogs) + assert.deepEqual(data[0], exampleLogs[0]) + assert.propertyVal(data[1], 'content', 'some other idea') + }) it('should use "hook" logger level', () => { - hooksLog([], reporterOutputLoggerStub, 'there is a log'); + hooksLog([], reporterOutputLoggerStub, 'there is a log') - assert.isTrue(reporterOutputLoggerStub.hook.called); - assert.equal(reporterOutputLoggerStub.hook.callCount, 1); + assert.isTrue(reporterOutputLoggerStub.hook.called) + assert.equal(reporterOutputLoggerStub.hook.callCount, 1) - assert.equal(reporterOutputLoggerStub.hook.getCall(0).args[0], 'there is a log'); - }); - }); -}); + assert.equal( + reporterOutputLoggerStub.hook.getCall(0).args[0], + 'there is a log' + ) + }) + }) +}) diff --git a/test/unit/init/applyAnswers-test.js b/test/unit/init/applyAnswers-test.js index 690f6c143..90a48c07b 100644 --- a/test/unit/init/applyAnswers-test.js +++ b/test/unit/init/applyAnswers-test.js @@ -1,95 +1,103 @@ -const { assert } = require('chai'); -const sinon = require('sinon'); - -const { _applyAnswers: applyAnswers } = require('../../../lib/init'); +import { assert } from 'chai' +import sinon from 'sinon' +import { applyAnswers } from '../../../lib/init' function createConfig() { - return { _: [], custom: {} }; + return { _: [], custom: {} } } - describe('init._applyAnswers()', () => { const ci = { appveyor: sinon.spy(), circleci: sinon.spy(), travisci: sinon.spy(), - wercker: sinon.spy(), - }; + wercker: sinon.spy() + } - beforeEach(() => Object.keys(ci).forEach(name => ci[name].resetHistory())); + beforeEach(() => Object.keys(ci).forEach((name) => ci[name].resetHistory())) it('applies the API description and the API host as positional CLI arguments', () => { const config = applyAnswers(createConfig(), { apiDescription: 'apiary.apib', - apiHost: 'http://127.0.0.1:5000', - }); - assert.deepEqual(config._, 
['apiary.apib', 'http://127.0.0.1:5000']); - }); + apiHost: 'http://127.0.0.1:5000' + }) + assert.deepEqual(config._, ['apiary.apib', 'http://127.0.0.1:5000']) + }) it('sets the server', () => { - const config = applyAnswers(createConfig(), { server: 'npm start' }); - assert.equal(config.server, 'npm start'); - }); + const config = applyAnswers(createConfig(), { server: 'npm start' }) + assert.equal(config.server, 'npm start') + }) it('sets the server to null if not provided', () => { - const config = applyAnswers(createConfig(), {}); - assert.isNull(config.server); - }); + const config = applyAnswers(createConfig(), {}) + assert.isNull(config.server) + }) it('sets the language', () => { - const config = applyAnswers(createConfig(), { language: 'python' }); - assert.equal(config.language, 'python'); - }); + const config = applyAnswers(createConfig(), { language: 'python' }) + assert.equal(config.language, 'python') + }) it('uses default language (nodejs) when none is prompted', () => { - const config = applyAnswers(createConfig(), { language: undefined }); - assert.equal(config.language, 'nodejs'); - }); + const config = applyAnswers(createConfig(), { language: undefined }) + assert.equal(config.language, 'nodejs') + }) it('sets no reporter by default', () => { - const config = applyAnswers(createConfig(), {}); - assert.isUndefined(config.reporter); - }); - it('sets the reporter to \'apiary\' if asked', () => { - const config = applyAnswers(createConfig(), { apiary: true }); - assert.equal(config.reporter, 'apiary'); - }); + const config = applyAnswers(createConfig(), {}) + assert.isUndefined(config.reporter) + }) + it("sets the reporter to 'apiary' if asked", () => { + const config = applyAnswers(createConfig(), { apiary: true }) + assert.equal(config.reporter, 'apiary') + }) it('sets no custom data by default', () => { - const config = applyAnswers(createConfig(), {}); - assert.deepEqual(config.custom, {}); - }); + const config = applyAnswers(createConfig(), {}) + assert.deepEqual(config.custom, {}) + }) it('sets the Apiary API key if provided', () => { - const config = applyAnswers(createConfig(), { apiaryApiKey: '1234' }); - assert.equal(config.custom.apiaryApiKey, '1234'); - }); + const config = applyAnswers(createConfig(), { apiaryApiKey: '1234' }) + assert.equal(config.custom.apiaryApiKey, '1234') + }) it('sets the Apiary API name if provided', () => { - const config = applyAnswers(createConfig(), { apiaryApiName: 'myproject' }); - assert.equal(config.custom.apiaryApiName, 'myproject'); - }); + const config = applyAnswers(createConfig(), { apiaryApiName: 'myproject' }) + assert.equal(config.custom.apiaryApiName, 'myproject') + }) it('creates selected CI configuration if asked', () => { - applyAnswers(createConfig(), { createCI: 'wercker' }, { ci }); - assert.isTrue(ci.wercker.calledOnce); - assert.isFalse(ci.appveyor.called || ci.circleci.called || ci.travisci.called); - }); + applyAnswers(createConfig(), { createCI: 'wercker' }, { ci }) + assert.isTrue(ci.wercker.calledOnce) + assert.isFalse( + ci.appveyor.called || ci.circleci.called || ci.travisci.called + ) + }) it('updates AppVeyor if asked', () => { - applyAnswers(createConfig(), { appveyor: true }, { ci }); - assert.isTrue(ci.appveyor.calledOnce); - assert.isFalse(ci.circleci.called || ci.travisci.called || ci.wercker.called); - }); + applyAnswers(createConfig(), { appveyor: true }, { ci }) + assert.isTrue(ci.appveyor.calledOnce) + assert.isFalse( + ci.circleci.called || ci.travisci.called || ci.wercker.called + ) + }) 
it('updates CircleCI if asked', () => { - applyAnswers(createConfig(), { circleci: true }, { ci }); - assert.isTrue(ci.circleci.calledOnce); - assert.isFalse(ci.appveyor.called || ci.travisci.called || ci.wercker.called); - }); + applyAnswers(createConfig(), { circleci: true }, { ci }) + assert.isTrue(ci.circleci.calledOnce) + assert.isFalse( + ci.appveyor.called || ci.travisci.called || ci.wercker.called + ) + }) it('updates Travis CI if asked', () => { - applyAnswers(createConfig(), { travisci: true }, { ci }); - assert.isTrue(ci.travisci.calledOnce); - assert.isFalse(ci.appveyor.called || ci.circleci.called || ci.wercker.called); - }); + applyAnswers(createConfig(), { travisci: true }, { ci }) + assert.isTrue(ci.travisci.calledOnce) + assert.isFalse( + ci.appveyor.called || ci.circleci.called || ci.wercker.called + ) + }) it('updates Wercker if asked', () => { - applyAnswers(createConfig(), { wercker: true }, { ci }); - assert.isTrue(ci.wercker.calledOnce); - assert.isFalse(ci.appveyor.called || ci.circleci.called || ci.travisci.called); - }); + applyAnswers(createConfig(), { wercker: true }, { ci }) + assert.isTrue(ci.wercker.calledOnce) + assert.isFalse( + ci.appveyor.called || ci.circleci.called || ci.travisci.called + ) + }) it('updates multiple CIs if asked', () => { - applyAnswers(createConfig(), { wercker: true, circleci: true }, { ci }); - assert.isTrue(ci.circleci.calledOnce && ci.wercker.calledOnce); - assert.isFalse(ci.appveyor.called || ci.travisci.called); - }); -}); + applyAnswers(createConfig(), { wercker: true, circleci: true }, { ci }) + assert.isTrue(ci.circleci.calledOnce && ci.wercker.calledOnce) + assert.isFalse(ci.appveyor.called || ci.travisci.called) + }) +}) diff --git a/test/unit/init/detectApiDescription-test.js b/test/unit/init/detectApiDescription-test.js index 0afe95d0a..13f7c5acb 100644 --- a/test/unit/init/detectApiDescription-test.js +++ b/test/unit/init/detectApiDescription-test.js @@ -1,45 +1,50 @@ -const { assert } = require('chai'); - -const { _detectApiDescription: detectApiDescription } = require('../../../lib/init'); +import { assert } from 'chai' +import { detectApiDescription } from '../../../lib/init' describe('init._detectApiDescription()', () => { - it('defaults to API Blueprint on empty array', () => assert.equal(detectApiDescription([]), 'apiary.apib')); - - it('defaults to API Blueprint on arbitrary files', () => assert.equal(detectApiDescription(['foo', 'bar']), 'apiary.apib')); - - it('detects the first API Blueprint file', () => assert.equal( - detectApiDescription(['foo', 'boo.apib', 'bar', 'moo.apib']), - 'boo.apib' - )); - - it('detects the first .yml file containing \'swagger\' as OpenAPI 2', () => assert.equal( - detectApiDescription(['foo', 'this-is-swagger.yml', 'bar']), - 'this-is-swagger.yml' - )); - - it('detects the first .yaml file containing \'swagger\' as OpenAPI 2', () => assert.equal( - detectApiDescription(['foo', 'this-is-swagger.yaml', 'bar']), - 'this-is-swagger.yaml' - )); - - it('detects the first .yml file containing \'api\' as OpenAPI 2', () => assert.equal( - detectApiDescription(['foo', 'openapi.yml', 'bar']), - 'openapi.yml' - )); - - it('detects the first .yaml file containing \'api\' as OpenAPI 2', () => assert.equal( - detectApiDescription(['foo', 'openapi.yaml', 'bar']), - 'openapi.yaml' - )); - - it('prefers API Blueprint over OpenAPI 2', () => assert.equal( - detectApiDescription(['swagger.yml', 'boo.apib']), - 'boo.apib' - )); - - it('prefers \'swagger\' over \'api\'', () => assert.equal( - 
detectApiDescription(['api.yml', 'swagger.yml']), - 'swagger.yml' - )); -}); + it('defaults to API Blueprint on empty array', () => + assert.equal(detectApiDescription([]), 'apiary.apib')) + + it('defaults to API Blueprint on arbitrary files', () => + assert.equal(detectApiDescription(['foo', 'bar']), 'apiary.apib')) + + it('detects the first API Blueprint file', () => + assert.equal( + detectApiDescription(['foo', 'boo.apib', 'bar', 'moo.apib']), + 'boo.apib' + )) + + it("detects the first .yml file containing 'swagger' as OpenAPI 2", () => + assert.equal( + detectApiDescription(['foo', 'this-is-swagger.yml', 'bar']), + 'this-is-swagger.yml' + )) + + it("detects the first .yaml file containing 'swagger' as OpenAPI 2", () => + assert.equal( + detectApiDescription(['foo', 'this-is-swagger.yaml', 'bar']), + 'this-is-swagger.yaml' + )) + + it("detects the first .yml file containing 'api' as OpenAPI 2", () => + assert.equal( + detectApiDescription(['foo', 'openapi.yml', 'bar']), + 'openapi.yml' + )) + + it("detects the first .yaml file containing 'api' as OpenAPI 2", () => + assert.equal( + detectApiDescription(['foo', 'openapi.yaml', 'bar']), + 'openapi.yaml' + )) + + it('prefers API Blueprint over OpenAPI 2', () => + assert.equal(detectApiDescription(['swagger.yml', 'boo.apib']), 'boo.apib')) + + it("prefers 'swagger' over 'api'", () => + assert.equal( + detectApiDescription(['api.yml', 'swagger.yml']), + 'swagger.yml' + )) +}) diff --git a/test/unit/init/detectCI-test.js b/test/unit/init/detectCI-test.js index 93d3f3dff..13c9aff26 100644 --- a/test/unit/init/detectCI-test.js +++ b/test/unit/init/detectCI-test.js @@ -1,21 +1,25 @@ -const { assert } = require('chai'); - -const { _detectCI: detectCI } = require('../../../lib/init'); +import { assert } from 'chai' +import { detectCI } from '../../../lib/init' describe('init._detectCI()', () => { - it('detects no CI on empty array', () => assert.deepEqual(detectCI([]), [])); + it('detects no CI on empty array', () => assert.deepEqual(detectCI([]), [])) - it('detects AppVeyor', () => assert.deepEqual(detectCI(['README', 'appveyor.yml']), ['appveyor'])); + it('detects AppVeyor', () => + assert.deepEqual(detectCI(['README', 'appveyor.yml']), ['appveyor'])) - it('detects CircleCI', () => assert.deepEqual(detectCI(['README', '.circleci']), ['circleci'])); + it('detects CircleCI', () => + assert.deepEqual(detectCI(['README', '.circleci']), ['circleci'])) - it('detects Travis CI', () => assert.deepEqual(detectCI(['README', '.travis.yml']), ['travisci'])); + it('detects Travis CI', () => + assert.deepEqual(detectCI(['README', '.travis.yml']), ['travisci'])) - it('detects Wercker', () => assert.deepEqual(detectCI(['README', 'wercker.yml']), ['wercker'])); + it('detects Wercker', () => + assert.deepEqual(detectCI(['README', 'wercker.yml']), ['wercker'])) - it('detects multiple CIs', () => assert.deepEqual( - detectCI(['README', 'wercker.yml', '.circleci']), - ['wercker', 'circleci'] - )); -}); + it('detects multiple CIs', () => + assert.deepEqual(detectCI(['README', 'wercker.yml', '.circleci']), [ + 'wercker', + 'circleci' + ])) +}) diff --git a/test/unit/init/detectLanguage-test.js b/test/unit/init/detectLanguage-test.js index 2a411b90e..53c7ca90d 100644 --- a/test/unit/init/detectLanguage-test.js +++ b/test/unit/init/detectLanguage-test.js @@ -1,20 +1,22 @@ -const { assert } = require('chai'); - -const { _detectLanguage: detectLanguage } = require('../../../lib/init'); +import { assert } from 'chai' +import { detectLanguage } from '../../../lib/init' 
describe('init._detectLanguage()', () => { - it('defaults to JavaScript', () => assert.equal(detectLanguage([]), 'nodejs')); - - [ + it('defaults to JavaScript', () => assert.equal(detectLanguage([]), 'nodejs')) + ;[ { name: 'Rust', value: 'rust', file: 'Cargo.toml' }, { name: 'Go', value: 'go', file: 'foo.go' }, - { name: 'PHP', value: 'php', file: 'composer.json' }, + { name: 'PHP', value: 'php', file: 'composer.json' } ].forEach(({ name, value, file }) => { - it(`prioritizes ${name} over Python`, () => assert.equal(detectLanguage(['README', 'Pipfile', file]), value)); - it(`prioritizes ${name} over Ruby`, () => assert.equal(detectLanguage(['README', 'Gemfile', file]), value)); - }); + it(`prioritizes ${name} over Python`, () => + assert.equal(detectLanguage(['README', 'Pipfile', file]), value)) + it(`prioritizes ${name} over Ruby`, () => + assert.equal(detectLanguage(['README', 'Gemfile', file]), value)) + }) - it('detects Python', () => assert.equal(detectLanguage(['README', 'Pipfile']), 'python')); - it('detects Ruby', () => assert.equal(detectLanguage(['README', 'Gemfile']), 'ruby')); -}); + it('detects Python', () => + assert.equal(detectLanguage(['README', 'Pipfile']), 'python')) + it('detects Ruby', () => + assert.equal(detectLanguage(['README', 'Gemfile']), 'ruby')) +}) diff --git a/test/unit/init/detectServer-test.js b/test/unit/init/detectServer-test.js index fd18422ee..73d1dfc78 100644 --- a/test/unit/init/detectServer-test.js +++ b/test/unit/init/detectServer-test.js @@ -1,18 +1,23 @@ -const { assert } = require('chai'); - -const { _detectServer: detectServer } = require('../../../lib/init'); +import { assert } from 'chai' +import { detectServer } from '../../../lib/init' describe('init._detectServer()', () => { - it('defaults to \'npm start\' script', () => assert.equal(detectServer([]), 'npm start')); + it("defaults to 'npm start' script", () => { + assert.equal(detectServer([]), 'npm start') + }) - it('assumes Python project means Django application', () => assert.equal( - detectServer(['README', 'Pipfile']), - 'python manage.py runserver' - )); + it('assumes Python project means Django application', () => { + assert.equal( + detectServer(['README', 'Pipfile']), + 'python manage.py runserver' + ) + }) - it('assumes Ruby project means RoR application', () => assert.equal( - detectServer(['README', 'Gemfile']), - 'bundle exec rails server' - )); -}); + it('assumes Ruby project means RoR application', () => { + assert.equal( + detectServer(['README', 'Gemfile']), + 'bundle exec rails server' + ) + }) +}) diff --git a/test/unit/init/printClosingMessage-test.js b/test/unit/init/printClosingMessage-test.js index 2c36476b7..df9ef1d43 100644 --- a/test/unit/init/printClosingMessage-test.js +++ b/test/unit/init/printClosingMessage-test.js @@ -1,32 +1,30 @@ -const { assert } = require('chai'); - -const { - _printClosingMessage: printClosingMessage, -} = require('../../../lib/init'); +import { assert } from 'chai' +import { printClosingMessage } from '../../../lib/init' function print(s) { - print.output += `${s}\n`; + print.output += `${s}\n` } - describe('init._printClosingMessage()', () => { - beforeEach(() => { print.output = ''; }); + beforeEach(() => { + print.output = '' + }) it('mentions the config has been saved to dredd.yml', () => { - printClosingMessage({ language: 'nodejs' }, print); - assert.include(print.output, 'saved to dredd.yml'); - }); + printClosingMessage({ language: 'nodejs' }, print) + assert.include(print.output, 'saved to dredd.yml') + }) it('does not 
mention hooks when the language is JavaScript', () => { - printClosingMessage({ language: 'nodejs' }, print); - assert.notInclude(print.output, 'hooks'); - }); + printClosingMessage({ language: 'nodejs' }, print) + assert.notInclude(print.output, 'hooks') + }) it('does mention hooks when the language is not JavaScript', () => { - printClosingMessage({ language: 'python' }, print); - assert.include(print.output, 'hooks'); - }); + printClosingMessage({ language: 'python' }, print) + assert.include(print.output, 'hooks') + }) it('hints how to install non-JavaScript hooks', () => { - printClosingMessage({ language: 'python' }, print); - assert.include(print.output, 'pip install dredd_hooks'); - }); -}); + printClosingMessage({ language: 'python' }, print) + assert.include(print.output, 'pip install dredd_hooks') + }) +}) diff --git a/test/unit/init/updateAppVeyor-test.js b/test/unit/init/updateAppVeyor-test.js index 122649544..548c0f864 100644 --- a/test/unit/init/updateAppVeyor-test.js +++ b/test/unit/init/updateAppVeyor-test.js @@ -1,33 +1,31 @@ -const { assert } = require('chai'); - -const { _updateAppVeyor: updateAppVeyor } = require('../../../lib/init'); +import { assert } from 'chai' +import { updateAppVeyor } from '../../../lib/init' function createOptions(contents) { - return { editYaml: (file, update) => update(contents) }; + return { editYaml: (file, update) => update(contents) } } - describe('init._updateAppVeyor()', () => { it('is able to create a new config file', () => { - const contents = {}; - updateAppVeyor(createOptions(contents)); + const contents = {} + updateAppVeyor(createOptions(contents)) - assert.include(JSON.stringify(contents), 'dredd'); - }); + assert.include(JSON.stringify(contents), 'dredd') + }) it('adds commands to install Dredd', () => { - const contents = { install: ['pipenv install'] }; - updateAppVeyor(createOptions(contents)); + const contents = { install: ['pipenv install'] } + updateAppVeyor(createOptions(contents)) - assert.equal(contents.install.length, 4); - assert.match(contents.install[3], /npm.+i.+dredd/); - }); + assert.equal(contents.install.length, 4) + assert.match(contents.install[3], /npm.+i.+dredd/) + }) it('adds a command to run Dredd', () => { - const contents = { test_script: ['pytest'] }; - updateAppVeyor(createOptions(contents)); + const contents = { test_script: ['pytest'] } + updateAppVeyor(createOptions(contents)) - assert.deepEqual(contents.test_script, ['pytest', 'dredd']); - }); -}); + assert.deepEqual(contents.test_script, ['pytest', 'dredd']) + }) +}) diff --git a/test/unit/init/updateCircleCI-test.js b/test/unit/init/updateCircleCI-test.js index b2d2865bf..1f66ca54e 100644 --- a/test/unit/init/updateCircleCI-test.js +++ b/test/unit/init/updateCircleCI-test.js @@ -1,28 +1,26 @@ -const { assert } = require('chai'); - -const { _updateCircleCI: updateCircleCI } = require('../../../lib/init'); +import { assert } from 'chai' +import { updateCircleCI } from '../../../lib/init' function createOptions(contents) { - return { editYaml: (file, update) => update(contents) }; + return { editYaml: (file, update) => update(contents) } } - describe('init._updateCircleCI()', () => { it('is able to create a new config file', () => { - const contents = {}; - updateCircleCI(createOptions(contents)); - - assert.include(JSON.stringify(contents), 'dredd'); - }); - - it('sets a \'dredd\' job', () => { - const contents = {}; - updateCircleCI(createOptions(contents)); - - assert.match(contents.jobs.dredd.docker[0].image, /\/node:/); - 
assert.equal(contents.jobs.dredd.steps[0], 'checkout'); - assert.match(contents.jobs.dredd.steps[1].run, /npm.+i.+dredd/); - assert.equal(contents.jobs.dredd.steps[2].run, 'dredd'); - }); -}); + const contents = {} + updateCircleCI(createOptions(contents)) + + assert.include(JSON.stringify(contents), 'dredd') + }) + + it("sets a 'dredd' job", () => { + const contents = {} + updateCircleCI(createOptions(contents)) + + assert.match(contents.jobs.dredd.docker[0].image, /\/node:/) + assert.equal(contents.jobs.dredd.steps[0], 'checkout') + assert.match(contents.jobs.dredd.steps[1].run, /npm.+i.+dredd/) + assert.equal(contents.jobs.dredd.steps[2].run, 'dredd') + }) +}) diff --git a/test/unit/init/updateTravisCI-test.js b/test/unit/init/updateTravisCI-test.js index dcfa19089..397c49f1b 100644 --- a/test/unit/init/updateTravisCI-test.js +++ b/test/unit/init/updateTravisCI-test.js @@ -1,34 +1,32 @@ -const { assert } = require('chai'); - -const { _updateTravisCI: updateTravisCI } = require('../../../lib/init'); +import { assert } from 'chai' +import { updateTravisCI } from '../../../lib/init' function createOptions(contents) { - return { editYaml: (file, update) => update(contents) }; + return { editYaml: (file, update) => update(contents) } } - describe('init._updateTravisCI()', () => { it('is able to create a new config file', () => { - const contents = {}; - updateTravisCI(createOptions(contents)); + const contents = {} + updateTravisCI(createOptions(contents)) - assert.include(JSON.stringify(contents), 'dredd'); - }); + assert.include(JSON.stringify(contents), 'dredd') + }) it('adds a command to install Dredd', () => { - const contents = { before_install: ['pipenv install'] }; - updateTravisCI(createOptions(contents)); + const contents = { before_install: ['pipenv install'] } + updateTravisCI(createOptions(contents)) - assert.equal(contents.before_install[0], 'pipenv install'); - assert.match(contents.before_install[1], /npm.+i.+dredd/); - assert.equal(contents.before_install.length, 2); - }); + assert.equal(contents.before_install[0], 'pipenv install') + assert.match(contents.before_install[1], /npm.+i.+dredd/) + assert.equal(contents.before_install.length, 2) + }) it('adds a command to run Dredd', () => { - const contents = { before_script: ['pytest'] }; - updateTravisCI(createOptions(contents)); + const contents = { before_script: ['pytest'] } + updateTravisCI(createOptions(contents)) - assert.deepEqual(contents.before_script, ['pytest', 'dredd']); - }); -}); + assert.deepEqual(contents.before_script, ['pytest', 'dredd']) + }) +}) diff --git a/test/unit/init/updateWercker-test.js b/test/unit/init/updateWercker-test.js index 4b6830a0e..bf3aef558 100644 --- a/test/unit/init/updateWercker-test.js +++ b/test/unit/init/updateWercker-test.js @@ -1,34 +1,30 @@ -const { assert } = require('chai'); - -const { _updateWercker: updateWercker } = require('../../../lib/init'); +import { assert } from 'chai' +import { updateWercker } from '../../../lib/init' function createOptions(contents) { - return { editYaml: (file, update) => update(contents) }; + return { editYaml: (file, update) => update(contents) } } - describe('init._updateWercker()', () => { it('is able to create a new config file', () => { - const contents = {}; - updateWercker(createOptions(contents)); + const contents = {} + updateWercker(createOptions(contents)) - assert.include(JSON.stringify(contents), 'dredd'); - }); + assert.include(JSON.stringify(contents), 'dredd') + }) it('adds commands to install and run Dredd', () => { const contents 
= { build: { - steps: [ - { script: { name: 'pipenv-install', code: 'pipenv install' } }, - ], - }, - }; - updateWercker(createOptions(contents)); - - assert.equal(contents.build.steps.length, 3); - assert.match(contents.build.steps[0].script.code, /npm.+i.+dredd/); - assert.equal(contents.build.steps[1].script.code, 'pipenv install'); - assert.equal(contents.build.steps[2].script.code, 'dredd'); - }); -}); + steps: [{ script: { name: 'pipenv-install', code: 'pipenv install' } }] + } + } + updateWercker(createOptions(contents)) + + assert.equal(contents.build.steps.length, 3) + assert.match(contents.build.steps[0].script.code, /npm.+i.+dredd/) + assert.equal(contents.build.steps[1].script.code, 'pipenv install') + assert.equal(contents.build.steps[2].script.code, 'dredd') + }) +}) diff --git a/test/unit/isURL-test.js b/test/unit/isURL-test.js index 08fccf9a3..a1d38828a 100644 --- a/test/unit/isURL-test.js +++ b/test/unit/isURL-test.js @@ -1,26 +1,25 @@ -const { assert } = require('chai'); - -const isURL = require('../../lib/isURL'); +import { assert } from 'chai' +import isURL from '../../lib/isURL' describe('isURL()', () => { it('recognizes HTTP URL', () => { - assert.isTrue(isURL('http://example.com')); - }); + assert.isTrue(isURL('http://example.com')) + }) it('recognizes HTTPS URL', () => { - assert.isTrue(isURL('https://example.com')); - }); + assert.isTrue(isURL('https://example.com')) + }) it('returns false for UNIX paths', () => { - assert.isFalse(isURL('/home/honza')); - }); + assert.isFalse(isURL('/home/honza')) + }) it('returns false for Windows paths', () => { - assert.isFalse(isURL('C:\\Users\\Honza')); - }); + assert.isFalse(isURL('C:\\Users\\Honza')) + }) it('returns false for file://', () => { - assert.isFalse(isURL('file:///home/honza')); - }); -}); + assert.isFalse(isURL('file:///home/honza')) + }) +}) diff --git a/test/unit/performRequest/createTransactionResponse-test.js b/test/unit/performRequest/createTransactionResponse-test.js index 1caee05a7..67633912e 100644 --- a/test/unit/performRequest/createTransactionResponse-test.js +++ b/test/unit/performRequest/createTransactionResponse-test.js @@ -1,47 +1,48 @@ -const { assert } = require('chai'); - -const { - _createTransactionResponse: createTransactionResponse, -} = require('../../../lib/performRequest'); +import { assert } from 'chai' +import { createTransactionResponse } from '../../../lib/performRequest' describe('performRequest._createTransactionResponse()', () => { - const res = { statusCode: 200, headers: {} }; + const res = { statusCode: 200, headers: {} } - it('sets the status code', () => assert.deepEqual( - createTransactionResponse(res), - { statusCode: 200, headers: {} } - )); + it('sets the status code', () => + assert.deepEqual(createTransactionResponse(res), { + statusCode: 200, + headers: {} + })) it('copies the headers', () => { - const headers = { 'Content-Type': 'application/json' }; + const headers = { 'Content-Type': 'application/json' } const transactionRes = createTransactionResponse({ statusCode: 200, - headers, - }); - headers['X-Header'] = 'abcd'; + headers + }) + headers['X-Header'] = 'abcd' - assert.deepEqual( - transactionRes, - { statusCode: 200, headers: { 'Content-Type': 'application/json' } } - ); - }); - it('does not set empty body', () => assert.deepEqual( - createTransactionResponse(res, Buffer.from([])), - { statusCode: 200, headers: {} } - )); - it('sets textual body as a string with UTF-8 encoding', () => assert.deepEqual( - createTransactionResponse(res, Buffer.from('řeřicha')), 
- { - statusCode: 200, headers: {}, body: 'řeřicha', bodyEncoding: 'utf-8', - } - )); - it('sets binary body as a string with Base64 encoding', () => assert.deepEqual( - createTransactionResponse(res, Buffer.from([0xFF, 0xBE])), - { + assert.deepEqual(transactionRes, { + statusCode: 200, + headers: { 'Content-Type': 'application/json' } + }) + }) + it('does not set empty body', () => + assert.deepEqual(createTransactionResponse(res, Buffer.from([])), { + statusCode: 200, + headers: {} + })) + it('sets textual body as a string with UTF-8 encoding', () => + assert.deepEqual(createTransactionResponse(res, Buffer.from('řeřicha')), { statusCode: 200, headers: {}, - body: Buffer.from([0xFF, 0xBE]).toString('base64'), - bodyEncoding: 'base64', - } - )); -}); + body: 'řeřicha', + bodyEncoding: 'utf-8' + })) + it('sets binary body as a string with Base64 encoding', () => + assert.deepEqual( + createTransactionResponse(res, Buffer.from([0xff, 0xbe])), + { + statusCode: 200, + headers: {}, + body: Buffer.from([0xff, 0xbe]).toString('base64'), + bodyEncoding: 'base64' + } + )) +}) diff --git a/test/unit/performRequest/detectBodyEncoding-test.js b/test/unit/performRequest/detectBodyEncoding-test.js index beb624d07..b66a031ae 100644 --- a/test/unit/performRequest/detectBodyEncoding-test.js +++ b/test/unit/performRequest/detectBodyEncoding-test.js @@ -1,21 +1,15 @@ -const { assert } = require('chai'); - -const { - _detectBodyEncoding: detectBodyEncoding, -} = require('../../../lib/performRequest'); +import { assert } from 'chai' +import { detectBodyEncoding } from '../../../lib/performRequest' describe('performRequest._detectBodyEncoding()', () => { - it('detects binary content as Base64', () => assert.equal( - detectBodyEncoding(Buffer.from([0xFF, 0xEF, 0xBF, 0xBE])), - 'base64' - )); - it('detects textual content as UTF-8', () => assert.equal( - detectBodyEncoding(Buffer.from('řeřicha')), - 'utf-8' - )); - it('detects no content as UTF-8', () => assert.equal( - detectBodyEncoding(Buffer.from([])), - 'utf-8' - )); -}); + it('detects binary content as Base64', () => + assert.equal( + detectBodyEncoding(Buffer.from([0xff, 0xef, 0xbf, 0xbe])), + 'base64' + )) + it('detects textual content as UTF-8', () => + assert.equal(detectBodyEncoding(Buffer.from('řeřicha')), 'utf-8')) + it('detects no content as UTF-8', () => + assert.equal(detectBodyEncoding(Buffer.from([])), 'utf-8')) +}) diff --git a/test/unit/performRequest/getBodyAsBuffer-test.js b/test/unit/performRequest/getBodyAsBuffer-test.js index 8e644294d..9737f76d9 100644 --- a/test/unit/performRequest/getBodyAsBuffer-test.js +++ b/test/unit/performRequest/getBodyAsBuffer-test.js @@ -1,43 +1,44 @@ -const { assert } = require('chai'); - -const { - _getBodyAsBuffer: getBodyAsBuffer, -} = require('../../../lib/performRequest'); +import { assert } from 'chai' +import { getBodyAsBuffer } from '../../../lib/performRequest' describe('performRequest._getBodyAsBuffer()', () => { describe('when the body is a Buffer', () => { it('returns the body unmodified', () => { - const body = Buffer.from([0xFF, 0xEF, 0xBF, 0xBE]); - assert.equal(getBodyAsBuffer(body), body); - }); + const body = Buffer.from([0xff, 0xef, 0xbf, 0xbe]) + assert.equal(getBodyAsBuffer(body), body) + }) it('ignores encoding', () => { - const body = Buffer.from([0xFF, 0xEF, 0xBF, 0xBE]); - assert.equal(getBodyAsBuffer(body, 'utf-8'), body); - }); - }); - - [undefined, null, ''].forEach((body) => { + const body = Buffer.from([0xff, 0xef, 0xbf, 0xbe]) + assert.equal(getBodyAsBuffer(body, 'utf-8'), 
body) + }) + }) + ;[undefined, null, ''].forEach((body) => { describe(`when the body is ${JSON.stringify(body)}`, () => { - it('returns empty Buffer without encoding', () => assert.deepEqual(getBodyAsBuffer(body), Buffer.from([]))); - it('returns empty Buffer with encoding set to UTF-8', () => assert.deepEqual(getBodyAsBuffer(body, 'utf-8'), Buffer.from([]))); - it('returns empty Buffer with encoding set to Base64', () => assert.deepEqual(getBodyAsBuffer(body, 'base64'), Buffer.from([]))); - }); - }); + it('returns empty Buffer without encoding', () => + assert.deepEqual(getBodyAsBuffer(body), Buffer.from([]))) + it('returns empty Buffer with encoding set to UTF-8', () => + assert.deepEqual(getBodyAsBuffer(body, 'utf-8'), Buffer.from([]))) + it('returns empty Buffer with encoding set to Base64', () => + assert.deepEqual(getBodyAsBuffer(body, 'base64'), Buffer.from([]))) + }) + }) describe('when the body is neither Buffer or string', () => { it('gracefully stringifies the input', () => { - const body = new Error('Ouch!'); - assert.deepEqual(getBodyAsBuffer(body), Buffer.from('Error: Ouch!')); - }); - }); + const body = new Error('Ouch!') + assert.deepEqual(getBodyAsBuffer(body), Buffer.from('Error: Ouch!')) + }) + }) describe('when the body is a string', () => { - it('assumes UTF-8 without encoding', () => assert.deepEqual(getBodyAsBuffer('abc'), Buffer.from('abc'))); - it('respects encoding set to UTF-8', () => assert.deepEqual(getBodyAsBuffer('abc', 'utf-8'), Buffer.from('abc'))); + it('assumes UTF-8 without encoding', () => + assert.deepEqual(getBodyAsBuffer('abc'), Buffer.from('abc'))) + it('respects encoding set to UTF-8', () => + assert.deepEqual(getBodyAsBuffer('abc', 'utf-8'), Buffer.from('abc'))) it('respects encoding set to Base64', () => { - const body = Buffer.from('abc').toString('base64'); - assert.deepEqual(getBodyAsBuffer(body, 'base64'), Buffer.from('abc')); - }); - }); -}); + const body = Buffer.from('abc').toString('base64') + assert.deepEqual(getBodyAsBuffer(body, 'base64'), Buffer.from('abc')) + }) + }) +}) diff --git a/test/unit/performRequest/normalizeBodyEncoding-test.js b/test/unit/performRequest/normalizeBodyEncoding-test.js index 78a69f759..dbcee1737 100644 --- a/test/unit/performRequest/normalizeBodyEncoding-test.js +++ b/test/unit/performRequest/normalizeBodyEncoding-test.js @@ -1,15 +1,22 @@ -const { assert } = require('chai'); - -const { - _normalizeBodyEncoding: normalizeBodyEncoding, -} = require('../../../lib/performRequest'); +import { assert } from 'chai' +import { normalizeBodyEncoding } from '../../../lib/performRequest' describe('performRequest._normalizeBodyEncoding()', () => { - ['utf-8', 'utf8', 'UTF-8', 'UTF8'].forEach(value => it(`normalizes ${JSON.stringify(value)} to utf-8`, () => assert.equal(normalizeBodyEncoding(value), 'utf-8'))); - ['base64', 'Base64'].forEach(value => it(`normalizes ${JSON.stringify(value)} to base64`, () => assert.equal(normalizeBodyEncoding(value), 'base64'))); - [undefined, null, '', false].forEach(value => it(`defaults ${JSON.stringify(value)} to utf-8`, () => assert.equal(normalizeBodyEncoding(value), 'utf-8'))); - it('throws an error on "latin2"', () => assert.throws(() => { - normalizeBodyEncoding('latin2'); - }, /^unsupported encoding/i)); -}); + ;['utf-8', 'utf8', 'UTF-8', 'UTF8'].forEach((value) => + it(`normalizes ${JSON.stringify(value)} to utf-8`, () => + assert.equal(normalizeBodyEncoding(value), 'utf-8')) + ) + ;['base64', 'Base64'].forEach((value) => + it(`normalizes ${JSON.stringify(value)} to base64`, () 
=> + assert.equal(normalizeBodyEncoding(value), 'base64')) + ) + ;[undefined, null, '', false].forEach((value) => + it(`defaults ${JSON.stringify(value)} to utf-8`, () => + assert.equal(normalizeBodyEncoding(value), 'utf-8')) + ) + it('throws an error on "latin2"', () => + assert.throws(() => { + normalizeBodyEncoding('latin2') + }, /^unsupported encoding/i)) +}) diff --git a/test/unit/performRequest/normalizeContentLengthHeader-test.js b/test/unit/performRequest/normalizeContentLengthHeader-test.js index 98395cfdf..517ecbc62 100644 --- a/test/unit/performRequest/normalizeContentLengthHeader-test.js +++ b/test/unit/performRequest/normalizeContentLengthHeader-test.js @@ -1,99 +1,131 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); - -const { - _normalizeContentLengthHeader: normalizeContentLengthHeader, -} = require('../../../lib/performRequest'); +import sinon from 'sinon' +import { assert } from 'chai' +import { normalizeContentLengthHeader } from '../../../lib/performRequest' describe('performRequest._normalizeContentLengthHeader()', () => { - let headers; + let headers - const logger = { warn: sinon.spy() }; - beforeEach(() => logger.warn.resetHistory()); + const logger = { warn: sinon.spy() } + beforeEach(() => logger.warn.resetHistory()) describe('when there is no body and no Content-Length', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({}, Buffer.from(''), { logger }); - }); + headers = normalizeContentLengthHeader({}, Buffer.from(''), { logger }) + }) - it('does not warn', () => assert.isFalse(logger.warn.called)); - it('has the Content-Length header set to 0', () => assert.deepPropertyVal(headers, 'Content-Length', '0')); - }); + it('does not warn', () => assert.isFalse(logger.warn.called)) + it('has the Content-Length header set to 0', () => + assert.deepPropertyVal(headers, 'Content-Length', '0')) + }) describe('when there is no body and the Content-Length is set to 0', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({ - 'Content-Length': '0', - }, Buffer.from(''), { logger }); - }); - - it('does not warn', () => assert.isFalse(logger.warn.called)); - it('has the Content-Length header set to 0', () => assert.deepPropertyVal(headers, 'Content-Length', '0')); - }); + headers = normalizeContentLengthHeader( + { + 'Content-Length': '0' + }, + Buffer.from(''), + { logger } + ) + }) + + it('does not warn', () => assert.isFalse(logger.warn.called)) + it('has the Content-Length header set to 0', () => + assert.deepPropertyVal(headers, 'Content-Length', '0')) + }) describe('when there is body and the Content-Length is not set', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({}, Buffer.from('abcd'), { logger }); - }); + headers = normalizeContentLengthHeader({}, Buffer.from('abcd'), { + logger + }) + }) - it('does not warn', () => assert.isFalse(logger.warn.called)); - it('has the Content-Length header set to 4', () => assert.deepPropertyVal(headers, 'Content-Length', '4')); - }); + it('does not warn', () => assert.isFalse(logger.warn.called)) + it('has the Content-Length header set to 4', () => + assert.deepPropertyVal(headers, 'Content-Length', '4')) + }) describe('when there is body and the Content-Length is correct', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({ - 'Content-Length': '4', - }, Buffer.from('abcd'), { logger }); - }); - - it('does not warn', () => assert.isFalse(logger.warn.called)); - it('has the Content-Length header set to 4', () => 
assert.deepPropertyVal(headers, 'Content-Length', '4')); - }); + headers = normalizeContentLengthHeader( + { + 'Content-Length': '4' + }, + Buffer.from('abcd'), + { logger } + ) + }) + + it('does not warn', () => assert.isFalse(logger.warn.called)) + it('has the Content-Length header set to 4', () => + assert.deepPropertyVal(headers, 'Content-Length', '4')) + }) describe('when there is no body and the Content-Length is wrong', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({ - 'Content-Length': '42', - }, Buffer.from(''), { logger }); - }); - - it('warns about the discrepancy', () => assert.match(logger.warn.lastCall.args[0], /but the real body length is/)); - it('has the Content-Length header set to 0', () => assert.deepPropertyVal(headers, 'Content-Length', '0')); - }); + headers = normalizeContentLengthHeader( + { + 'Content-Length': '42' + }, + Buffer.from(''), + { logger } + ) + }) + + it('warns about the discrepancy', () => + assert.match(logger.warn.lastCall.args[0], /but the real body length is/)) + it('has the Content-Length header set to 0', () => + assert.deepPropertyVal(headers, 'Content-Length', '0')) + }) describe('when there is body and the Content-Length is wrong', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({ - 'Content-Length': '42', - }, Buffer.from('abcd'), { logger }); - }); - - it('warns about the discrepancy', () => assert.match(logger.warn.lastCall.args[0], /but the real body length is/)); - it('has the Content-Length header set to 4', () => assert.deepPropertyVal(headers, 'Content-Length', '4')); - }); + headers = normalizeContentLengthHeader( + { + 'Content-Length': '42' + }, + Buffer.from('abcd'), + { logger } + ) + }) + + it('warns about the discrepancy', () => + assert.match(logger.warn.lastCall.args[0], /but the real body length is/)) + it('has the Content-Length header set to 4', () => + assert.deepPropertyVal(headers, 'Content-Length', '4')) + }) describe('when the existing header name has unusual casing', () => { beforeEach(() => { - headers = normalizeContentLengthHeader({ - 'CoNtEnT-lEnGtH': '4', - }, Buffer.from('abcd'), { logger }); - }); - - it('has the CoNtEnT-lEnGtH header set to 4', () => assert.deepEqual(headers, { 'CoNtEnT-lEnGtH': '4' })); - }); + headers = normalizeContentLengthHeader( + { + 'CoNtEnT-lEnGtH': '4' + }, + Buffer.from('abcd'), + { logger } + ) + }) + + it('has the CoNtEnT-lEnGtH header set to 4', () => + assert.deepEqual(headers, { 'CoNtEnT-lEnGtH': '4' })) + }) describe('when there are modifications to the headers', () => { - const originalHeaders = {}; + const originalHeaders = {} beforeEach(() => { - headers = normalizeContentLengthHeader(originalHeaders, Buffer.from('abcd'), { logger }); - }); + headers = normalizeContentLengthHeader( + originalHeaders, + Buffer.from('abcd'), + { logger } + ) + }) it('does not modify the original headers object', () => { - assert.deepEqual(originalHeaders, {}); - assert.deepEqual(headers, { 'Content-Length': '4' }); - }); - }); -}); + assert.deepEqual(originalHeaders, {}) + assert.deepEqual(headers, { 'Content-Length': '4' }) + }) + }) +}) diff --git a/test/unit/performRequest/performRequest-test.js b/test/unit/performRequest/performRequest-test.js index 192959de8..6e2a5f0b5 100644 --- a/test/unit/performRequest/performRequest-test.js +++ b/test/unit/performRequest/performRequest-test.js @@ -1,137 +1,145 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); - -const performRequest = require('../../../lib/performRequest'); +import 
sinon from 'sinon' +import { assert } from 'chai' +import performRequest from '../../../lib/performRequest' describe('performRequest()', () => { - const uri = 'http://example.com/42'; - const uriS = 'https://example.com/42'; + const uri = 'http://example.com/42' + const uriS = 'https://example.com/42' const transactionReq = { method: 'POST', headers: { 'Content-Type': 'text/plain' }, - body: 'Hello', - }; - const res = { statusCode: 200, headers: { 'Content-Type': 'text/plain' } }; - const request = sinon.stub().callsArgWithAsync(1, null, res, Buffer.from('Bye')); - const logger = { debug: sinon.spy() }; + body: 'Hello' + } + const res = { statusCode: 200, headers: { 'Content-Type': 'text/plain' } } + const request = sinon + .stub() + .callsArgWithAsync(1, null, res, Buffer.from('Bye')) + const logger = { debug: sinon.spy() } - beforeEach(() => { logger.debug.resetHistory(); }); + beforeEach(() => { + logger.debug.resetHistory() + }) it('does not modify the original HTTP options object', (done) => { - const httpOptions = { json: true }; + const httpOptions = { json: true } performRequest(uri, transactionReq, { http: httpOptions, request }, () => { - assert.deepEqual(httpOptions, { json: true }); - done(); - }); - }); + assert.deepEqual(httpOptions, { json: true }) + done() + }) + }) it('does not allow to override the hardcoded HTTP options', (done) => { - performRequest(uri, transactionReq, { http: { proxy: true }, request }, () => { - assert.isFalse(request.firstCall.args[0].proxy); - done(); - }); - }); + performRequest( + uri, + transactionReq, + { http: { proxy: true }, request }, + () => { + assert.isFalse(request.firstCall.args[0].proxy) + done() + } + ) + }) it('forbids the HTTP client library to respect proxy settings', (done) => { performRequest(uri, transactionReq, { request }, () => { - assert.isFalse(request.firstCall.args[0].proxy); - done(); - }); - }); + assert.isFalse(request.firstCall.args[0].proxy) + done() + }) + }) it('forbids the HTTP client library to follow redirects', (done) => { performRequest(uri, transactionReq, { request }, () => { - assert.isFalse(request.firstCall.args[0].followRedirect); - done(); - }); - }); + assert.isFalse(request.firstCall.args[0].followRedirect) + done() + }) + }) it('propagates the HTTP method to the HTTP client library', (done) => { performRequest(uri, transactionReq, { request }, () => { - assert.equal(request.firstCall.args[0].method, transactionReq.method); - done(); - }); - }); + assert.equal(request.firstCall.args[0].method, transactionReq.method) + done() + }) + }) it('propagates the URI to the HTTP client library', (done) => { performRequest(uri, transactionReq, { request }, () => { - assert.equal(request.firstCall.args[0].uri, uri); - done(); - }); - }); + assert.equal(request.firstCall.args[0].uri, uri) + done() + }) + }) it('propagates the HTTP request body as a Buffer', (done) => { performRequest(uri, transactionReq, { request }, () => { - assert.deepEqual(request.firstCall.args[0].body, Buffer.from('Hello')); - done(); - }); - }); + assert.deepEqual(request.firstCall.args[0].body, Buffer.from('Hello')) + done() + }) + }) it('handles exceptions when preparing the HTTP request body', (done) => { const invalidTransactionReq = Object.assign( { bodyEncoding: 'latin2' }, transactionReq - ); + ) performRequest(uri, invalidTransactionReq, { request }, (err) => { - assert.instanceOf(err, Error); - done(); - }); - }); + assert.instanceOf(err, Error) + done() + }) + }) it('logs before performing the HTTP request', (done) => { 
performRequest(uri, transactionReq, { request, logger }, () => { assert.equal( logger.debug.firstCall.args[0], `Performing HTTP request to the server under test: POST ${uri}` - ); - done(); - }); - }); + ) + done() + }) + }) it('logs before performing the HTTPS request', (done) => { performRequest(uriS, transactionReq, { request, logger }, () => { assert.equal( logger.debug.firstCall.args[0], `Performing HTTPS request to the server under test: POST ${uriS}` - ); - done(); - }); - }); + ) + done() + }) + }) it('logs on receiving the HTTP response', (done) => { performRequest(uri, transactionReq, { request, logger }, () => { assert.equal( logger.debug.lastCall.args[0], 'Handling HTTP response from the server under test' - ); - done(); - }); - }); + ) + done() + }) + }) it('logs on receiving the HTTPS response', (done) => { performRequest(uriS, transactionReq, { request, logger }, () => { assert.equal( logger.debug.lastCall.args[0], 'Handling HTTPS response from the server under test' - ); - done(); - }); - }); + ) + done() + }) + }) it('handles exceptions when requesting the server under test', (done) => { - const error = new Error('Ouch!'); - const invalidRequest = sinon.stub().throws(error); + const error = new Error('Ouch!') + const invalidRequest = sinon.stub().throws(error) performRequest(uri, transactionReq, { request: invalidRequest }, (err) => { - assert.deepEqual(err, error); - done(); - }); - }); + assert.deepEqual(err, error) + done() + }) + }) it('handles errors when requesting the server under test', (done) => { - const error = new Error('Ouch!'); - const invalidRequest = sinon.stub().callsArgWithAsync(1, error); + const error = new Error('Ouch!') + const invalidRequest = sinon.stub().callsArgWithAsync(1, error) performRequest(uri, transactionReq, { request: invalidRequest }, (err) => { - assert.deepEqual(err, error); - done(); - }); - }); + assert.deepEqual(err, error) + done() + }) + }) it('provides the real HTTP response object', (done) => { performRequest(uri, transactionReq, { request }, (err, real) => { assert.deepEqual(real, { statusCode: 200, headers: { 'Content-Type': 'text/plain' }, body: 'Bye', - bodyEncoding: 'utf-8', - }); - done(); - }); - }); -}); + bodyEncoding: 'utf-8' + }) + done() + }) + }) +}) diff --git a/test/unit/prettifyResponse-test.js b/test/unit/prettifyResponse-test.js index eddcc786f..aac5e8745 100644 --- a/test/unit/prettifyResponse-test.js +++ b/test/unit/prettifyResponse-test.js @@ -1,71 +1,73 @@ -const sinon = require('sinon'); -const { assert } = require('chai'); +import sinon from 'sinon' +import { assert } from 'chai' -const loggerStub = require('../../lib/logger'); -const prettifyResponse = require('../../lib/prettifyResponse'); +import loggerStub from '../../lib/logger' +import prettifyResponse from '../../lib/prettifyResponse' describe('prettifyResponse(response)', () => { describe('with a real object without any circular references', () => { it('should print JSON.stringified application/json header based response', () => { const output = prettifyResponse({ headers: { - 'content-type': 'application/json', + 'content-type': 'application/json' }, - body: - { a: 'b' }, - }); + body: { a: 'b' } + }) const expectedOutput = `\ headers: \n content-type: application/json\n body: \n{ "a": "b" }\n\ -`; - assert.equal(output, expectedOutput); - }); - +` + assert.equal(output, expectedOutput) + }) it('should print indented XML when content-type is text/html', () => { const output = prettifyResponse({ headers: { - 'content-type': 'text/html', + 
'content-type': 'text/html' }, - body: '<div>before paragraph <p> in para <i>italics</i><br><b>bold</b> afterwords </p></div>', - }); + body: + '<div>before paragraph <p> in para <i>italics</i><br><b>bold</b> afterwords </p></div>' + }) const expectedOutput = `\ headers: \n content-type: text/html\n body: \n<div>before paragraph <p> in para <i>italics</i> <br><b>bold</b> afterwords </p> </div> -
\n`; - assert.equal(output, expectedOutput); - }); - }); +\n` + assert.equal(output, expectedOutput) + }) + }) describe('with an object in body that references itself (circular)', () => { before(() => { - sinon.stub(loggerStub, 'debug'); + sinon.stub(loggerStub, 'debug') - const body = { a: 'b' }; - body.c = body; + const body = { a: 'b' } + body.c = body prettifyResponse({ headers: { - 'content-type': 'application/json', + 'content-type': 'application/json' }, - body, - }); - }); + body + }) + }) - after(() => sinon.stub(loggerStub.debug.restore())); + after(() => sinon.stub(loggerStub.debug.restore())) - it('should\'ve printed into debug', () => { - assert.isOk(loggerStub.debug.called); - assert.isObject(loggerStub.debug.firstCall); - assert.isArray(loggerStub.debug.firstCall.args); - assert.lengthOf(loggerStub.debug.firstCall.args, 1); - assert.equal(loggerStub.debug.firstCall.args[0], 'Could not stringify: [object Object]'); - }); - }); -}); + it("should've printed into debug", () => { + assert.isOk(loggerStub.debug.called) + assert.isObject(loggerStub.debug.firstCall) + assert.isArray(loggerStub.debug.firstCall.args) + assert.lengthOf(loggerStub.debug.firstCall.args, 1) + assert.equal( + loggerStub.debug.firstCall.args[0], + 'Could not stringify: [object Object]' + ) + }) + }) +}) diff --git a/test/unit/reporters/ApiaryReporter-test.js b/test/unit/reporters/ApiaryReporter-test.js index 0dc904139..447ad588a 100644 --- a/test/unit/reporters/ApiaryReporter-test.js +++ b/test/unit/reporters/ApiaryReporter-test.js @@ -1,55 +1,56 @@ -const clone = require('clone'); -const nock = require('nock'); -const proxyquire = require('proxyquire'); -const sinon = require('sinon'); -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import clone from 'clone' +import nock from 'nock' +import proxyquire from 'proxyquire' +import sinon from 'sinon' +import { assert } from 'chai' +import { EventEmitter } from 'events' -const apiDescriptions = require('../../fixtures/apiDescriptions'); -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import apiDescriptions from '../../fixtures/apiDescriptions' +import loggerStub from '../../../lib/logger' +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' const ApiaryReporter = proxyquire('../../../lib/reporters/ApiaryReporter', { '../logger': loggerStub, - './reporterOutputLogger': reporterOutputLoggerStub, -}); + './reporterOutputLogger': reporterOutputLoggerStub +}).default -const PORT = 9876; -nock.enableNetConnect(); +const PORT = 9876 +nock.enableNetConnect() describe('ApiaryReporter', () => { - let env = {}; + let env = {} beforeEach(() => { - sinon.stub(loggerStub, 'debug'); - sinon.stub(reporterOutputLoggerStub, 'complete'); - }); + sinon.stub(loggerStub, 'debug') + sinon.stub(reporterOutputLoggerStub, 'complete') + }) afterEach(() => { - sinon.stub(loggerStub.debug.restore()); - sinon.stub(reporterOutputLoggerStub.complete.restore()); - }); + sinon.stub(loggerStub.debug.restore()) + sinon.stub(reporterOutputLoggerStub.complete.restore()) + }) - before(() => nock.disableNetConnect()); + before(() => nock.disableNetConnect()) - after(() => nock.enableNetConnect()); + after(() => nock.enableNetConnect()) describe('without API key or without suite', () => { - let test = {}; - let emitter = {}; + let test = {} + let emitter = {} beforeEach((done) => { - emitter = new EventEmitter(); - env = { CIRCLE_VARIABLE: 
'CIRCLE_VALUE' }; - env.APIARY_API_URL = `https://127.0.0.1:${PORT}`; - delete env.APIARY_API_KEY; - delete env.APIARY_API_NAME; + emitter = new EventEmitter() + env = { CIRCLE_VARIABLE: 'CIRCLE_VALUE' } + env.APIARY_API_URL = `https://127.0.0.1:${PORT}` + delete env.APIARY_API_KEY + delete env.APIARY_API_NAME test = { status: 'fail', title: 'POST /machines', - message: "headers: Value of the ‘content-type’ must be application/json.\nbody: No validator found for real data media type 'text/plain' and expected data media type 'application/json'.\nstatusCode: Real and expected data does not match.\n", + message: + "headers: Value of the ‘content-type’ must be application/json.\nbody: No validator found for real data media type 'text/plain' and expected data media type 'application/json'.\nstatusCode: Real and expected data does not match.\n", - startedAt: (1234567890 * 1000), // JavaScript Date.now() timestamp (UNIX-like timestamp * 1000 precision) + startedAt: 1234567890 * 1000, // JavaScript Date.now() timestamp (UNIX-like timestamp * 1000 precision) origin: { filename: './test/fixtures/multifile/greeting.apib', @@ -57,25 +58,26 @@ describe('ApiaryReporter', () => { resourceGroupName: '', resourceName: '/greeting', actionName: 'GET', - exampleName: '', + exampleName: '' }, actual: { statusCode: 400, headers: { - 'content-type': 'text/plain', + 'content-type': 'text/plain' }, - body: 'Foo bar', + body: 'Foo bar' }, expected: { headers: { - 'content-type': 'application/json', + 'content-type': 'application/json' }, - body: '{\n "type": "bulldozer",\n "name": "willy",\n "id": "5229c6e8e4b0bd7dbb07e29c"\n}\n', - status: '202', + body: + '{\n "type": "bulldozer",\n "name": "willy",\n "id": "5229c6e8e4b0bd7dbb07e29c"\n}\n', + status: '202' }, request: { @@ -83,20 +85,21 @@ describe('ApiaryReporter', () => { headers: { 'Content-Type': 'application/json', 'User-Agent': 'Dredd/0.2.1 (Darwin 13.0.0; x64)', - 'Content-Length': 44, + 'Content-Length': 44 }, uri: '/machines', - method: 'POST', + method: 'POST' }, results: { headers: { - results: [{ - pointer: '/content-type', - severity: 'error', - message: 'Value of the ‘content-type’ must be application/json.', - }, + results: [ + { + pointer: '/content-type', + severity: 'error', + message: 'Value of the ‘content-type’ must be application/json.' 
+ } ], realType: 'application/vnd.apiary.http-headers+json', expectedType: 'application/vnd.apiary.http-headers+json', @@ -107,26 +110,29 @@ describe('ApiaryReporter', () => { propertyValue: 'text/plain', attributeName: 'enum', attributeValue: ['application/json'], - message: 'Value of the ‘content-type’ must be application/json.', + message: + 'Value of the ‘content-type’ must be application/json.', validator: 'enum', validatorName: 'enum', - validatorValue: ['application/json'], + validatorValue: ['application/json'] }, - length: 1, - }, + length: 1 + } }, body: { - results: [{ - message: "No validator found for real data media type 'text/plain' and expected data media type 'application/json'.", - severity: 'error', - }, + results: [ + { + message: + "No validator found for real data media type 'text/plain' and expected data media type 'application/json'.", + severity: 'error' + } ], realType: 'text/plain', expectedType: 'application/json', validator: null, - rawData: null, + rawData: null }, statusCode: { @@ -134,637 +140,822 @@ describe('ApiaryReporter', () => { expectedType: 'text/vnd.apiary.status-code', validator: 'TextDiff', rawData: '@@ -1,3 +1,9 @@\n-400\n+undefined\n', - results: [{ - severity: 'error', - message: 'Real and expected data does not match.', - }, - ], - }, - }, - }; + results: [ + { + severity: 'error', + message: 'Real and expected data does not match.' + } + ] + } + } + } - nock.disableNetConnect(); + nock.disableNetConnect() - done(); - }); + done() + }) afterEach((done) => { - nock.enableNetConnect(); - nock.cleanAll(); - done(); - }); + nock.enableNetConnect() + nock.cleanAll() + done() + }) describe('constructor', () => { describe('when custom settings contain API URL without trailing slash', () => { const custom = { apiaryReporterEnv: env, - apiaryApiUrl: 'https://api.example.com:1234', - }; + apiaryApiUrl: 'https://api.example.com:1234' + } it('uses the provided API URL in configuration', () => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }) assert.equal( apiaryReporter.configuration.apiUrl, 'https://api.example.com:1234' - ); - }); - }); + ) + }) + }) describe('when custom settings contain API URL with trailing slash', () => { const custom = { apiaryReporterEnv: env, - apiaryApiUrl: 'https://api.example.com:1234/', - }; + apiaryApiUrl: 'https://api.example.com:1234/' + } it('uses the provided API URL in configuration, without trailing slash', () => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }) assert.equal( apiaryReporter.configuration.apiUrl, 'https://api.example.com:1234' - ); - }); - }); - }); + ) + }) + }) + }) describe('_performRequestAsync', () => { describe('when custom settings contain API URL without trailing slash', () => { const custom = { apiaryReporterEnv: env, - apiaryApiUrl: 'https://api.example.com:1234', - }; + apiaryApiUrl: 'https://api.example.com:1234' + } it('should use API URL without double slashes', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }) apiaryReporter._performRequestAsync('/', 'POST', '', () => { - assert.isOk(loggerStub.debug.calledWithMatch('POST 
https://api.example.com:1234/ (without body)')); - done(); - }); - }); - }); + assert.isOk( + loggerStub.debug.calledWithMatch( + 'POST https://api.example.com:1234/ (without body)' + ) + ) + done() + }) + }) + }) describe('when custom settings contain API URL with trailing slash', () => { const custom = { apiaryReporterEnv: env, - apiaryApiUrl: 'https://api.example.com:1234/', - }; + apiaryApiUrl: 'https://api.example.com:1234/' + } - describe('when provided with root path', () => it('should use API URL without double slashes', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }); - apiaryReporter._performRequestAsync('/', 'POST', '', () => { - assert.isOk(loggerStub.debug.calledWithMatch('POST https://api.example.com:1234/ (without body)')); - done(); - }); - })); - - describe('when provided with non-root path', () => it('should use API URL without double slashes', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }); - apiaryReporter._performRequestAsync('/hello?q=1', 'POST', '', () => { - assert.isOk(loggerStub.debug.calledWithMatch('POST https://api.example.com:1234/hello?q=1 (without body)')); - done(); - }); - })); - }); + describe('when provided with root path', () => + it('should use API URL without double slashes', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }) + apiaryReporter._performRequestAsync('/', 'POST', '', () => { + assert.isOk( + loggerStub.debug.calledWithMatch( + 'POST https://api.example.com:1234/ (without body)' + ) + ) + done() + }) + })) + + describe('when provided with non-root path', () => + it('should use API URL without double slashes', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter(emitter, {}, { custom }) + apiaryReporter._performRequestAsync( + '/hello?q=1', + 'POST', + '', + () => { + assert.isOk( + loggerStub.debug.calledWithMatch( + 'POST https://api.example.com:1234/hello?q=1 (without body)' + ) + ) + done() + } + ) + })) + }) describe('when server is not available', () => { beforeEach(() => { - nock.enableNetConnect(); - nock.cleanAll(); - }); + nock.enableNetConnect() + nock.cleanAll() + }) it('should log human readable message', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) apiaryReporter._performRequestAsync('/', 'POST', '', (error) => { - assert.isNotNull(error); - done(); - }); - }); + assert.isNotNull(error) + done() + }) + }) it('should set server error to true', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) apiaryReporter._performRequestAsync('/', 'POST', '', () => { - assert.isTrue(apiaryReporter.serverError); - done(); - }); - }); - }); - }); + assert.isTrue(apiaryReporter.serverError) + done() + }) + }) + }) + }) describe('when starting', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + let requestBody = null beforeEach(() => { - requestBody = null; - 
const uri = '/apis/public/tests/runs'; - const reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id'; + requestBody = null + const uri = '/apis/public/tests/runs' + const reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id' // This is a hack how to get access to the performed request from nock // nock isn't able to provide it function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .post(uri) - .reply(201, JSON.stringify({ _id: runId, reportUrl })); - }); + .reply(201, JSON.stringify({ _id: runId, reportUrl })) + }) it('should set uuid', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) return emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.uuid); - return done(); - }); - }); + assert.isNotNull(apiaryReporter.uuid) + return done() + }) + }) it('should set start time', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.startedAt); - done(); - }); - }); + assert.isNotNull(apiaryReporter.startedAt) + done() + }) + }) it('should call "create new test run" HTTP resource', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } })); + emitter = new EventEmitter() + new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }) emitter.emit('start', apiDescriptions, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should attach test run ID back to the reporter as remoteId', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.remoteId); - done(); - }); - }); + assert.isNotNull(apiaryReporter.remoteId) + done() + }) + }) it('should attach test run reportUrl to the reporter as reportUrl', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.reportUrl); - done(); - }); - }); + assert.isNotNull(apiaryReporter.reportUrl) + done() + }) + }) it('should have blueprints key in the request and it should be an array and members should have proper structure', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } })); + emitter = new EventEmitter() + new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }) emitter.emit('start', apiDescriptions, () => { - const parsedBody = JSON.parse(requestBody); - 
assert.isArray(parsedBody.blueprints); - assert.lengthOf(parsedBody.blueprints, 1); + const parsedBody = JSON.parse(requestBody) + assert.isArray(parsedBody.blueprints) + assert.lengthOf(parsedBody.blueprints, 1) for (const blueprint of parsedBody.blueprints) { - assert.property(blueprint, 'raw'); - assert.propertyVal(blueprint, 'raw', 'FORMAT: 1A\n\n# Machines API\n\n# Group Machines\n\n# Machines collection [/machines/{id}]\n + Parameters\n - id (number, `1`)\n\n## Get Machines [GET]\n\n- Request (application/json)\n + Parameters\n - id (number, `2`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n\n- Request (application/json)\n + Parameters\n - id (number, `3`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n'); - assert.property(blueprint, 'filename'); - assert.propertyVal(blueprint, 'filename', './test/fixtures/multiple-examples.apib'); - assert.property(blueprint, 'annotations'); - assert.isArray(blueprint.annotations); + assert.property(blueprint, 'raw') + assert.propertyVal( + blueprint, + 'raw', + 'FORMAT: 1A\n\n# Machines API\n\n# Group Machines\n\n# Machines collection [/machines/{id}]\n + Parameters\n - id (number, `1`)\n\n## Get Machines [GET]\n\n- Request (application/json)\n + Parameters\n - id (number, `2`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n\n- Request (application/json)\n + Parameters\n - id (number, `3`)\n\n- Response 200 (application/json; charset=utf-8)\n\n [\n {\n "type": "bulldozer",\n "name": "willy"\n }\n ]\n' + ) + assert.property(blueprint, 'filename') + assert.propertyVal( + blueprint, + 'filename', + './test/fixtures/multiple-examples.apib' + ) + assert.property(blueprint, 'annotations') + assert.isArray(blueprint.annotations) } - done(); - }); - }); + done() + }) + }) it('should have various needed keys in test-run payload sent to apiary', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { server: 'http://my.server.co:8080', custom: { apiaryReporterEnv: env } })); + emitter = new EventEmitter() + new ApiaryReporter( + emitter, + {}, + { + server: 'http://my.server.co:8080', + custom: { apiaryReporterEnv: env } + } + ) emitter.emit('start', apiDescriptions, () => { - const parsedBody = JSON.parse(requestBody); - assert.propertyVal(parsedBody, 'endpoint', 'http://my.server.co:8080'); - done(); - }); - }); + const parsedBody = JSON.parse(requestBody) + assert.propertyVal(parsedBody, 'endpoint', 'http://my.server.co:8080') + done() + }) + }) it('should send the test-run as public one', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { server: 'http://my.server.co:8080', custom: { apiaryReporterEnv: env } })); - emitter.emit('start', apiDescriptions, () => { - const parsedBody = JSON.parse(requestBody); - assert.strictEqual(parsedBody.public, true); - done(); - }); - }); - - describe('serverError is true', () => it('should not do anything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.serverError = true; + emitter = new EventEmitter() + new ApiaryReporter( + emitter, + {}, + { + server: 'http://my.server.co:8080', + custom: { apiaryReporterEnv: env } + } + ) emitter.emit('start', apiDescriptions, () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); + const parsedBody = 
JSON.parse(requestBody) + assert.strictEqual(parsedBody.public, true) + done() + }) + }) + + describe('serverError is true', () => + it('should not do anything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.serverError = true + emitter.emit('start', apiDescriptions, () => { + assert.isFalse(call.isDone()) + done() + }) + })) + }) describe('when adding passing test', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - test = null; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + test = null + let requestBody = null beforeEach(() => { - const uri = `/apis/public/tests/steps?testRunId=${runId}`; + const uri = `/apis/public/tests/steps?testRunId=${runId}` // This is a hack how to get access to the performed request from nock // nock isn't able to provide it function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .post(uri) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test pass', test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should have origin with filename in the request', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test pass', test, () => { - const parsedBody = JSON.parse(requestBody); - assert.property(parsedBody.origin, 'filename'); - done(); - }); - }); + const parsedBody = JSON.parse(requestBody) + assert.property(parsedBody.origin, 'filename') + done() + }) + }) it('should have startedAt timestamp in the request', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - emitter.emit('test pass', test, () => { - const parsedBody = JSON.parse(requestBody); - assert.propertyVal(parsedBody, 'startedAt', (1234567890 * 1000)); - done(); - }); - }); - - describe('serverError is true', () => it('should not do anything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.serverError = true; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test pass', test, () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); + const parsedBody = JSON.parse(requestBody) + assert.propertyVal(parsedBody, 'startedAt', 1234567890 * 1000) + done() + }) + }) + + describe('serverError is true', () => + 
it('should not do anything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.serverError = true + emitter.emit('test pass', test, () => { + assert.isFalse(call.isDone()) + done() + }) + })) + }) describe('when adding failing test', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - test = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + test = null beforeEach(() => { - const uri = `/apis/public/tests/steps?testRunId=${runId}`; + const uri = `/apis/public/tests/steps?testRunId=${runId}` call = nock(env.APIARY_API_URL) .post(uri) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test fail', test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); - - describe('when serverError is true', () => it('should not do anything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.serverError = true; - emitter.emit('test fail', test, () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); + assert.isTrue(call.isDone()) + done() + }) + }) + + describe('when serverError is true', () => + it('should not do anything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.serverError = true + emitter.emit('test fail', test, () => { + assert.isFalse(call.isDone()) + done() + }) + })) + }) describe('when adding skipped test', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - let clonedTest = null; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + let clonedTest = null + let requestBody = null beforeEach(() => { - clonedTest = clone(test); - clonedTest.status = 'skip'; + clonedTest = clone(test) + clonedTest.status = 'skip' - const uri = `/apis/public/tests/steps?testRunId=${runId}`; + const uri = `/apis/public/tests/steps?testRunId=${runId}` // This is a hack how to get access to the performed request from nock // nock isn't able to provide it function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .post(uri) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test skip', clonedTest, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + 
done() + }) + }) it('should send status skipped', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - emitter.emit('test skip', clonedTest, () => { - assert.equal(JSON.parse(requestBody).result, 'skip'); - done(); - }); - }); - - describe('when serverError is true', () => it('should not do anything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.serverError = true; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test skip', clonedTest, () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); - + assert.equal(JSON.parse(requestBody).result, 'skip') + done() + }) + }) + + describe('when serverError is true', () => + it('should not do anything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.serverError = true + emitter.emit('test skip', clonedTest, () => { + assert.isFalse(call.isDone()) + done() + }) + })) + }) describe('when adding test with error', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - test = null; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + test = null + let requestBody = null beforeEach(() => { - const uri = `/apis/public/tests/steps?testRunId=${runId}`; + const uri = `/apis/public/tests/steps?testRunId=${runId}` - test.status = 'error'; + test.status = 'error' // This is a hack how to get access to the performed request from nock // nock isn't able to provide it function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .post(uri) - .reply(201, { _id: runId }); - }); - - const connectionErrors = ['ECONNRESET', 'ENOTFOUND', 'ESOCKETTIMEDOUT', 'ETIMEDOUT', 'ECONNREFUSED', 'EHOSTUNREACH', 'EPIPE']; + .reply(201, { _id: runId }) + }) + + const connectionErrors = [ + 'ECONNRESET', + 'ENOTFOUND', + 'ESOCKETTIMEDOUT', + 'ETIMEDOUT', + 'ECONNREFUSED', + 'EHOSTUNREACH', + 'EPIPE' + ] connectionErrors.forEach((errType) => { describe(`when error type is ${errType}`, () => { it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); - error.code = errType; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') + error.code = errType emitter.emit('test error', error, test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should set result to error', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); - error.code = errType; + emitter = new EventEmitter() + 
const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') + error.code = errType emitter.emit('test error', error, test, () => { - assert.equal(JSON.parse(requestBody).result, 'error'); - done(); - }); - }); - + assert.equal(JSON.parse(requestBody).result, 'error') + done() + }) + }) it('should set error message', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); - error.code = errType; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') + error.code = errType emitter.emit('test error', error, test, () => { - assert.isArray(JSON.parse(requestBody).results.errors); - assert.include(JSON.parse(requestBody).results.errors.map(value => JSON.stringify(value)).join(), - 'Error connecting to server under test!'); - done(); - }); - }); - }); - }); + assert.isArray(JSON.parse(requestBody).results.errors) + assert.include( + JSON.parse(requestBody) + .results.errors.map((value) => JSON.stringify(value)) + .join(), + 'Error connecting to server under test!' + ) + done() + }) + }) + }) + }) describe('when any other error', () => { it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') emitter.emit('test error', error, test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should set result to error', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') emitter.emit('test error', error, test, () => { - assert.equal(JSON.parse(requestBody).result, 'error'); - done(); - }); - }); + assert.equal(JSON.parse(requestBody).result, 'error') + done() + }) + }) it('should set descriptive error', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - const error = new Error('some error'); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + const error = new Error('some error') + emitter.emit('test error', error, test, () => { + assert.isArray(JSON.parse(requestBody).results.errors) + assert.include( + JSON.parse(requestBody) + .results.errors.map((value) => JSON.stringify(value)) + .join(), + 'Unhandled error occured when executing the transaction.' 
+ ) + done() + }) + }) + }) + + describe('when serverError is true', () => + it('should not do anything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.serverError = true + const error = new Error('some error') emitter.emit('test error', error, test, () => { - assert.isArray(JSON.parse(requestBody).results.errors); - assert.include(JSON.parse(requestBody).results.errors.map(value => JSON.stringify(value)).join(), - 'Unhandled error occured when executing the transaction.'); - done(); - }); - }); - }); - - - describe('when serverError is true', () => it('should not do anything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.serverError = true; - const error = new Error('some error'); - emitter.emit('test error', error, test, () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); + assert.isFalse(call.isDone()) + done() + }) + })) + }) describe('when ending', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + let requestBody = null beforeEach(() => { - const uri = `/apis/public/tests/run/${runId}`; + const uri = `/apis/public/tests/run/${runId}` // This is a hack how to get access to the performed request from nock // nock isn't able to provide it function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .patch(uri) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should update "test run" resource with result data', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('end', () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should return generated url if no reportUrl is available', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledWith('See results in Apiary at: https://app.apiary.io/public/tests/run/507f1f77bcf86cd799439011')); - done(); - }); - }); + assert.isOk( + reporterOutputLoggerStub.complete.calledWith( + 'See results in Apiary at: https://app.apiary.io/public/tests/run/507f1f77bcf86cd799439011' + ) + ) + done() + }) + }) it('should return reportUrl from testRun entity', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id'; + emitter = new EventEmitter() + const apiaryReporter = new 
ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.reportUrl = + 'https://absolutely.fancy.url/wich-can-change/some/id' emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledWith('See results in Apiary at: https://absolutely.fancy.url/wich-can-change/some/id')); - done(); - }); - }); + assert.isOk( + reporterOutputLoggerStub.complete.calledWith( + 'See results in Apiary at: https://absolutely.fancy.url/wich-can-change/some/id' + ) + ) + done() + }) + }) it('should send runner.logs to Apiary at the end of testRun', (done) => { - emitter = new EventEmitter(); - const logMessages = ['a', 'b']; - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }, { logs: clone(logMessages) }); - apiaryReporter.remoteId = runId; - emitter.emit('end', () => { - assert.isString(requestBody); - const parsedBody = JSON.parse(requestBody); - assert.isObject(parsedBody); - assert.property(parsedBody, 'logs'); - assert.deepEqual(parsedBody.logs, logMessages); - done(); - }); - }); - - describe('serverError is true', () => it('should not do enything', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.serverError = true; + emitter = new EventEmitter() + const logMessages = ['a', 'b'] + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } }, + { logs: clone(logMessages) } + ) + apiaryReporter.remoteId = runId emitter.emit('end', () => { - assert.isFalse(call.isDone()); - done(); - }); - })); - }); - }); + assert.isString(requestBody) + const parsedBody = JSON.parse(requestBody) + assert.isObject(parsedBody) + assert.property(parsedBody, 'logs') + assert.deepEqual(parsedBody.logs, logMessages) + done() + }) + }) + + describe('serverError is true', () => + it('should not do enything', (done) => { + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.serverError = true + emitter.emit('end', () => { + assert.isFalse(call.isDone()) + done() + }) + })) + }) + }) describe('with Apiary API token and suite id', () => { - let test = {}; - let emitter = {}; - env = {}; + let test = {} + let emitter = {} + env = {} beforeEach((done) => { - emitter = new EventEmitter(); + emitter = new EventEmitter() - env = {}; - env.APIARY_API_URL = `https://127.0.0.1:${PORT}`; - env.APIARY_API_KEY = 'aff888af9993db9ef70edf3c878ab521'; - env.APIARY_API_NAME = 'jakubtest'; + env = {} + env.APIARY_API_URL = `https://127.0.0.1:${PORT}` + env.APIARY_API_KEY = 'aff888af9993db9ef70edf3c878ab521' + env.APIARY_API_NAME = 'jakubtest' test = { status: 'fail', title: 'POST /machines', - message: "headers: Value of the ‘content-type’ must be application/json.\nbody: No validator found for real data media type 'text/plain' and expected data media type 'application/json'.\nstatusCode: Real and expected data does not match.\n", + message: + "headers: Value of the ‘content-type’ must be application/json.\nbody: No validator found for real data media type 'text/plain' and expected data media type 'application/json'.\nstatusCode: Real and expected data does not match.\n", - startedAt: (1234567890 * 1000), // JavaScript Date.now() timestamp (UNIX-like timestamp * 1000 precision) + startedAt: 
1234567890 * 1000, // JavaScript Date.now() timestamp (UNIX-like timestamp * 1000 precision) actual: { statusCode: 400, headers: { - 'content-type': 'text/plain', + 'content-type': 'text/plain' }, - body: 'Foo bar', + body: 'Foo bar' }, expected: { headers: { - 'content-type': 'application/json', + 'content-type': 'application/json' }, - body: '{\n "type": "bulldozer",\n "name": "willy",\n "id": "5229c6e8e4b0bd7dbb07e29c"\n}\n', - status: '202', + body: + '{\n "type": "bulldozer",\n "name": "willy",\n "id": "5229c6e8e4b0bd7dbb07e29c"\n}\n', + status: '202' }, request: { @@ -772,20 +963,21 @@ describe('ApiaryReporter', () => { headers: { 'Content-Type': 'application/json', 'User-Agent': 'Dredd/0.2.1 (Darwin 13.0.0; x64)', - 'Content-Length': 44, + 'Content-Length': 44 }, uri: '/machines', - method: 'POST', + method: 'POST' }, results: { headers: { - results: [{ - pointer: '/content-type', - severity: 'error', - message: 'Value of the ‘content-type’ must be application/json.', - }, + results: [ + { + pointer: '/content-type', + severity: 'error', + message: 'Value of the ‘content-type’ must be application/json.' + } ], realType: 'application/vnd.apiary.http-headers+json', expectedType: 'application/vnd.apiary.http-headers+json', @@ -796,26 +988,29 @@ describe('ApiaryReporter', () => { propertyValue: 'text/plain', attributeName: 'enum', attributeValue: ['application/json'], - message: 'Value of the ‘content-type’ must be application/json.', + message: + 'Value of the ‘content-type’ must be application/json.', validator: 'enum', validatorName: 'enum', - validatorValue: ['application/json'], + validatorValue: ['application/json'] }, - length: 1, - }, + length: 1 + } }, body: { - results: [{ - message: "No validator found for real data media type 'text/plain' and expected data media type 'application/json'.", - severity: 'error', - }, + results: [ + { + message: + "No validator found for real data media type 'text/plain' and expected data media type 'application/json'.", + severity: 'error' + } ], realType: 'text/plain', expectedType: 'application/json', validator: null, - rawData: null, + rawData: null }, statusCode: { @@ -823,194 +1018,246 @@ describe('ApiaryReporter', () => { expectedType: 'text/vnd.apiary.status-code', validator: 'TextDiff', rawData: '@@ -1,3 +1,9 @@\n-400\n+undefined\n', - results: [{ - severity: 'error', - message: 'Real and expected data does not match.', - }, - ], - }, - }, - }; + results: [ + { + severity: 'error', + message: 'Real and expected data does not match.' 
+ } + ] + } + } + } - nock.disableNetConnect(); - done(); - }); + nock.disableNetConnect() + done() + }) afterEach((done) => { - nock.enableNetConnect(); - nock.cleanAll(); - done(); - }); + nock.enableNetConnect() + nock.cleanAll() + done() + }) describe('when starting', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - const reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id'; - let requestBody = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + const reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id' + let requestBody = null beforeEach(() => { - const uri = `/apis/${env.APIARY_API_NAME}/tests/runs`; + const uri = `/apis/${env.APIARY_API_NAME}/tests/runs` - requestBody = null; + requestBody = null function getBody(body) { - requestBody = body; - return body; + requestBody = body + return body } call = nock(env.APIARY_API_URL) .filteringRequestBody(getBody) .post(uri) .matchHeader('Authentication', `Token ${env.APIARY_API_KEY}`) - .reply(201, { _id: runId, reportUrl }); - }); + .reply(201, { _id: runId, reportUrl }) + }) it('should set uuid', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.uuid); - done(); - }); - }); + assert.isNotNull(apiaryReporter.uuid) + done() + }) + }) it('should set start time', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.startedAt); - done(); - }); - }); + assert.isNotNull(apiaryReporter.startedAt) + done() + }) + }) it('should call "create new test run" HTTP resource', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } })); + emitter = new EventEmitter() + new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }) emitter.emit('start', apiDescriptions, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should attach test run ID back to the reporter as remoteId', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.remoteId); - done(); - }); - }); + assert.isNotNull(apiaryReporter.remoteId) + done() + }) + }) it('should attach test run reportUrl to the reporter as reportUrl', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) emitter.emit('start', apiDescriptions, () => { - assert.isNotNull(apiaryReporter.reportUrl); - done(); - }); - }); + assert.isNotNull(apiaryReporter.reportUrl) + done() + }) + }) 
it('should send the test-run as non-public', (done) => { - emitter = new EventEmitter(); - (new ApiaryReporter(emitter, {}, { server: 'http://my.server.co:8080', custom: { apiaryReporterEnv: env } })); + emitter = new EventEmitter() + new ApiaryReporter( + emitter, + {}, + { + server: 'http://my.server.co:8080', + custom: { apiaryReporterEnv: env } + } + ) emitter.emit('start', apiDescriptions, () => { - const parsedBody = JSON.parse(requestBody); - assert.strictEqual(parsedBody.public, false); - done(); - }); - }); - }); + const parsedBody = JSON.parse(requestBody) + assert.strictEqual(parsedBody.public, false) + done() + }) + }) + }) describe('when adding passing test', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - test = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + test = null beforeEach(() => { - const uri = `/apis/${env.APIARY_API_NAME}/tests/steps?testRunId=${runId}`; + const uri = `/apis/${env.APIARY_API_NAME}/tests/steps?testRunId=${runId}` call = nock(env.APIARY_API_URL) .post(uri) .matchHeader('Authentication', `Token ${env.APIARY_API_KEY}`) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test pass', test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) + }) describe('when adding failing test', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; - test = null; + let call = null + const runId = '507f1f77bcf86cd799439011' + test = null beforeEach(() => { - const uri = `/apis/${env.APIARY_API_NAME}/tests/steps?testRunId=${runId}`; + const uri = `/apis/${env.APIARY_API_NAME}/tests/steps?testRunId=${runId}` call = nock(env.APIARY_API_URL) .post(uri) .matchHeader('Authentication', `Token ${env.APIARY_API_KEY}`) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should call "create new test step" HTTP resource', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('test fail', test, () => { - assert.isTrue(call.isDone()); - done(); - }); - }); - }); - + assert.isTrue(call.isDone()) + done() + }) + }) + }) describe('when ending', () => { - let call = null; - const runId = '507f1f77bcf86cd799439011'; + let call = null + const runId = '507f1f77bcf86cd799439011' beforeEach(() => { - const uri = `/apis/${env.APIARY_API_NAME}/tests/run/${runId}`; + const uri = `/apis/${env.APIARY_API_NAME}/tests/run/${runId}` call = nock(env.APIARY_API_URL) .patch(uri) .matchHeader('Authentication', `Token ${env.APIARY_API_KEY}`) - .reply(201, { _id: runId }); - }); + .reply(201, { _id: runId }) + }) it('should update "test run" resource with result data', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - 
apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('end', () => { - assert.isTrue(call.isDone()); - done(); - }); - }); + assert.isTrue(call.isDone()) + done() + }) + }) it('should return generated url if reportUrl is not available', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledWith('See results in Apiary at: https://app.apiary.io/jakubtest/tests/run/507f1f77bcf86cd799439011')); - done(); - }); - }); + assert.isOk( + reporterOutputLoggerStub.complete.calledWith( + 'See results in Apiary at: https://app.apiary.io/jakubtest/tests/run/507f1f77bcf86cd799439011' + ) + ) + done() + }) + }) it('should return reportUrl from testRun entity', (done) => { - emitter = new EventEmitter(); - const apiaryReporter = new ApiaryReporter(emitter, {}, { custom: { apiaryReporterEnv: env } }); - apiaryReporter.remoteId = runId; - apiaryReporter.reportUrl = 'https://absolutely.fancy.url/wich-can-change/some/id'; + emitter = new EventEmitter() + const apiaryReporter = new ApiaryReporter( + emitter, + {}, + { custom: { apiaryReporterEnv: env } } + ) + apiaryReporter.remoteId = runId + apiaryReporter.reportUrl = + 'https://absolutely.fancy.url/wich-can-change/some/id' emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledWith('See results in Apiary at: https://absolutely.fancy.url/wich-can-change/some/id')); - done(); - }); - }); - }); - }); -}); + assert.isOk( + reporterOutputLoggerStub.complete.calledWith( + 'See results in Apiary at: https://absolutely.fancy.url/wich-can-change/some/id' + ) + ) + done() + }) + }) + }) + }) +}) diff --git a/test/unit/reporters/BaseReporter-test.js b/test/unit/reporters/BaseReporter-test.js index 9e3867932..de3802acb 100644 --- a/test/unit/reporters/BaseReporter-test.js +++ b/test/unit/reporters/BaseReporter-test.js @@ -1,17 +1,18 @@ -const proxyquire = require('proxyquire').noCallThru(); -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { noCallThru } from 'proxyquire' +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); +import loggerStub from '../../../lib/logger' +const proxyquire = noCallThru() const BaseReporter = proxyquire('../../../lib/reporters/BaseReporter', { - '../logger': loggerStub, -}); + '../logger': loggerStub +}).default describe('BaseReporter', () => { - let stats = {}; - let test = {}; - let emitter = {}; + let stats = {} + let test = {} + let emitter = {} beforeEach(() => { stats = { @@ -22,145 +23,147 @@ describe('BaseReporter', () => { skipped: 0, start: 0, end: 0, - duration: 0, - }; - emitter = new EventEmitter(); - (new BaseReporter(emitter, stats)); - }); + duration: 0 + } + emitter = new EventEmitter() + new BaseReporter(emitter, stats) + }) describe('when starting', () => { before(() => { - stats = { start: null }; - }); + stats = { start: null } + }) - it('should set the start date', done => emitter.emit('start', '', () => { - assert.isOk(stats.start); - done(); - })); - 
}); + it('should set the start date', (done) => + emitter.emit('start', '', () => { + assert.isOk(stats.start) + done() + })) + }) describe('when ending', () => { before(() => { - stats = { start: null }; - }); + stats = { start: null } + }) - it('should set the end date', done => emitter.emit('end', () => { - assert.isOk(stats.end); - done(); - })); - }); + it('should set the end date', (done) => + emitter.emit('end', () => { + assert.isOk(stats.end) + done() + })) + }) describe('when test starts', () => { beforeEach(() => { test = { status: 'pass', - title: 'Passing Test', - }; - emitter.emit('test start', test); - }); + title: 'Passing Test' + } + emitter.emit('test start', test) + }) - it('should increment the counter', () => assert.equal(stats.tests, 1)); + it('should increment the counter', () => assert.equal(stats.tests, 1)) - it('should set the start time', () => assert.isOk(test.start)); - }); + it('should set the start time', () => assert.isOk(test.start)) + }) describe('when test passes', () => { beforeEach(() => { test = { status: 'pass', - title: 'Passing Test', - }; - emitter.emit('test start', test); - emitter.emit('test pass', test); - }); + title: 'Passing Test' + } + emitter.emit('test start', test) + emitter.emit('test pass', test) + }) - it('should increment the counter', () => assert.equal(stats.passes, 1)); + it('should increment the counter', () => assert.equal(stats.passes, 1)) - it('should set the end time', () => assert.isOk(test.end)); - }); + it('should set the end time', () => assert.isOk(test.end)) + }) describe('when test is skipped', () => { beforeEach(() => { test = { status: 'skipped', - title: 'Skipped Test', - }; - emitter.emit('test start', test); - emitter.emit('test skip', test); - }); + title: 'Skipped Test' + } + emitter.emit('test start', test) + emitter.emit('test skip', test) + }) - it('should increment the counter', () => assert.isOk(stats.skipped === 1)); - }); + it('should increment the counter', () => assert.isOk(stats.skipped === 1)) + }) describe('when test fails', () => { beforeEach(() => { test = { status: 'failed', - title: 'Failed Test', - }; - emitter.emit('test start', test); - emitter.emit('test fail', test); - }); + title: 'Failed Test' + } + emitter.emit('test start', test) + emitter.emit('test fail', test) + }) - it('should increment the counter', () => assert.isOk(stats.failures === 1)); + it('should increment the counter', () => assert.isOk(stats.failures === 1)) - it('should set the end time', () => assert.isOk(test.end)); - }); + it('should set the end time', () => assert.isOk(test.end)) + }) describe('when test errors', () => { beforeEach(() => { test = { status: 'error', - title: 'Errored Test', - }; - emitter.emit('test start', test); - emitter.emit('test error', new Error('Error'), test); - }); + title: 'Errored Test' + } + emitter.emit('test start', test) + emitter.emit('test error', new Error('Error'), test) + }) - it('should increment the counter', () => assert.isOk(stats.errors === 1)); + it('should increment the counter', () => assert.isOk(stats.errors === 1)) - it('should set the end time', () => assert.isOk(test.end)); - }); + it('should set the end time', () => assert.isOk(test.end)) + }) describe('when passing test start is UTC string', () => { beforeEach(() => { test = { status: 'pass', - title: 'Passing Test', - }; - emitter.emit('test start', test); - test.start = '2017-06-15T09:29:50.588Z'; - emitter.emit('test pass', test); - }); + title: 'Passing Test' + } + emitter.emit('test start', test) + test.start = 
'2017-06-15T09:29:50.588Z' + emitter.emit('test pass', test) + }) - it('should set the duration', () => assert.isNotNaN(test.duration)); - }); + it('should set the duration', () => assert.isNotNaN(test.duration)) + }) describe('when failed test start is UTC string', () => { beforeEach(() => { test = { status: 'pass', - title: 'Failed Test', - }; - emitter.emit('test start', test); - test.start = '2017-06-15T09:29:50.588Z'; - emitter.emit('test fail', test); - }); + title: 'Failed Test' + } + emitter.emit('test start', test) + test.start = '2017-06-15T09:29:50.588Z' + emitter.emit('test fail', test) + }) - it('should set the duration', () => assert.isNotNaN(test.duration)); - }); + it('should set the duration', () => assert.isNotNaN(test.duration)) + }) describe('when errored test start is UTC string', () => { beforeEach(() => { test = { status: 'pass', - title: 'Errored Test', - }; - emitter.emit('test start', test); - test.start = '2017-06-15T09:29:50.588Z'; - emitter.emit('test error', new Error('Error'), test); - }); - - it('should set the duration', () => assert.isNotNaN(test.duration)); - }); -}); + title: 'Errored Test' + } + emitter.emit('test start', test) + test.start = '2017-06-15T09:29:50.588Z' + emitter.emit('test error', new Error('Error'), test) + }) + + it('should set the duration', () => assert.isNotNaN(test.duration)) + }) +}) diff --git a/test/unit/reporters/CLIReporter-test.js b/test/unit/reporters/CLIReporter-test.js index 764d31cec..c1cba563b 100644 --- a/test/unit/reporters/CLIReporter-test.js +++ b/test/unit/reporters/CLIReporter-test.js @@ -1,221 +1,235 @@ -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import loggerStub from '../../../lib/logger' +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' +const proxyquire = noCallThru() const CLIReporter = proxyquire('../../../lib/reporters/CLIReporter', { '../logger': loggerStub, - './reporterOutputLogger': reporterOutputLoggerStub, -}); + './reporterOutputLogger': reporterOutputLoggerStub +}).default describe('CLIReporter', () => { - let test = {}; + let test = {} before(() => { - loggerStub.transports.console.silent = true; - reporterOutputLoggerStub.transports.console.silent = true; - }); + loggerStub.transports.console.silent = true + reporterOutputLoggerStub.transports.console.silent = true + }) after(() => { - loggerStub.transports.console.silent = false; - reporterOutputLoggerStub.transports.console.silent = false; - }); + loggerStub.transports.console.silent = false + reporterOutputLoggerStub.transports.console.silent = false + }) describe('when starting', () => { - beforeEach(() => sinon.spy(loggerStub, 'debug')); + beforeEach(() => sinon.spy(loggerStub, 'debug')) - afterEach(() => loggerStub.debug.restore()); + afterEach(() => loggerStub.debug.restore()) it('should write starting to the console', (done) => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, true)); - loggerStub.debug.resetHistory(); + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, true) + loggerStub.debug.resetHistory() emitter.emit('start', '', () => { - 
assert.isOk(loggerStub.debug.calledOnce); - done(); - }); - }); - }); + assert.isOk(loggerStub.debug.calledOnce) + done() + }) + }) + }) describe('when adding passing test', () => { before(() => { test = { status: 'pass', - title: 'Passing Test', - }; - }); + title: 'Passing Test' + } + }) - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'pass')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'pass')) - afterEach(() => reporterOutputLoggerStub.pass.restore()); + afterEach(() => reporterOutputLoggerStub.pass.restore()) it('should write pass to the console', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, true)); - emitter.emit('test pass', test); - assert.isOk(reporterOutputLoggerStub.pass.calledOnce); - }); + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, true) + emitter.emit('test pass', test) + assert.isOk(reporterOutputLoggerStub.pass.calledOnce) + }) describe('when details=true', () => { - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'request')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'request')) - afterEach(() => reporterOutputLoggerStub.request.restore()); + afterEach(() => reporterOutputLoggerStub.request.restore()) it('should write details for passing tests', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, true, true)); - emitter.emit('test pass', test); - assert.isOk(reporterOutputLoggerStub.request.calledOnce); - }); - }); - }); + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, true, true) + emitter.emit('test pass', test) + assert.isOk(reporterOutputLoggerStub.request.calledOnce) + }) + }) + }) describe('when adding failing test', () => { before(() => { test = { status: 'fail', - title: 'Failing Test', - }; - }); + title: 'Failing Test' + } + }) describe('when errors are inline', () => { - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'fail')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'fail')) - afterEach(() => reporterOutputLoggerStub.fail.restore()); + afterEach(() => reporterOutputLoggerStub.fail.restore()) it('should write fail to the console', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, true)); - emitter.emit('test fail', test); - assert.isOk(reporterOutputLoggerStub.fail.calledTwice); - }); - }); + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, true) + emitter.emit('test fail', test) + assert.isOk(reporterOutputLoggerStub.fail.calledTwice) + }) + }) describe('when errors are aggregated', () => { - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'fail')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'fail')) - afterEach(() => reporterOutputLoggerStub.fail.restore()); + afterEach(() => reporterOutputLoggerStub.fail.restore()) it('should not write full failure to the console at the time of failure', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, false)); - emitter.emit('test fail', test); - assert.isOk(reporterOutputLoggerStub.fail.calledOnce); - }); + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, false) + emitter.emit('test fail', test) + assert.isOk(reporterOutputLoggerStub.fail.calledOnce) + }) it('should write full failure to the console after execution is complete', (done) => { - const emitter = new EventEmitter(); - const cliReporter = new CLIReporter(emitter, {}, false); - cliReporter.errors = [test]; + const emitter = new EventEmitter() + const cliReporter = new CLIReporter(emitter, {}, false) + 
cliReporter.errors = [test] emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.fail.calledTwice); - done(); - }); - }); - }); - }); + assert.isOk(reporterOutputLoggerStub.fail.calledTwice) + done() + }) + }) + }) + }) describe('when adding error test', () => { before(() => { test = { status: 'error', - title: 'Error Test', - }; - }); + title: 'Error Test' + } + }) - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'error')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'error')) - afterEach(() => reporterOutputLoggerStub.error.restore()); + afterEach(() => reporterOutputLoggerStub.error.restore()) it('should write error to the console', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, false)); - emitter.emit('test error', new Error('Error'), test); - assert.isOk(reporterOutputLoggerStub.error.calledTwice); - }); - }); - + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, false) + emitter.emit('test error', new Error('Error'), test) + assert.isOk(reporterOutputLoggerStub.error.calledTwice) + }) + }) describe('when adding error test with connection refused', () => { before(() => { test = { status: 'error', - title: 'Error Test', - }; - }); - - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'error')); - - afterEach(() => reporterOutputLoggerStub.error.restore()); - - const connectionErrors = ['ECONNRESET', 'ENOTFOUND', 'ESOCKETTIMEDOUT', 'ETIMEDOUT', 'ECONNREFUSED', 'EHOSTUNREACH', 'EPIPE']; - - Array.from(connectionErrors).forEach(errType => describe(`when error type ${errType}`, () => it('should write error to the console', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, false)); - const error = new Error('connect'); - error.code = errType; - emitter.emit('test error', error, test); - - const messages = Object.keys(reporterOutputLoggerStub.error.args).map((value, index) => reporterOutputLoggerStub.error.args[index][0]); - - assert.include(messages.join(), 'Error connecting'); - }))); - }); + title: 'Error Test' + } + }) + + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'error')) + + afterEach(() => reporterOutputLoggerStub.error.restore()) + + const connectionErrors = [ + 'ECONNRESET', + 'ENOTFOUND', + 'ESOCKETTIMEDOUT', + 'ETIMEDOUT', + 'ECONNREFUSED', + 'EHOSTUNREACH', + 'EPIPE' + ] + + Array.from(connectionErrors).forEach((errType) => + describe(`when error type ${errType}`, () => + it('should write error to the console', () => { + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, false) + const error = new Error('connect') + error.code = errType + emitter.emit('test error', error, test) + + const messages = Object.keys(reporterOutputLoggerStub.error.args).map( + (value, index) => reporterOutputLoggerStub.error.args[index][0] + ) + + assert.include(messages.join(), 'Error connecting') + })) + ) + }) describe('when adding skipped test', () => { before(() => { test = { status: 'skip', - title: 'Skipped Test', - }; - }); + title: 'Skipped Test' + } + }) - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'skip')); + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'skip')) - afterEach(() => reporterOutputLoggerStub.skip.restore()); + afterEach(() => reporterOutputLoggerStub.skip.restore()) it('should write skip to the console', () => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, false)); - emitter.emit('test skip', test); - assert.isOk(reporterOutputLoggerStub.skip.calledOnce); - }); - }); - + const emitter = new 
EventEmitter() + new CLIReporter(emitter, {}, false) + emitter.emit('test skip', test) + assert.isOk(reporterOutputLoggerStub.skip.calledOnce) + }) + }) describe('when creating report', () => { before(() => { test = { status: 'fail', - title: 'Failing Test', - }; - }); - - beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'complete')); - - afterEach(() => reporterOutputLoggerStub.complete.restore()); - - describe('when there is at least one test', () => it('should write to the console', (done) => { - const emitter = new EventEmitter(); - const cliReporter = new CLIReporter(emitter, {}, false); - cliReporter.stats.tests = 1; - emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledTwice); - done(); - }); - })); - - describe('when there are no tests', () => it('should write to the console', (done) => { - const emitter = new EventEmitter(); - (new CLIReporter(emitter, {}, false)); - emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledOnce); - done(); - }); - })); - }); -}); + title: 'Failing Test' + } + }) + + beforeEach(() => sinon.spy(reporterOutputLoggerStub, 'complete')) + + afterEach(() => reporterOutputLoggerStub.complete.restore()) + + describe('when there is at least one test', () => + it('should write to the console', (done) => { + const emitter = new EventEmitter() + const cliReporter = new CLIReporter(emitter, {}, false) + cliReporter.stats.tests = 1 + emitter.emit('end', () => { + assert.isOk(reporterOutputLoggerStub.complete.calledTwice) + done() + }) + })) + + describe('when there are no tests', () => + it('should write to the console', (done) => { + const emitter = new EventEmitter() + new CLIReporter(emitter, {}, false) + emitter.emit('end', () => { + assert.isOk(reporterOutputLoggerStub.complete.calledOnce) + done() + }) + })) + }) +}) diff --git a/test/unit/reporters/DotReporter-test.js b/test/unit/reporters/DotReporter-test.js index 09dfbe9e1..6528aa2ec 100644 --- a/test/unit/reporters/DotReporter-test.js +++ b/test/unit/reporters/DotReporter-test.js @@ -1,25 +1,30 @@ -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import loggerStub from '../../../lib/logger' +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' +const proxyquire = noCallThru() const DotReporter = proxyquire('../../../lib/reporters/DotReporter', { - '../logger': loggerStub, -}); + '../logger': loggerStub +}).default describe('DotReporter', () => { - let stats = {}; - let test = []; - let emitter; - let dotReporter; + let stats = {} + let test = [] + let emitter + let dotReporter - before(() => { loggerStub.transports.console.silent = true; }); + before(() => { + loggerStub.transports.console.silent = true + }) - after(() => { loggerStub.transports.console.silent = false; }); + after(() => { + loggerStub.transports.console.silent = false + }) beforeEach(() => { stats = { @@ -30,131 +35,138 @@ describe('DotReporter', () => { skipped: 0, start: 0, end: 0, - duration: 0, - }; - emitter = new EventEmitter(); - dotReporter = new DotReporter(emitter, stats); - }); + duration: 0 + } + emitter = new EventEmitter() + dotReporter = 
new DotReporter(emitter, stats) + }) describe('when starting', () => { - beforeEach(() => sinon.spy(loggerStub, 'debug')); + beforeEach(() => sinon.spy(loggerStub, 'debug')) - afterEach(() => loggerStub.debug.restore()); + afterEach(() => loggerStub.debug.restore()) - it('should log that testing has begun', () => emitter.emit('start', '', () => assert.isOk(loggerStub.debug.called))); - }); + it('should log that testing has begun', () => + emitter.emit('start', '', () => assert.isOk(loggerStub.debug.called))) + }) describe('when ending', () => { beforeEach(() => { - stats.tests = 1; - sinon.spy(reporterOutputLoggerStub, 'complete'); - sinon.stub(dotReporter, 'write'); - }); + stats.tests = 1 + sinon.spy(reporterOutputLoggerStub, 'complete') + sinon.stub(dotReporter, 'write') + }) afterEach(() => { - reporterOutputLoggerStub.complete.restore(); - dotReporter.write.restore(); - }); + reporterOutputLoggerStub.complete.restore() + dotReporter.write.restore() + }) - it('should log that testing is complete', () => emitter.emit('end', () => assert.isOk(reporterOutputLoggerStub.complete.calledTwice))); + it('should log that testing is complete', () => + emitter.emit('end', () => + assert.isOk(reporterOutputLoggerStub.complete.calledTwice) + )) describe('when there are failures', () => { before(() => { test = { status: 'fail', - title: 'failing test', - }; - }); + title: 'failing test' + } + }) beforeEach(() => { - dotReporter.errors = [test]; - dotReporter.stats.tests = 1; - emitter.emit('test start', test); - sinon.spy(reporterOutputLoggerStub, 'fail'); - }); - - afterEach(() => reporterOutputLoggerStub.fail.restore()); - - it('should log the failures at the end of testing', done => emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.fail.called); - done(); - })); - }); - }); + dotReporter.errors = [test] + dotReporter.stats.tests = 1 + emitter.emit('test start', test) + sinon.spy(reporterOutputLoggerStub, 'fail') + }) + + afterEach(() => reporterOutputLoggerStub.fail.restore()) + + it('should log the failures at the end of testing', (done) => + emitter.emit('end', () => { + assert.isOk(reporterOutputLoggerStub.fail.called) + done() + })) + }) + }) describe('when test passes', () => { before(() => { test = { status: 'pass', - title: 'Passing Test', - }; - }); + title: 'Passing Test' + } + }) beforeEach(() => { - sinon.stub(dotReporter, 'write'); - emitter.emit('test start', test); - emitter.emit('test pass', test); - }); + sinon.stub(dotReporter, 'write') + emitter.emit('test start', test) + emitter.emit('test pass', test) + }) - after(() => dotReporter.write.restore()); + after(() => dotReporter.write.restore()) - it('should write a .', () => assert.isOk(dotReporter.write.calledWith('.'))); - }); + it('should write a .', () => assert.isOk(dotReporter.write.calledWith('.'))) + }) describe('when test is skipped', () => { before(() => { test = { status: 'skipped', - title: 'Skipped Test', - }; - }); + title: 'Skipped Test' + } + }) beforeEach(() => { - sinon.stub(dotReporter, 'write'); - emitter.emit('test start', test); - emitter.emit('test skip', test); - }); + sinon.stub(dotReporter, 'write') + emitter.emit('test start', test) + emitter.emit('test skip', test) + }) - after(() => dotReporter.write.restore()); + after(() => dotReporter.write.restore()) - it('should write a -', () => assert.isOk(dotReporter.write.calledWith('-'))); - }); + it('should write a -', () => assert.isOk(dotReporter.write.calledWith('-'))) + }) describe('when test fails', () => { before(() => { test = { 
status: 'failed', - title: 'Failed Test', - }; - }); + title: 'Failed Test' + } + }) beforeEach(() => { - sinon.stub(dotReporter, 'write'); - emitter.emit('test start', test); - emitter.emit('test fail', test); - }); + sinon.stub(dotReporter, 'write') + emitter.emit('test start', test) + emitter.emit('test fail', test) + }) - after(() => dotReporter.write.restore()); + after(() => dotReporter.write.restore()) - it('should write an F', () => assert.isOk(dotReporter.write.calledWith('F'))); - }); + it('should write an F', () => + assert.isOk(dotReporter.write.calledWith('F'))) + }) describe('when test errors', () => { before(() => { test = { status: 'error', - title: 'Errored Test', - }; - }); + title: 'Errored Test' + } + }) beforeEach(() => { - sinon.stub(dotReporter, 'write'); - emitter.emit('test start', test); - emitter.emit('test error', new Error('Error'), test); - }); + sinon.stub(dotReporter, 'write') + emitter.emit('test start', test) + emitter.emit('test error', new Error('Error'), test) + }) - after(() => dotReporter.write.restore()); + after(() => dotReporter.write.restore()) - it('should write an E', () => assert.isOk(dotReporter.write.calledWith('E'))); - }); -}); + it('should write an E', () => + assert.isOk(dotReporter.write.calledWith('E'))) + }) +}) diff --git a/test/unit/reporters/HTMLReporter-test.js b/test/unit/reporters/HTMLReporter-test.js index 689786d71..144b9278a 100644 --- a/test/unit/reporters/HTMLReporter-test.js +++ b/test/unit/reporters/HTMLReporter-test.js @@ -1,42 +1,44 @@ -const fsStub = require('fs'); -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); +import fsStub from 'fs' +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import loggerStub from '../../../lib/logger' +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' -const makeDirStub = (input, options) => makeDirStubImpl(input, options); -let makeDirStubImpl = () => Promise.resolve(); -const makeDirStubImplBackup = makeDirStubImpl; +const proxyquire = noCallThru() + +const makeDirStub = (input, options) => makeDirStubImpl(input, options) +let makeDirStubImpl = () => Promise.resolve() +const makeDirStubImplBackup = makeDirStubImpl const HTMLReporter = proxyquire('../../../lib/reporters/HTMLReporter', { '../logger': loggerStub, './reporterOutputLogger': reporterOutputLoggerStub, fs: fsStub, - 'make-dir': makeDirStub, -}); + 'make-dir': makeDirStub +}).default describe('HTMLReporter', () => { - let emitter; - let htmlReporter; - let stats; - let test = {}; + let emitter + let htmlReporter + let stats + let test = {} before(() => { - loggerStub.transports.console.silent = true; - reporterOutputLoggerStub.transports.console.silent = true; - }); + loggerStub.transports.console.silent = true + reporterOutputLoggerStub.transports.console.silent = true + }) after(() => { - loggerStub.transports.console.silent = false; - reporterOutputLoggerStub.transports.console.silent = false; - }); + loggerStub.transports.console.silent = false + reporterOutputLoggerStub.transports.console.silent = false + }) beforeEach(() => { - emitter = new EventEmitter(); + emitter = new EventEmitter() stats = { tests: 0, failures: 0, @@ -45,152 +47,166 @@ 
describe('HTMLReporter', () => { skipped: 0, start: 0, end: 0, - duration: 0, - }; - htmlReporter = new HTMLReporter(emitter, stats, 'test.html'); - }); + duration: 0 + } + htmlReporter = new HTMLReporter(emitter, stats, 'test.html') + }) describe('when starting', () => { describe('when file exists', () => { before(() => { - sinon.stub(fsStub, 'existsSync').callsFake(() => true); - sinon.stub(loggerStub, 'warn'); - }); + sinon.stub(fsStub, 'existsSync').callsFake(() => true) + sinon.stub(loggerStub, 'warn') + }) after(() => { - fsStub.existsSync.restore(); - loggerStub.warn.restore(); - }); + fsStub.existsSync.restore() + loggerStub.warn.restore() + }) - it('should warn about the existing file', () => assert.isOk(loggerStub.warn.called)); - }); + it('should warn about the existing file', () => + assert.isOk(loggerStub.warn.called)) + }) describe('when file does not exist', () => { before(() => { - sinon.stub(fsStub, 'existsSync').callsFake(() => false); - sinon.stub(fsStub, 'unlinkSync'); - }); + sinon.stub(fsStub, 'existsSync').callsFake(() => false) + sinon.stub(fsStub, 'unlinkSync') + }) after(() => { - fsStub.existsSync.restore(); - fsStub.unlinkSync.restore(); - }); + fsStub.existsSync.restore() + fsStub.unlinkSync.restore() + }) - it('should not attempt to delete a file', () => assert.isOk(fsStub.unlinkSync.notCalled)); - }); + it('should not attempt to delete a file', () => + assert.isOk(fsStub.unlinkSync.notCalled)) + }) - it('should write the prelude to the buffer', done => emitter.emit('start', '', () => { - assert.isOk(htmlReporter.buf.includes('Dredd')); - done(); - })); - }); + it('should write the prelude to the buffer', (done) => + emitter.emit('start', '', () => { + assert.isOk(htmlReporter.buf.includes('Dredd')) + done() + })) + }) describe('when ending', () => { - before(() => { stats.tests = 1; }); + before(() => { + stats.tests = 1 + }) describe('when can create output directory', () => { beforeEach(() => { - sinon.stub(fsStub, 'writeFile').callsFake((path, data, callback) => callback()); - makeDirStubImpl = sinon.spy(makeDirStubImpl); - }); + sinon + .stub(fsStub, 'writeFile') + .callsFake((path, data, callback) => callback()) + makeDirStubImpl = sinon.spy(makeDirStubImpl) + }) afterEach(() => { - fsStub.writeFile.restore(); - makeDirStubImpl = makeDirStubImplBackup; - }); - - it('should write the file', done => emitter.emit('end', () => { - assert.isOk(makeDirStubImpl.called); - assert.isOk(fsStub.writeFile.called); - done(); - })); - }); + fsStub.writeFile.restore() + makeDirStubImpl = makeDirStubImplBackup + }) + + it('should write the file', (done) => + emitter.emit('end', () => { + assert.isOk(makeDirStubImpl.called) + assert.isOk(fsStub.writeFile.called) + done() + })) + }) describe('when cannot create output directory', () => { beforeEach(() => { - sinon.stub(reporterOutputLoggerStub, 'error'); - sinon.stub(fsStub, 'writeFile').callsFake((path, data, callback) => callback()); - makeDirStubImpl = sinon.stub().callsFake(() => Promise.reject(new Error())); - }); + sinon.stub(reporterOutputLoggerStub, 'error') + sinon + .stub(fsStub, 'writeFile') + .callsFake((path, data, callback) => callback()) + makeDirStubImpl = sinon + .stub() + .callsFake(() => Promise.reject(new Error())) + }) after(() => { - reporterOutputLoggerStub.error.restore(); - fsStub.writeFile.restore(); - makeDirStubImpl = makeDirStubImplBackup; - }); - - it('should write to log', done => emitter.emit('end', () => { - assert.isOk(makeDirStubImpl.called); - assert.isOk(fsStub.writeFile.notCalled); - 
assert.isOk(reporterOutputLoggerStub.error.called); - done(); - })); - }); - }); + reporterOutputLoggerStub.error.restore() + fsStub.writeFile.restore() + makeDirStubImpl = makeDirStubImplBackup + }) + + it('should write to log', (done) => + emitter.emit('end', () => { + assert.isOk(makeDirStubImpl.called) + assert.isOk(fsStub.writeFile.notCalled) + assert.isOk(reporterOutputLoggerStub.error.called) + done() + })) + }) + }) describe('when test passes', () => { before(() => { test = { status: 'pass', - title: 'Passing Test', - }; - }); + title: 'Passing Test' + } + }) it('should call the pass event', () => { - emitter.emit('test start', test); - emitter.emit('test pass', test); - assert.isOk(htmlReporter.buf.includes('Pass')); - }); - - describe('when details=true', () => it('should write details for passing tests', () => { - htmlReporter.details = true; - emitter.emit('test pass', test); - assert.isOk(htmlReporter.buf.includes('Request')); - })); - }); + emitter.emit('test start', test) + emitter.emit('test pass', test) + assert.isOk(htmlReporter.buf.includes('Pass')) + }) + + describe('when details=true', () => + it('should write details for passing tests', () => { + htmlReporter.details = true + emitter.emit('test pass', test) + assert.isOk(htmlReporter.buf.includes('Request')) + })) + }) describe('when test is skipped', () => { before(() => { test = { status: 'skipped', - title: 'Skipped Test', - }; - }); + title: 'Skipped Test' + } + }) it('should call the skip event', () => { - emitter.emit('test start', test); - emitter.emit('test skip', test); - assert.isOk(htmlReporter.buf.includes('Skip')); - }); - }); + emitter.emit('test start', test) + emitter.emit('test skip', test) + assert.isOk(htmlReporter.buf.includes('Skip')) + }) + }) describe('when test fails', () => { before(() => { test = { status: 'failed', - title: 'Failed Test', - }; - }); + title: 'Failed Test' + } + }) it('should call the fail event', () => { - emitter.emit('test start', test); - emitter.emit('test fail', test); - assert.isOk(htmlReporter.buf.includes('Fail')); - }); - }); + emitter.emit('test start', test) + emitter.emit('test fail', test) + assert.isOk(htmlReporter.buf.includes('Fail')) + }) + }) describe('when test errors', () => { before(() => { test = { status: 'error', - title: 'Errored Test', - }; - }); + title: 'Errored Test' + } + }) it('should call the error event', () => { - emitter.emit('test start', test); - emitter.emit('test error', new Error('Error'), test); - assert.isOk(htmlReporter.buf.includes('Error')); - }); - }); -}); + emitter.emit('test start', test) + emitter.emit('test error', new Error('Error'), test) + assert.isOk(htmlReporter.buf.includes('Error')) + }) + }) +}) diff --git a/test/unit/reporters/MarkdownReporter-test.js b/test/unit/reporters/MarkdownReporter-test.js index 9318ce7bb..6ef67d74b 100644 --- a/test/unit/reporters/MarkdownReporter-test.js +++ b/test/unit/reporters/MarkdownReporter-test.js @@ -1,42 +1,44 @@ -const fsStub = require('fs'); -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); +import fsStub from 'fs' +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import loggerStub from '../../../lib/logger' +import 
reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' -const makeDirStub = (input, options) => makeDirStubImpl(input, options); -let makeDirStubImpl = () => Promise.resolve(); -const makeDirStubImplBackup = makeDirStubImpl; +const makeDirStub = (input, options) => makeDirStubImpl(input, options) +let makeDirStubImpl = () => Promise.resolve() +const makeDirStubImplBackup = makeDirStubImpl + +const proxyquire = noCallThru() const MarkdownReporter = proxyquire('../../../lib/reporters/MarkdownReporter', { '../logger': loggerStub, './reporterOutputLogger': reporterOutputLoggerStub, fs: fsStub, - 'make-dir': makeDirStub, -}); + 'make-dir': makeDirStub +}).default describe('MarkdownReporter', () => { - let mdReporter; - let emitter; - let stats; - let test = {}; + let mdReporter + let emitter + let stats + let test = {} before(() => { - loggerStub.transports.console.silent = true; - reporterOutputLoggerStub.transports.console.silent = true; - }); + loggerStub.transports.console.silent = true + reporterOutputLoggerStub.transports.console.silent = true + }) after(() => { - loggerStub.transports.console.silent = false; - reporterOutputLoggerStub.transports.console.silent = false; - }); + loggerStub.transports.console.silent = false + reporterOutputLoggerStub.transports.console.silent = false + }) beforeEach(() => { - emitter = new EventEmitter(); + emitter = new EventEmitter() stats = { tests: 0, failures: 0, @@ -45,160 +47,171 @@ describe('MarkdownReporter', () => { skipped: 0, start: 0, end: 0, - duration: 0, - }; - mdReporter = new MarkdownReporter(emitter, stats, 'test.md'); - }); - + duration: 0 + } + mdReporter = new MarkdownReporter(emitter, stats, 'test.md') + }) describe('when creating', () => { describe('when file exists', () => { before(() => { - sinon.stub(fsStub, 'existsSync').callsFake(() => true); - sinon.stub(loggerStub, 'warn'); - }); + sinon.stub(fsStub, 'existsSync').callsFake(() => true) + sinon.stub(loggerStub, 'warn') + }) after(() => { - fsStub.existsSync.restore(); - loggerStub.warn.restore(); - }); + fsStub.existsSync.restore() + loggerStub.warn.restore() + }) - it('should warn about the existing file', () => assert.isOk(loggerStub.warn.called)); - }); + it('should warn about the existing file', () => + assert.isOk(loggerStub.warn.called)) + }) describe('when file does not exist', () => { before(() => { - sinon.stub(fsStub, 'existsSync').callsFake(() => false); - sinon.stub(fsStub, 'unlinkSync'); - }); + sinon.stub(fsStub, 'existsSync').callsFake(() => false) + sinon.stub(fsStub, 'unlinkSync') + }) after(() => { - fsStub.existsSync.restore(); - fsStub.unlinkSync.restore(); - }); + fsStub.existsSync.restore() + fsStub.unlinkSync.restore() + }) it('should create the file', (done) => { - assert.isOk(fsStub.unlinkSync.notCalled); - done(); - }); - }); - }); - - describe('when starting', () => it('should write the title to the buffer', done => emitter.emit('start', '', () => { - assert.isOk(mdReporter.buf.includes('Dredd')); - done(); - }))); + assert.isOk(fsStub.unlinkSync.notCalled) + done() + }) + }) + }) + + describe('when starting', () => + it('should write the title to the buffer', (done) => + emitter.emit('start', '', () => { + assert.isOk(mdReporter.buf.includes('Dredd')) + done() + }))) describe('when ending', () => { describe('when can create output directory', () => { beforeEach(() => { - sinon.stub(fsStub, 'writeFile').callsFake((path, data, callback) => callback()); - makeDirStubImpl = sinon.spy(makeDirStubImpl); - }); + sinon + .stub(fsStub, 
'writeFile') + .callsFake((path, data, callback) => callback()) + makeDirStubImpl = sinon.spy(makeDirStubImpl) + }) afterEach(() => { - fsStub.writeFile.restore(); - makeDirStubImpl = makeDirStubImplBackup; - }); - - it('should write buffer to file', done => emitter.emit('end', () => { - emitter.emit('end', () => {}); - assert.isOk(makeDirStubImpl.called); - assert.isOk(fsStub.writeFile.called); - done(); - })); - }); + fsStub.writeFile.restore() + makeDirStubImpl = makeDirStubImplBackup + }) + + it('should write buffer to file', (done) => + emitter.emit('end', () => { + emitter.emit('end', () => {}) + assert.isOk(makeDirStubImpl.called) + assert.isOk(fsStub.writeFile.called) + done() + })) + }) describe('when cannot create output directory', () => { beforeEach(() => { - sinon.stub(fsStub, 'writeFile').callsFake((path, data, callback) => callback()); - sinon.stub(reporterOutputLoggerStub, 'error'); - makeDirStubImpl = sinon.stub().callsFake(() => Promise.reject(new Error())); - }); + sinon + .stub(fsStub, 'writeFile') + .callsFake((path, data, callback) => callback()) + sinon.stub(reporterOutputLoggerStub, 'error') + makeDirStubImpl = sinon + .stub() + .callsFake(() => Promise.reject(new Error())) + }) after(() => { - fsStub.writeFile.restore(); - reporterOutputLoggerStub.error.restore(); - makeDirStubImpl = makeDirStubImplBackup; - }); - - it('should write to log', done => emitter.emit('end', () => { - assert.isOk(makeDirStubImpl.called); - assert.isOk(fsStub.writeFile.notCalled); - assert.isOk(reporterOutputLoggerStub.error.called); - done(); - })); - }); - }); + fsStub.writeFile.restore() + reporterOutputLoggerStub.error.restore() + makeDirStubImpl = makeDirStubImplBackup + }) + + it('should write to log', (done) => + emitter.emit('end', () => { + assert.isOk(makeDirStubImpl.called) + assert.isOk(fsStub.writeFile.notCalled) + assert.isOk(reporterOutputLoggerStub.error.called) + done() + })) + }) + }) describe('when test passes', () => { beforeEach(() => { test = { status: 'pass', - title: 'Passing Test', - }; - emitter.emit('test start', test); - emitter.emit('test pass', test); - }); + title: 'Passing Test' + } + emitter.emit('test start', test) + emitter.emit('test pass', test) + }) it('should write pass to the buffer', (done) => { - assert.isOk(mdReporter.buf.includes('Pass')); - done(); - }); - - describe('when details=true', () => it('should write details for passing tests', (done) => { - mdReporter.details = true; - emitter.emit('test pass', test); - assert.isOk(mdReporter.buf.includes('Request')); - done(); - })); - }); + assert.isOk(mdReporter.buf.includes('Pass')) + done() + }) + + describe('when details=true', () => + it('should write details for passing tests', (done) => { + mdReporter.details = true + emitter.emit('test pass', test) + assert.isOk(mdReporter.buf.includes('Request')) + done() + })) + }) describe('when test is skipped', () => { beforeEach(() => { test = { status: 'skipped', - title: 'Skipped Test', - }; - emitter.emit('test start', test); - emitter.emit('test skip', test); - }); + title: 'Skipped Test' + } + emitter.emit('test start', test) + emitter.emit('test skip', test) + }) it('should write skip to the buffer', (done) => { - assert.isOk(mdReporter.buf.includes('Skip')); - done(); - }); - }); + assert.isOk(mdReporter.buf.includes('Skip')) + done() + }) + }) describe('when test fails', () => { beforeEach(() => { test = { status: 'failed', - title: 'Failed Test', - }; - emitter.emit('test start', test); - emitter.emit('test fail', test); - }); + title: 
'Failed Test' + } + emitter.emit('test start', test) + emitter.emit('test fail', test) + }) it('should write fail to the buffer', (done) => { - assert.isOk(mdReporter.buf.includes('Fail')); - done(); - }); - }); + assert.isOk(mdReporter.buf.includes('Fail')) + done() + }) + }) describe('when test errors', () => { beforeEach(() => { test = { status: 'error', - title: 'Errored Test', - }; - emitter.emit('test start', test); - emitter.emit('test error', new Error('Error'), test); - }); + title: 'Errored Test' + } + emitter.emit('test start', test) + emitter.emit('test error', new Error('Error'), test) + }) it('should write error to the buffer', (done) => { - assert.isOk(mdReporter.buf.includes('Error')); - done(); - }); - }); -}); + assert.isOk(mdReporter.buf.includes('Error')) + done() + }) + }) +}) diff --git a/test/unit/reporters/NyanReporter-test.js b/test/unit/reporters/NyanReporter-test.js index eabad11ff..8dd5d000c 100644 --- a/test/unit/reporters/NyanReporter-test.js +++ b/test/unit/reporters/NyanReporter-test.js @@ -1,30 +1,31 @@ -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { assert } from 'chai' +import { EventEmitter } from 'events' -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' +const proxyquire = noCallThru() const NyanCatReporter = proxyquire('../../../lib/reporters/NyanReporter', { - './reporterOutputLogger': reporterOutputLoggerStub, -}); + './reporterOutputLogger': reporterOutputLoggerStub +}).default describe('NyanCatReporter', () => { - let emitter; - let stats; - let nyanReporter; + let emitter + let stats + let nyanReporter before(() => { - reporterOutputLoggerStub.transports.console.silent = true; - }); + reporterOutputLoggerStub.transports.console.silent = true + }) after(() => { - reporterOutputLoggerStub.transports.console.silent = false; - }); + reporterOutputLoggerStub.transports.console.silent = false + }) beforeEach(() => { - emitter = new EventEmitter(); + emitter = new EventEmitter() stats = { tests: 0, failures: 0, @@ -33,144 +34,147 @@ describe('NyanCatReporter', () => { skipped: 0, start: 0, end: 0, - duration: 0, - }; - nyanReporter = new NyanCatReporter(emitter, stats); - }); + duration: 0 + } + nyanReporter = new NyanCatReporter(emitter, stats) + }) describe('when starting', () => { beforeEach(() => { - sinon.spy(nyanReporter, 'cursorHide'); - sinon.spy(nyanReporter, 'draw'); - sinon.stub(nyanReporter, 'write'); - }); + sinon.spy(nyanReporter, 'cursorHide') + sinon.spy(nyanReporter, 'draw') + sinon.stub(nyanReporter, 'write') + }) afterEach(() => { - nyanReporter.cursorHide.restore(); - nyanReporter.draw.restore(); - nyanReporter.write.restore(); - }); - - it('should hide the cursor and draw the cat', done => emitter.emit('start', '', () => { - assert.isOk(nyanReporter.cursorHide.calledOnce); - assert.isOk(nyanReporter.draw.calledOnce); - done(); - })); - }); + nyanReporter.cursorHide.restore() + nyanReporter.draw.restore() + nyanReporter.write.restore() + }) + + it('should hide the cursor and draw the cat', (done) => + emitter.emit('start', '', () => { + assert.isOk(nyanReporter.cursorHide.calledOnce) + assert.isOk(nyanReporter.draw.calledOnce) + done() + })) + }) describe('when ending', () => { beforeEach(() => { - 
sinon.spy(reporterOutputLoggerStub, 'complete'); - sinon.spy(nyanReporter, 'draw'); - sinon.stub(nyanReporter, 'write'); - }); + sinon.spy(reporterOutputLoggerStub, 'complete') + sinon.spy(nyanReporter, 'draw') + sinon.stub(nyanReporter, 'write') + }) afterEach(() => { - reporterOutputLoggerStub.complete.restore(); - nyanReporter.draw.restore(); - nyanReporter.write.restore(); - }); + reporterOutputLoggerStub.complete.restore() + nyanReporter.draw.restore() + nyanReporter.write.restore() + }) - it('should log that testing is complete', done => emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.complete.calledTwice); - done(); - })); + it('should log that testing is complete', (done) => + emitter.emit('end', () => { + assert.isOk(reporterOutputLoggerStub.complete.calledTwice) + done() + })) describe('when there are failures', () => { beforeEach(() => { const test = { status: 'fail', - title: 'failing test', - }; - nyanReporter.errors = [test]; - emitter.emit('test start', test); - sinon.spy(reporterOutputLoggerStub, 'fail'); - }); - - afterEach(() => reporterOutputLoggerStub.fail.restore()); - - it('should log the failures at the end of testing', done => emitter.emit('end', () => { - assert.isOk(reporterOutputLoggerStub.fail.calledTwice); - done(); - })); - }); - }); + title: 'failing test' + } + nyanReporter.errors = [test] + emitter.emit('test start', test) + sinon.spy(reporterOutputLoggerStub, 'fail') + }) + + afterEach(() => reporterOutputLoggerStub.fail.restore()) + + it('should log the failures at the end of testing', (done) => + emitter.emit('end', () => { + assert.isOk(reporterOutputLoggerStub.fail.calledTwice) + done() + })) + }) + }) describe('when test finished', () => { describe('when test passes', () => { beforeEach(() => { const test = { status: 'pass', - title: 'Passing Test', - }; - sinon.stub(nyanReporter, 'write'); - sinon.spy(nyanReporter, 'draw'); - emitter.emit('test pass', test); - }); + title: 'Passing Test' + } + sinon.stub(nyanReporter, 'write') + sinon.spy(nyanReporter, 'draw') + emitter.emit('test pass', test) + }) afterEach(() => { - nyanReporter.draw.restore(); - nyanReporter.write.restore(); - }); + nyanReporter.draw.restore() + nyanReporter.write.restore() + }) - it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)); - }); + it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)) + }) describe('when test is skipped', () => { beforeEach(() => { const test = { status: 'skipped', - title: 'Skipped Test', - }; - sinon.spy(nyanReporter, 'draw'); - sinon.stub(nyanReporter, 'write'); - emitter.emit('test skip', test); - }); + title: 'Skipped Test' + } + sinon.spy(nyanReporter, 'draw') + sinon.stub(nyanReporter, 'write') + emitter.emit('test skip', test) + }) afterEach(() => { - nyanReporter.draw.restore(); - nyanReporter.write.restore(); - }); + nyanReporter.draw.restore() + nyanReporter.write.restore() + }) - it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)); - }); + it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)) + }) describe('when test fails', () => { beforeEach(() => { const test = { status: 'failed', - title: 'Failed Test', - }; - sinon.spy(nyanReporter, 'draw'); - sinon.stub(nyanReporter, 'write'); - emitter.emit('test fail', test); - }); + title: 'Failed Test' + } + sinon.spy(nyanReporter, 'draw') + sinon.stub(nyanReporter, 'write') + emitter.emit('test fail', test) + }) afterEach(() => { - nyanReporter.draw.restore(); - nyanReporter.write.restore(); - 
}); + nyanReporter.draw.restore() + nyanReporter.write.restore() + }) - it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)); - }); + it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)) + }) describe('when test errors', () => { beforeEach(() => { const test = { status: 'error', - title: 'Errored Test', - }; - sinon.spy(nyanReporter, 'draw'); - sinon.stub(nyanReporter, 'write'); - emitter.emit('test error', new Error('Error'), test); - }); + title: 'Errored Test' + } + sinon.spy(nyanReporter, 'draw') + sinon.stub(nyanReporter, 'write') + emitter.emit('test error', new Error('Error'), test) + }) afterEach(() => { - nyanReporter.write.restore(); - nyanReporter.draw.restore(); - }); - - it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)); - }); - }); -}); + nyanReporter.write.restore() + nyanReporter.draw.restore() + }) + + it('should draw the cat', () => assert.isOk(nyanReporter.draw.calledOnce)) + }) + }) +}) diff --git a/test/unit/reporters/XUnitReporter-test.js b/test/unit/reporters/XUnitReporter-test.js index c614b5666..98329a1e3 100644 --- a/test/unit/reporters/XUnitReporter-test.js +++ b/test/unit/reporters/XUnitReporter-test.js @@ -1,171 +1,177 @@ -const fsStub = require('fs'); -const proxyquire = require('proxyquire').noCallThru(); -const sinon = require('sinon'); +import fsStub from 'fs' +import { noCallThru } from 'proxyquire' +import sinon from 'sinon' -const { assert } = require('chai'); -const { EventEmitter } = require('events'); +import { assert } from 'chai' +import { EventEmitter } from 'events' -const loggerStub = require('../../../lib/logger'); -const reporterOutputLoggerStub = require('../../../lib/reporters/reporterOutputLogger'); +import loggerStub from '../../../lib/logger' +import reporterOutputLoggerStub from '../../../lib/reporters/reporterOutputLogger' -const makeDirStub = (input, options) => makeDirStubImpl(input, options); -let makeDirStubImpl = () => Promise.resolve(); -const makeDirStubImplBackup = makeDirStubImpl; +const makeDirStub = (input, options) => makeDirStubImpl(input, options) +let makeDirStubImpl = () => Promise.resolve() +const makeDirStubImplBackup = makeDirStubImpl + +const proxyquire = noCallThru() const XUnitReporter = proxyquire('../../../lib/reporters/XUnitReporter', { '../logger': loggerStub, './reporterOutputLogger': reporterOutputLoggerStub, fs: fsStub, - 'make-dir': makeDirStub, -}); + 'make-dir': makeDirStub +}).default describe('XUnitReporter', () => { - let test = {}; + let test = {} before(() => { - loggerStub.transports.console.silent = true; - reporterOutputLoggerStub.transports.console.silent = true; - }); + loggerStub.transports.console.silent = true + reporterOutputLoggerStub.transports.console.silent = true + }) after(() => { - loggerStub.transports.console.silent = false; - reporterOutputLoggerStub.transports.console.silent = false; - }); + loggerStub.transports.console.silent = false + reporterOutputLoggerStub.transports.console.silent = false + }) describe('when creating', () => { describe('when file exists', () => { before(() => { - sinon.stub(fsStub, 'existsSync').callsFake(() => true); - sinon.stub(fsStub, 'unlinkSync').callsFake(() => true); - sinon.stub(loggerStub, 'warn'); - }); + sinon.stub(fsStub, 'existsSync').callsFake(() => true) + sinon.stub(fsStub, 'unlinkSync').callsFake(() => true) + sinon.stub(loggerStub, 'warn') + }) after(() => { - fsStub.existsSync.restore(); - fsStub.unlinkSync.restore(); - loggerStub.warn.restore(); - }); + 
+        fsStub.existsSync.restore()
+        fsStub.unlinkSync.restore()
+        loggerStub.warn.restore()
+      })

       it('should warn about the existing file', () => {
-        const emitter = new EventEmitter();
-        (new XUnitReporter(emitter, {}, 'test.xml'));
-        assert.isOk(loggerStub.warn.called);
-      });
-    });
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {}, 'test.xml')
+        assert.isOk(loggerStub.warn.called)
+      })
+    })

     describe('when file does not exist', () => {
       before(() => {
-        sinon.stub(fsStub, 'existsSync').callsFake(() => false);
-        sinon.stub(fsStub, 'unlinkSync');
-      });
+        sinon.stub(fsStub, 'existsSync').callsFake(() => false)
+        sinon.stub(fsStub, 'unlinkSync')
+      })

       after(() => {
-        fsStub.existsSync.restore();
-        fsStub.unlinkSync.restore();
-      });
+        fsStub.existsSync.restore()
+        fsStub.unlinkSync.restore()
+      })

       it('should create the file', () => {
-        const emitter = new EventEmitter();
-        (new XUnitReporter(emitter, {}, 'test.xml'));
-        assert.isOk(fsStub.unlinkSync.notCalled);
-      });
-    });
-  });
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {}, 'test.xml')
+        assert.isOk(fsStub.unlinkSync.notCalled)
+      })
+    })
+  })

   describe('when starting', () => {
     describe('when can create output directory', () => {
       beforeEach(() => {
-        sinon.stub(fsStub, 'appendFileSync');
-        makeDirStubImpl = sinon.spy(makeDirStubImpl);
-      });
+        sinon.stub(fsStub, 'appendFileSync')
+        makeDirStubImpl = sinon.spy(makeDirStubImpl)
+      })

       afterEach(() => {
-        fsStub.appendFileSync.restore();
-        makeDirStubImpl = makeDirStubImplBackup;
-      });
+        fsStub.appendFileSync.restore()
+        makeDirStubImpl = makeDirStubImplBackup
+      })

       it('should write opening to file', (done) => {
-        const emitter = new EventEmitter();
-        (new XUnitReporter(emitter, {}, 'test.xml'));
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {}, 'test.xml')
         emitter.emit('start', '', () => {
-          assert.isOk(makeDirStubImpl.called);
-          assert.isOk(fsStub.appendFileSync.called);
-          done();
-        });
-      });
-    });
+          assert.isOk(makeDirStubImpl.called)
+          assert.isOk(fsStub.appendFileSync.called)
+          done()
+        })
+      })
+    })

     describe('when cannot create output directory', () => {
       beforeEach(() => {
-        sinon.stub(fsStub, 'appendFileSync');
-        sinon.stub(reporterOutputLoggerStub, 'error');
-        makeDirStubImpl = sinon.stub().callsFake(() => Promise.reject(new Error()));
-      });
+        sinon.stub(fsStub, 'appendFileSync')
+        sinon.stub(reporterOutputLoggerStub, 'error')
+        makeDirStubImpl = sinon
+          .stub()
+          .callsFake(() => Promise.reject(new Error()))
+      })

       after(() => {
-        fsStub.appendFileSync.restore();
-        reporterOutputLoggerStub.error.restore();
-        makeDirStubImpl = makeDirStubImplBackup;
-      });
+        fsStub.appendFileSync.restore()
+        reporterOutputLoggerStub.error.restore()
+        makeDirStubImpl = makeDirStubImplBackup
+      })

       it('should write to log', (done) => {
-        const emitter = new EventEmitter();
-        (new XUnitReporter(emitter, {}, 'test.xml'));
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {}, 'test.xml')
         emitter.emit('start', '', () => {
-          assert.isOk(makeDirStubImpl.called);
-          assert.isOk(fsStub.appendFileSync.notCalled);
-          assert.isOk(reporterOutputLoggerStub.error.called);
-          done();
-        });
-      });
-    });
-  });
+          assert.isOk(makeDirStubImpl.called)
+          assert.isOk(fsStub.appendFileSync.notCalled)
+          assert.isOk(reporterOutputLoggerStub.error.called)
+          done()
+        })
+      })
+    })
+  })

   describe('when ending', () => {
     beforeEach(() => {
-      sinon.stub(fsStub, 'appendFileSync');
-      sinon.stub(fsStub, 'readFile');
-      fsStub.readFile.yields(null, 'da\nta');
-      sinon.stub(fsStub, 'writeFile');
-      fsStub.writeFile.yields(null);
-    });
+      sinon.stub(fsStub, 'appendFileSync')
+      sinon.stub(fsStub, 'readFile')
+      fsStub.readFile.yields(null, 'da\nta')
+      sinon.stub(fsStub, 'writeFile')
+      fsStub.writeFile.yields(null)
+    })

     afterEach(() => {
-      fsStub.appendFileSync.restore();
-      fsStub.readFile.restore();
-      fsStub.writeFile.restore();
-    });
+      fsStub.appendFileSync.restore()
+      fsStub.readFile.restore()
+      fsStub.writeFile.restore()
+    })

     describe('when there is one test', () => {
       it('should write tests to file', () => {
-        const emitter = new EventEmitter();
-        const xUnitReporter = new XUnitReporter(emitter, {});
-        xUnitReporter.stats.tests = 1;
-        emitter.emit('test pass', test);
-        assert.isOk(fsStub.appendFileSync.called);
-      });
-
-      describe('when the file writes successfully', () => it('should read the file and update the stats', (done) => {
-        const emitter = new EventEmitter();
-        const xUnitReporter = new XUnitReporter(emitter, {});
-        xUnitReporter.stats.tests = 1;
-
+        const emitter = new EventEmitter()
+        const xUnitReporter = new XUnitReporter(emitter, {})
+        xUnitReporter.stats.tests = 1
+        emitter.emit('test pass', test)
+        assert.isOk(fsStub.appendFileSync.called)
+      })
+
+      describe('when the file writes successfully', () =>
+        it('should read the file and update the stats', (done) => {
+          const emitter = new EventEmitter()
+          const xUnitReporter = new XUnitReporter(emitter, {})
+          xUnitReporter.stats.tests = 1
+
+          emitter.emit('end', () => {
+            assert.isOk(fsStub.writeFile.called)
+            done()
+          })
+        }))
+    })
+
+    describe('when there are no tests', () =>
+      it('should write empty suite', (done) => {
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {})
         emitter.emit('end', () => {
-          assert.isOk(fsStub.writeFile.called);
-          done();
-        });
-      }));
-    });
-
-    describe('when there are no tests', () => it('should write empty suite', (done) => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}));
-      emitter.emit('end', () => {
-        assert.isOk(fsStub.writeFile.called);
-        done();
-      });
-    }));
-  });
+          assert.isOk(fsStub.writeFile.called)
+          done()
+        })
+      }))
+  })

   describe('when test passes', () => {
     before(() => {
@@ -176,106 +182,107 @@ describe('XUnitReporter', () => {
           body: '{ "test": "body" }',
           schema: '{ "test": "schema" }',
           headers: {
-            Accept: 'application/json',
-          },
+            Accept: 'application/json'
+          }
         },
         expected: {
           body: '{ "test": "body" }',
           schema: '{ "test": "schema" }',
           headers: {
-            'Content-Type': 'application/json',
-          },
+            'Content-Type': 'application/json'
+          }
         },
         actual: {
           body: '',
           headers: {
-            'Content-Type': 'text/html',
-          },
-        },
-      };
-    });
+            'Content-Type': 'text/html'
+          }
+        }
+      }
+    })

-    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'));
+    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'))

-    afterEach(() => fsStub.appendFileSync.restore());
+    afterEach(() => fsStub.appendFileSync.restore())

     it('should write a passing test', () => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}, 'test.xml'));
-      emitter.emit('test start', test);
-      emitter.emit('test pass', test);
-      assert.isOk(fsStub.appendFileSync.called);
-    });
-
-    describe('when details=true', () => it('should write details for passing tests', () => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}, 'test.xml', true));
-      emitter.emit('test start', test);
-      emitter.emit('test pass', test);
-      assert.isOk(fsStub.appendFileSync.called);
-    }));
-  });
+      const emitter = new EventEmitter()
+      new XUnitReporter(emitter, {}, 'test.xml')
+      emitter.emit('test start', test)
+      emitter.emit('test pass', test)
+      assert.isOk(fsStub.appendFileSync.called)
+    })
+
+    describe('when details=true', () =>
+      it('should write details for passing tests', () => {
+        const emitter = new EventEmitter()
+        new XUnitReporter(emitter, {}, 'test.xml', true)
+        emitter.emit('test start', test)
+        emitter.emit('test pass', test)
+        assert.isOk(fsStub.appendFileSync.called)
+      }))
+  })

   describe('when test is skipped', () => {
     before(() => {
       test = {
         status: 'skipped',
-        title: 'Skipped Test',
-      };
-    });
+        title: 'Skipped Test'
+      }
+    })

-    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'));
+    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'))

-    afterEach(() => fsStub.appendFileSync.restore());
+    afterEach(() => fsStub.appendFileSync.restore())

     it('should write a skipped test', () => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}, 'test.xml'));
-      emitter.emit('test start', test);
-      emitter.emit('test skip', test);
-      assert.isOk(fsStub.appendFileSync.called);
-    });
-  });
+      const emitter = new EventEmitter()
+      new XUnitReporter(emitter, {}, 'test.xml')
+      emitter.emit('test start', test)
+      emitter.emit('test skip', test)
+      assert.isOk(fsStub.appendFileSync.called)
+    })
+  })

   describe('when test fails', () => {
     before(() => {
       test = {
         status: 'failed',
-        title: 'Failed Test',
-      };
-    });
+        title: 'Failed Test'
+      }
+    })

-    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'));
+    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'))

-    afterEach(() => fsStub.appendFileSync.restore());
+    afterEach(() => fsStub.appendFileSync.restore())

     it('should write a failed test', () => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}, 'test.xml'));
-      emitter.emit('test start', test);
-      emitter.emit('test fail', test);
-      assert.isOk(fsStub.appendFileSync.called);
-    });
-  });
+      const emitter = new EventEmitter()
+      new XUnitReporter(emitter, {}, 'test.xml')
+      emitter.emit('test start', test)
+      emitter.emit('test fail', test)
+      assert.isOk(fsStub.appendFileSync.called)
+    })
+  })

   describe('when test errors', () => {
     before(() => {
       test = {
         status: 'error',
-        title: 'Errored Test',
-      };
-    });
+        title: 'Errored Test'
+      }
+    })

-    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'));
+    beforeEach(() => sinon.stub(fsStub, 'appendFileSync'))

-    afterEach(() => fsStub.appendFileSync.restore());
+    afterEach(() => fsStub.appendFileSync.restore())

     it('should write an error test', () => {
-      const emitter = new EventEmitter();
-      (new XUnitReporter(emitter, {}, 'test.xml'));
-      emitter.emit('test start', test);
-      emitter.emit('test error', new Error('Error'), test);
-      assert.isOk(fsStub.appendFileSync.called);
-    });
-  });
-});
+      const emitter = new EventEmitter()
+      new XUnitReporter(emitter, {}, 'test.xml')
+      emitter.emit('test start', test)
+      emitter.emit('test error', new Error('Error'), test)
+      assert.isOk(fsStub.appendFileSync.called)
+    })
+  })
+})
diff --git a/test/unit/resolveLocations-test.js b/test/unit/resolveLocations-test.js
index 0ce46755e..0d79c8a97 100644
--- a/test/unit/resolveLocations-test.js
+++ b/test/unit/resolveLocations-test.js
@@ -1,67 +1,66 @@
-const path = require('path');
-const { assert } = require('chai');
-
-const resolveLocations = require('../../lib/resolveLocations');
+import * as path from 'path'
+import { assert } from 'chai'
+import resolveLocations from '../../lib/resolveLocations'

 describe('resolveLocations()', () => {
-  const workingDirectory = path.join(__filename, '..', '..', 'fixtures');
+  const workingDirectory = path.join(__filename, '..', '..', 'fixtures')

   describe('when given no locations', () => {
     it('produces no results', () => {
-      const locations = resolveLocations(workingDirectory, []);
-      assert.deepEqual(locations, []);
-    });
-  });
+      const locations = resolveLocations(workingDirectory, [])
+      assert.deepEqual(locations, [])
+    })
+  })

   describe('when given paths', () => {
     it('resolves them into absolute paths', () => {
       const locations = resolveLocations(workingDirectory, [
         './multifile/*.apib',
-        './apiary.apib',
-      ]);
+        './apiary.apib'
+      ])
       assert.deepEqual(locations, [
         path.join(workingDirectory, '/multifile/greeting.apib'),
         path.join(workingDirectory, '/multifile/message.apib'),
         path.join(workingDirectory, '/multifile/name.apib'),
-        path.join(workingDirectory, 'apiary.apib'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'apiary.apib')
+      ])
+    })
+  })

   describe('when given non-existing paths', () => {
     it('throws an error', () => {
       assert.throws(() => {
-        resolveLocations(workingDirectory, ['./foo/bar/moo/boo/*.apib']);
-      }, './foo/bar/moo/boo/*.apib');
-    });
-  });
+        resolveLocations(workingDirectory, ['./foo/bar/moo/boo/*.apib'])
+      }, './foo/bar/moo/boo/*.apib')
+    })
+  })

   describe('when given HTTP URLs', () => {
     it('recognizes they are URLs', () => {
       const locations = resolveLocations(workingDirectory, [
         'http://example.com/foo.yaml',
-        './apiary.apib',
-      ]);
+        './apiary.apib'
+      ])
       assert.deepEqual(locations, [
         'http://example.com/foo.yaml',
-        path.join(workingDirectory, 'apiary.apib'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'apiary.apib')
+      ])
+    })
+  })

   describe('when given HTTPS URLs', () => {
     it('recognizes they are URLs', () => {
       const locations = resolveLocations(workingDirectory, [
         'https://example.com/foo.yaml',
-        './apiary.apib',
-      ]);
+        './apiary.apib'
+      ])
       assert.deepEqual(locations, [
         'https://example.com/foo.yaml',
-        path.join(workingDirectory, 'apiary.apib'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'apiary.apib')
+      ])
+    })
+  })

   describe('when given duplicate locations', () => {
     it('returns only the distinct ones', () => {
@@ -69,14 +68,14 @@ describe('resolveLocations()', () => {
         './apiary.apib',
         'http://example.com/foo.yaml',
         'http://example.com/foo.yaml',
-        './apiar*.apib',
-      ]);
+        './apiar*.apib'
+      ])
       assert.deepEqual(locations, [
         path.join(workingDirectory, 'apiary.apib'),
-        'http://example.com/foo.yaml',
-      ]);
-    });
-  });
+        'http://example.com/foo.yaml'
+      ])
+    })
+  })

   describe('when given various locations', () => {
     it('keeps their original order', () => {
@@ -84,16 +83,16 @@ describe('resolveLocations()', () => {
         './apiar*.apib',
         'https://example.com/foo.yaml',
         './multifile/*.apib',
-        'http://example.com/bar.yaml',
-      ]);
+        'http://example.com/bar.yaml'
+      ])
       assert.deepEqual(locations, [
         path.join(workingDirectory, 'apiary.apib'),
         'https://example.com/foo.yaml',
         path.join(workingDirectory, '/multifile/greeting.apib'),
         path.join(workingDirectory, '/multifile/message.apib'),
         path.join(workingDirectory, '/multifile/name.apib'),
-        'http://example.com/bar.yaml',
-      ]);
-    });
-  });
-});
+        'http://example.com/bar.yaml'
+      ])
+    })
+  })
+})
diff --git a/test/unit/resolveModule-test.js b/test/unit/resolveModule-test.js
index c671811a0..810a01688 100644
--- a/test/unit/resolveModule-test.js
+++ b/test/unit/resolveModule-test.js
@@ -1,30 +1,29 @@
-const path = require('path');
-const { assert } = require('chai');
-
-const resolveModule = require('../../lib/resolveModule');
+import path from 'path'
+import { assert } from 'chai'
+import resolveModule from '../../lib/resolveModule'

 describe('resolveModule()', () => {
-  const workingDirectory = path.join(__dirname, '..', 'fixtures');
+  const workingDirectory = path.join(__dirname, '..', 'fixtures')

   it('resolves a local module name', () => {
     assert.equal(
       resolveModule(workingDirectory, 'requiredModule'),
       path.join(workingDirectory, 'requiredModule')
-    );
-  });
+    )
+  })

   it('resolves a local module name with .js extension', () => {
     assert.equal(
       resolveModule(workingDirectory, 'requiredModule.js'),
       path.join(workingDirectory, 'requiredModule.js')
-    );
-  });
+    )
+  })

   it('resolves an installed module name', () => {
     assert.equal(
       resolveModule(workingDirectory, 'coffeescript/register'),
       'coffeescript/register'
-    );
-  });
-});
+    )
+  })
+})
diff --git a/test/unit/resolvePaths-test.js b/test/unit/resolvePaths-test.js
index 211990630..d47275a73 100644
--- a/test/unit/resolvePaths-test.js
+++ b/test/unit/resolvePaths-test.js
@@ -1,87 +1,93 @@
-const path = require('path');
-const { assert } = require('chai');
-
-const resolvePaths = require('../../lib/resolvePaths');
+import path from 'path'
+import { assert } from 'chai'
+import resolvePaths from '../../lib/resolvePaths'

 describe('resolvePaths()', () => {
-  const workingDirectory = path.join(__filename, '..', '..', 'fixtures');
+  const workingDirectory = path.join(__filename, '..', '..', 'fixtures')

   describe('when given no paths', () => {
     it('produces no results', () => {
-      const paths = resolvePaths(workingDirectory, []);
-      assert.deepEqual(paths, []);
-    });
-  });
+      const paths = resolvePaths(workingDirectory, [])
+      assert.deepEqual(paths, [])
+    })
+  })

   describe('when given existing absolute filenames', () => {
     it('resolves them into absolute paths', () => {
       const paths = resolvePaths(workingDirectory, [
         path.join(workingDirectory, 'hooks.js'),
-        path.join(workingDirectory, 'non-js-hooks.rb'),
-      ]);
+        path.join(workingDirectory, 'non-js-hooks.rb')
+      ])
       assert.deepEqual(paths, [
         path.join(workingDirectory, 'hooks.js'),
-        path.join(workingDirectory, 'non-js-hooks.rb'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'non-js-hooks.rb')
+      ])
+    })
+  })

   describe('when given existing relative filenames', () => {
     it('resolves them into absolute paths', () => {
-      const paths = resolvePaths(workingDirectory, ['./hooks.js', './non-js-hooks.rb']);
+      const paths = resolvePaths(workingDirectory, [
+        './hooks.js',
+        './non-js-hooks.rb'
+      ])
       assert.deepEqual(paths, [
         path.join(workingDirectory, 'hooks.js'),
-        path.join(workingDirectory, 'non-js-hooks.rb'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'non-js-hooks.rb')
+      ])
+    })
+  })

   describe('when given non-existing filenames', () => {
     it('throws an error', () => {
       assert.throws(() => {
-        resolvePaths(workingDirectory, ['./hooks.js', './foo/bar/42']);
-      }, './foo/bar/42');
-    });
-  });
+        resolvePaths(workingDirectory, ['./hooks.js', './foo/bar/42'])
+      }, './foo/bar/42')
+    })
+  })

   describe('when given glob pattern resolving to existing files', () => {
     it('resolves them into absolute paths', () => {
-      const paths = resolvePaths(workingDirectory, ['./**/hooks.js']);
-      assert.deepEqual(paths, [
-        path.join(workingDirectory, 'hooks.js'),
-      ]);
-    });
-  });
+      const paths = resolvePaths(workingDirectory, ['./**/hooks.js'])
+      assert.deepEqual(paths, [path.join(workingDirectory, 'hooks.js')])
+    })
+  })

   describe('when given glob pattern resolving to no files', () => {
     it('throws an error', () => {
       assert.throws(() => {
-        resolvePaths(workingDirectory, ['./**/hooks.js', './**/foo/bar/foobar.js']);
-      }, './**/foo/bar/foobar.js');
-    });
-  });
+        resolvePaths(workingDirectory, [
+          './**/hooks.js',
+          './**/foo/bar/foobar.js'
+        ])
+      }, './**/foo/bar/foobar.js')
+    })
+  })

   describe('when given both globs and filenames', () => {
     it('resolves them into absolute paths', () => {
-      const paths = resolvePaths(workingDirectory, ['./non-js-hooks.rb', './**/hooks.js']);
+      const paths = resolvePaths(workingDirectory, [
+        './non-js-hooks.rb',
+        './**/hooks.js'
+      ])
       assert.deepEqual(paths, [
         path.join(workingDirectory, 'hooks.js'),
-        path.join(workingDirectory, 'non-js-hooks.rb'),
-      ]);
-    });
+        path.join(workingDirectory, 'non-js-hooks.rb')
+      ])
+    })

     it('throws an error on non-existing filenams', () => {
       assert.throws(() => {
-        resolvePaths(workingDirectory, ['./**/hooks.js', './foo/bar/42']);
-      }, './foo/bar/42');
-    });
+        resolvePaths(workingDirectory, ['./**/hooks.js', './foo/bar/42'])
+      }, './foo/bar/42')
+    })

     it('throws an error on globs resolving to no files', () => {
       assert.throws(() => {
-        resolvePaths(workingDirectory, ['./hooks.js', './**/foo/bar/foobar.js']);
-      }, './**/foo/bar/foobar.js');
-    });
+        resolvePaths(workingDirectory, ['./hooks.js', './**/foo/bar/foobar.js'])
+      }, './**/foo/bar/foobar.js')
+    })

     it('returns the absolute paths alphabetically sorted by their basename', () => {
       const paths = resolvePaths(workingDirectory, [
@@ -93,8 +99,8 @@ describe('resolvePaths()', () => {
         './hooks-glob/bar/b.js',
         './hooks-glob/baz/c.js',
         './hooks-glob/foo/o.js',
-        './hooks-glob/bar/p.js',
-      ]);
+        './hooks-glob/bar/p.js'
+      ])
       assert.deepEqual(paths, [
         path.join(workingDirectory, 'hooks-glob/foo/a.js'),
         path.join(workingDirectory, 'hooks-glob/bar/b.js'),
@@ -106,23 +112,23 @@ describe('resolvePaths()', () => {
         path.join(workingDirectory, 'test_hooks.coffee'),
         path.join(workingDirectory, 'hooks-glob/baz/x.js'),
         path.join(workingDirectory, 'hooks-glob/foo/y.js'),
-        path.join(workingDirectory, 'hooks-glob/bar/z.js'),
-      ]);
-    });
-  });
+        path.join(workingDirectory, 'hooks-glob/bar/z.js')
+      ])
+    })
+  })

   describe('when given duplicate paths', () => {
     it('returns only the distinct ones', () => {
       const paths = resolvePaths(workingDirectory, [
         './test2_hooks.js',
         './**/*_hooks.*',
-        'multifile/multifile_hooks.coffee',
-      ]);
+        'multifile/multifile_hooks.coffee'
+      ])
       assert.deepEqual(paths, [
         path.join(workingDirectory, 'multifile/multifile_hooks.coffee'),
         path.join(workingDirectory, 'test2_hooks.js'),
-        path.join(workingDirectory, 'test_hooks.coffee'),
-      ]);
-    });
-  });
-});
+        path.join(workingDirectory, 'test_hooks.coffee')
+      ])
+    })
+  })
+})
diff --git a/test/unit/sortTransactions-test.ts b/test/unit/sortTransactions-test.ts
new file mode 100644
index 000000000..68f99a789
--- /dev/null
+++ b/test/unit/sortTransactions-test.ts
@@ -0,0 +1,34 @@
+import R from 'ramda'
+import { expect } from 'chai'
+import { RESTMethod, Transaction } from '../../lib/__general'
+import sortTransactions from '../../lib/sortTransactions'
+
+const createTransaction = R.mergeDeepRight<Partial<Transaction>>({
+  id: 'abcd',
+  name: 'test-transaction',
+  host: 'localhost',
+  protocol: 'http:'
+})
+
+describe('sortTransactions', () => {
+  describe('given transactions in arbitrary order', () => {
+    it('should sort transactions according to the internal manifest', () => {
+      const getTransaction = createTransaction({
+        request: {
+          method: RESTMethod.GET,
+          url: '/endpoint'
+        }
+      })
+      const optionsTransaction = createTransaction({
+        request: {
+          method: RESTMethod.OPTIONS,
+          url: '/endpoint'
+        }
+      })
+      const transactions: Transaction[] = [getTransaction, optionsTransaction]
+      const result = sortTransactions(transactions)
+
+      expect(result).to.deep.equal([optionsTransaction, getTransaction])
+    })
+  })
+})
diff --git a/test/unit/transactionRunner-test.js b/test/unit/transactionRunner-test.js
index e6ebd18bd..2ba5d3ed8 100644
--- a/test/unit/transactionRunner-test.js
+++ b/test/unit/transactionRunner-test.js
@@ -1,24 +1,22 @@
-const bodyParser = require('body-parser');
-const clone = require('clone');
-const express = require('express');
-const htmlStub = require('html');
-const nock = require('nock');
-const sinon = require('sinon');
-const proxyquire = require('proxyquire');
-const { assert } = require('chai');
-const { EventEmitter } = require('events');
+import bodyParser from 'body-parser'
+import clone from 'clone'
+import express from 'express'
+import htmlStub from 'html'
+import nock from 'nock'
+import sinon from 'sinon'
+import proxyquire from 'proxyquire'
+import { assert } from 'chai'
+import { EventEmitter } from 'events'
+import addHooks from '../../lib/addHooks'
+import loggerStub from '../../lib/logger'
+import Hooks from '../../lib/Hooks'

 nock.enableNetConnect();

-const addHooks = require('../../lib/addHooks');
-const loggerStub = require('../../lib/logger');
-
 const Runner = proxyquire('../../lib/TransactionRunner', {
   html: htmlStub,
   './logger': loggerStub,
-});
-
-const Hooks = require('../../lib/Hooks');
+}).default

 describe('TransactionRunner', () => {
   let server;
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 000000000..25be56f13
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,18 @@
+{
+  "compileOnSave": true,
+  "compilerOptions": {
+    "module": "commonjs",
+    "target": "es2017",
+    "lib": ["es2017"],
+    "rootDir": "./lib",
+    "outDir": "./build",
+    "moduleResolution": "node",
+    // "resolveJsonModule": true,
+    "esModuleInterop": true,
+    "allowJs": true,
+    "allowSyntheticDefaultImports": true,
+    "typeRoots": ["node_modules/@types"]
+  },
+  "include": ["./lib/**/*.ts", "./lib/**/*.js"],
+  "exclude": ["node_modules", "build"]
+}