diff --git a/messages/RunOutputProcessor.js b/messages/RunOutputProcessor.js index 7340a2432..1e6949496 100644 --- a/messages/RunOutputProcessor.js +++ b/messages/RunOutputProcessor.js @@ -1,7 +1,6 @@ module.exports = { "output": { "engineSummaryTemplate": "Executed %s, found %s violation(s) across %s file(s).", - "noViolationsDetected": "Executed engines: %s. No rule violations found.", "sevThresholdSummary": "Rule violations of severity %s or more severe were detected.", "writtenToConsole": "Rule violations were logged to the console.", "writtenToOutFile": "Rule violations were written to %s." diff --git a/src/lib/formatter/RuleResultRecombinator.ts b/src/lib/formatter/RuleResultRecombinator.ts index cc02eaa20..4975a71e2 100644 --- a/src/lib/formatter/RuleResultRecombinator.ts +++ b/src/lib/formatter/RuleResultRecombinator.ts @@ -110,12 +110,7 @@ export class RuleResultRecombinator { private static constructXml(results: RuleResult[]): string { let resultXml = ``; - // If the results were just an empty string, we can return it. - if (results.length === 0) { - return resultXml; - } - - const normalizeSeverity: boolean = results[0].violations.length > 0 && !(results[0].violations[0].normalizedSeverity === undefined) + const normalizeSeverity: boolean = results[0]?.violations.length > 0 && !(results[0]?.violations[0].normalizedSeverity === undefined) let problemCount = 0; @@ -188,10 +183,6 @@ export class RuleResultRecombinator { } private static constructJunit(results: RuleResult[]): string { - // If there are no results, we can just return an empty string. - if (!results || results.length === 0) { - return ''; - } // Otherwise, we'll need to start constructing our JUnit XML. To do that, we'll need a map from file names to // lists of the tags generated from violations found in the corresponding file. 
@@ -279,10 +270,6 @@ URL: ${url}`; } private static constructTable(results: RuleResult[]): RecombinedData { - // If the results were just an empty string, we can return it. - if (results.length === 0) { - return ''; - } const columns = this.violationsAreDfa(results) ? ['Source Location', 'Sink Location', 'Description', 'Category', 'URL'] : ['Location', 'Description', 'Category', 'URL']; @@ -341,10 +328,6 @@ URL: ${url}`; } private static constructJson(results: RuleResult[], verboseViolations = false): string { - if (results.length === 0) { - return ''; - } - if (verboseViolations) { const resultsVerbose = JSON.parse(JSON.stringify(results)) as RuleResult[]; for (const result of resultsVerbose) { @@ -363,12 +346,7 @@ URL: ${url}`; } private static async constructHtml(results: RuleResult[], verboseViolations = false): Promise<string> { - // If the results were just an empty string, we can return it. - if (results.length === 0) { - return ''; - } - - const normalizeSeverity: boolean = results[0].violations.length > 0 && !(results[0].violations[0].normalizedSeverity === undefined); + const normalizeSeverity: boolean = results[0]?.violations.length > 0 && !(results[0]?.violations[0].normalizedSeverity === undefined); const isDfa = this.violationsAreDfa(results); @@ -434,12 +412,8 @@ URL: ${url}`; } private static async constructCsv(results: RuleResult[]): Promise<string> { - // If the results were just an empty list, we can return an empty string - if (results.length === 0) { - return ''; - } const isDfa: boolean = this.violationsAreDfa(results); - const normalizeSeverity: boolean = results[0].violations.length > 0 && !(results[0].violations[0].normalizedSeverity === undefined) + const normalizeSeverity: boolean = results[0]?.violations.length > 0 && !(results[0]?.violations[0].normalizedSeverity === undefined) const csvRows = []; // There will always be columns for the problem counter and the severity. 
diff --git a/src/lib/util/RunOutputProcessor.ts b/src/lib/util/RunOutputProcessor.ts index 13f76214c..43c25b351 100644 --- a/src/lib/util/RunOutputProcessor.ts +++ b/src/lib/util/RunOutputProcessor.ts @@ -28,21 +28,11 @@ export class RunOutputProcessor { public processRunOutput(rrr: RecombinedRuleResults): AnyJson { - const {minSev, summaryMap, results} = rrr; - // If the results are an empty string, it means no violations were found. - if (results === '') { - // Build an appropriate message... - const msg = messages.getMessage('output.noViolationsDetected', [[...summaryMap.keys()].join(', ')]); - // ...log it to the console... - this.ux.log(msg); - // ...and return it for use with the --json flag. - return msg; - } + const {minSev, results} = rrr; - // If we actually have violations, there's some stuff we need to do with them. We'll build an array of message parts, - // and then log them all at the end. + // With the list of violations, we'll build an array of message parts, and then log them all at the end. let msgComponents: string[] = []; - // We need a summary of the information we were provided. + // We need a summary of the information we were provided (blank/empty if no violations). msgComponents = [...msgComponents, ...this.buildRunSummaryMsgParts(rrr)]; // We need to surface the results directly to the user, then add a message describing what we did. msgComponents.push(this.opts.outfile ? 
this.writeToOutfile(results) : this.writeToConsole(results)); diff --git a/test/commands/scanner/run.filters.test.ts b/test/commands/scanner/run.filters.test.ts index 8dc6945c1..c11424c74 100644 --- a/test/commands/scanner/run.filters.test.ts +++ b/test/commands/scanner/run.filters.test.ts @@ -19,7 +19,13 @@ describe('scanner:run tests that result in the use of RuleFilters', function () '--engine', 'eslint-lwc' ]) .it('LWC Engine Successfully parses LWC code', ctx => { - expect(ctx.stdout).to.contain('No rule violations found.'); + // If there's a summary, then it'll be separated from the CSV by an empty line. Throw it away. + const [csv, _] = ctx.stdout.trim().split(/\n\r?\n/); + + // Confirm there are no violations. + // Since it's a CSV, the rows themselves are separated by newline characters. + // The header should not have any newline characters after it. There should be no violation rows. + expect(csv.indexOf('\n')).to.equal(-1, "Should be no violations detected"); }); setupCommandTest diff --git a/test/commands/scanner/run.severity.test.ts b/test/commands/scanner/run.severity.test.ts index d5d7de7be..4e9682f45 100644 --- a/test/commands/scanner/run.severity.test.ts +++ b/test/commands/scanner/run.severity.test.ts @@ -20,7 +20,9 @@ describe('scanner:run', function () { '--severity-threshold', '3' ]) .it('When no violations are found, no error is thrown', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + const output = JSON.parse(ctx.stdout); + // There should be no violations (nor any exceptions). 
+ expect(output.length).to.equal(0, 'Should be no violations from one engine'); expect(ctx.stderr).to.not.contain(processorMessages.getMessage('output.sevThresholdSummary', ['3']), 'Error should not be present'); }); diff --git a/test/commands/scanner/run.test.ts b/test/commands/scanner/run.test.ts index f27a51e2d..3fdad9dd5 100644 --- a/test/commands/scanner/run.test.ts +++ b/test/commands/scanner/run.test.ts @@ -30,6 +30,12 @@ describe('scanner:run', function () { expect(violations[1]).to.match(/line="19".+rule="ApexUnitTestClassShouldHaveAsserts"/); } + function validateNoViolationsXmlOutput(xml: string): void { + // We'll look for the <violation> tag, so we can check for individual violations. + // we expect no violations + expect(xml.indexOf('<violation')).to.equal(-1, 'Should be no violations detected'); + } + describe('Test Case: XML format', () => { setupCommandTest .command(['scanner:run', @@ -57,8 +63,8 @@ describe('scanner:run', function () { '--ruleset', 'ApexUnit', '--format', 'xml' ]) - .it('When the file contains no violations, a message is logged to the console', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + .it('When the file contains no violations, XML with no violations is logged to the console', ctx => { + validateNoViolationsXmlOutput(ctx.stdout); }); }); @@ -180,6 +186,25 @@ describe('scanner:run', function () { const fileContents = fs.readFileSync('testout.xml').toString(); validateXmlOutput(fileContents); }); + + setupCommandTest + .command(['scanner:run', + '--target', path.join('test', 'code-fixtures', 'apex', 'YetAnotherTestClass.cls'), + '--ruleset', 'ApexUnit', + '--outfile', 'testout.xml' + ]) + .finally(ctx => { + // Regardless of what happens in the test itself, we need to delete the file we created. + if (fs.existsSync('testout.xml')) { + fs.unlinkSync('testout.xml'); + } + }) + .it('An XML file with no violations is created', ctx => { + // Verify that an empty XML file was actually created. 
+ expect(fs.existsSync('testout.xml')).to.equal(true, 'The command should have created an empty output file'); + const fileContents = fs.readFileSync('testout.xml').toString(); + validateNoViolationsXmlOutput(fileContents); + }); }); }); @@ -192,7 +217,7 @@ describe('scanner:run', function () { expect(summary).to.not.equal(null, 'Expected summary to be not null'); expect(summary).to.contain(processorMessages.getMessage('output.engineSummaryTemplate', ['pmd', 2, 1]), 'Summary should be correct'); } - // Since it's a CSV, the rows themselves are separated by newline chaacters, and there's a header row we + // Since it's a CSV, the rows themselves are separated by newline characters, and there's a header row we // need to discard. const rows = csv.trim().split('\n'); rows.shift(); @@ -209,6 +234,21 @@ describe('scanner:run', function () { expect(data[1][5]).to.equal('"ApexUnitTestClassShouldHaveAsserts"', 'Violation #2 should be of the expected type'); } + function validateNoViolationsCsvOutput(contents: string, expectSummary=true): void { + // If there's a summary, then it'll be separated from the CSV by an empty line. + const [csv, summary] = contents.trim().split(/\n\r?\n/); + if (expectSummary) { + expect(summary).to.not.equal(undefined, 'Expected summary to be not undefined'); + expect(summary).to.not.equal(null, 'Expected summary to be not null'); + expect(summary).to.contain(processorMessages.getMessage('output.engineSummaryTemplate', ['pmd', 2, 1]), 'Summary should be correct'); + } + + // Since it's a CSV, the rows themselves are separated by newline characters. + // Test to check there are no violations. + // There should be a header and no other lines, meaning no newline characters. 
+ expect(csv.indexOf('\n')).to.equal(-1, "Should be no violations detected"); + } + setupCommandTest .command(['scanner:run', '--target', path.join('test', 'code-fixtures', 'apex', 'SomeTestClass.cls'), @@ -236,7 +276,6 @@ describe('scanner:run', function () { // Verify that the correct message is displayed to user expect(ctx.stdout).to.contain(processorMessages.getMessage('output.engineSummaryTemplate', ['pmd', 2, 1]), 'Expected summary to be correct'); expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.csv'])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.noViolationsDetected', [])); // Verify that the file we wanted was actually created. expect(fs.existsSync('testout.csv')).to.equal(true, 'The command should have created the expected output file'); @@ -250,8 +289,8 @@ describe('scanner:run', function () { '--ruleset', 'ApexUnit', '--format', 'csv' ]) - .it('When no violations are detected, a message is logged to the console', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + .it('When no violations are detected, empty CSV is printed to the console', ctx => { + validateNoViolationsCsvOutput(ctx.stdout, false); }); setupCommandTest @@ -266,10 +305,12 @@ describe('scanner:run', function () { fs.unlinkSync('testout.csv'); } }) - .it('When --oufile is provided and no violations are detected, output file should not be created', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.csv'])); - expect(fs.existsSync('testout.csv')).to.be.false; + .it('When --outfile is provided and no violations are detected, CSV file with no violations is created', ctx => { + expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', 
['testout.csv'])); + + const fileContents = fs.readFileSync('testout.csv').toString(); + expect(fs.existsSync('testout.csv')).to.be.true; + validateNoViolationsCsvOutput(fileContents, false); }); }); @@ -290,6 +331,15 @@ describe('scanner:run', function () { expect(rows[1]['ruleName']).to.equal('ApexUnitTestClassShouldHaveAsserts', 'Violation #2 should be of the expected type'); } + function validateNoViolationsHtmlOutput(html: string): void { + // there should be no instance of a filled violations object + const result = html.match(/const violations = (\[.+\]);/); + expect(result).to.be.null; + // there should be an empty violations object + const emptyResult = html.match(/const violations = \[\];/); + expect(emptyResult).to.be.not.null; + } + setupCommandTest .command(['scanner:run', '--target', path.join('test', 'code-fixtures', 'apex', 'SomeTestClass.cls'), @@ -297,7 +347,7 @@ describe('scanner:run', function () { '--format', 'html' ]) .it('Properly writes HTML to console', ctx => { - // Parse out the JSON results + // Parse out the HTML results validateHtmlOutput(ctx.stdout); }); @@ -316,7 +366,6 @@ describe('scanner:run', function () { .it('Properly writes HTML to file', ctx => { // Verify that the correct message is displayed to user expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', [outputFile])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.noViolationsDetected', [])); // Verify that the file we wanted was actually created. 
expect(fs.existsSync(outputFile)).to.equal(true, 'The command should have created the expected output file'); @@ -330,8 +379,8 @@ describe('scanner:run', function () { '--ruleset', 'ApexUnit', '--format', 'html' ]) - .it('When no violations are detected, a message is logged to the console', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + .it('When no violations are detected, HTML with no violations is logged to the console', ctx => { + validateNoViolationsHtmlOutput(ctx.stdout); }); setupCommandTest @@ -346,10 +395,11 @@ describe('scanner:run', function () { fs.unlinkSync(outputFile); } }) - .it('When --oufile is provided and no violations are detected, output file should not be created', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.writtenToOutFile', [outputFile])); - expect(fs.existsSync(outputFile)).to.be.false; + .it('When --outfile is provided and no violations are detected, HTML file with no violations should be created', ctx => { + expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', [outputFile])); + expect(fs.existsSync(outputFile)).to.be.true; + const fileContents = fs.readFileSync(outputFile).toString(); + validateNoViolationsHtmlOutput(fileContents); }); }); @@ -365,6 +415,13 @@ describe('scanner:run', function () { expect(output[0].violations[1].line).to.equal(19, 'Violation #2 should occur on the expected line'); } + function validateNoViolationsJsonOutput(json: string): void { + const output = JSON.parse(json); + // There should be no violations. 
+ expect(output.length).to.equal(0, 'Should be no violations from one engine'); + } + + setupCommandTest .command(['scanner:run', '--target', path.join('test', 'code-fixtures', 'apex', 'SomeTestClass.cls'), @@ -392,7 +449,6 @@ describe('scanner:run', function () { // Verify that the correct message is displayed to user expect(ctx.stdout).to.contain(processorMessages.getMessage('output.engineSummaryTemplate', ['pmd', 2, 1]), 'Expected summary to be correct'); expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.json'])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.noViolationsDetected', [])); // Verify that the file we wanted was actually created. expect(fs.existsSync('testout.json')).to.equal(true, 'The command should have created the expected output file'); @@ -406,8 +462,8 @@ describe('scanner:run', function () { '--ruleset', 'ApexUnit', '--format', 'json' ]) - .it('When no violations are detected, a message is logged to the console', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + .it('When no violations are detected, JSON with no violations is logged to the console', ctx => { + validateNoViolationsJsonOutput(ctx.stdout); }); setupCommandTest @@ -422,10 +478,11 @@ describe('scanner:run', function () { fs.unlinkSync('testout.json'); } }) - .it('When --oufile is provided and no violations are detected, output file should not be created', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); - expect(ctx.stdout).to.not.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.json'])); - expect(fs.existsSync('testout.json')).to.be.false; + .it('When --outfile is provided and no violations are detected, a JSON file with no violations is created', ctx => { + 
expect(ctx.stdout).to.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.json'])); + expect(fs.existsSync('testout.json')).to.be.true; + const fileContents = fs.readFileSync('testout.json').toString(); + validateNoViolationsJsonOutput(fileContents); }); }); @@ -454,8 +511,13 @@ describe('scanner:run', function () { '--ruleset', 'ApexUnit', '--format', 'table' ]) - .it('When no violations are detected, a message is logged to the console', ctx => { - expect(ctx.stdout).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + .it('When no violations are detected, an empty table is logged to the console', ctx => { + // Split the output by newline characters to get the individual rows of the table, + // including the header and separator rows. + const rows = ctx.stdout.trim().split('\n'); + + // Expect to find no violations listing this class. + expect(rows.find(r => r.indexOf("SomeTestClass.cls") > 0)).to.equal(undefined, "Should be no violation rows for this class"); }); }); @@ -519,7 +581,6 @@ describe('scanner:run', function () { expect(output.status).to.equal(0, 'Should finish properly'); const result = output.result; expect(result).to.contain(processorMessages.getMessage('output.writtenToOutFile', ['testout.xml'])); - expect(result).to.not.contain(processorMessages.getMessage('output.noViolationsDetected', [])); // Verify that the file we wanted was actually created. 
expect(fs.existsSync('testout.xml')).to.equal(true, 'The command should have created the expected output file'); const fileContents = fs.readFileSync('testout.xml').toString(); @@ -543,7 +604,7 @@ describe('scanner:run', function () { .it('--json flag wraps message about no violations occuring', ctx => { const output = JSON.parse(ctx.stdout); expect(output.status).to.equal(0, 'Should have finished properly'); - expect(output.result).to.contain(processorMessages.getMessage('output.noViolationsDetected', ['pmd, retire-js'])); + expect(output.result.length).to.equal(0, 'When no violations are present, JSON result should be empty array.') }); }); @@ -629,6 +690,12 @@ describe('scanner:run', function () { setupCommandTest .command(['scanner:run', '--target', 'path/that/does/not/matter', '--format', 'csv', '--outfile', 'notcsv.xml']) + .finally(ctx => { + // Regardless of what happens in the test itself, we need to delete the file we created. + if (fs.existsSync('notcsv.xml')) { + fs.unlinkSync('notcsv.xml'); + } + }) .it('Warning logged when output file format does not match format', ctx => { expect(ctx.stdout).to.contain(commonMessages.getMessage('validations.outfileFormatMismatch', ['csv', 'xml'])); }); @@ -663,8 +730,13 @@ describe('scanner:run', function () { '--format', 'csv' ]) .it('The baseConfig enables the usage of default Javascript Types', ctx => { - // There should be no violations. - expect(ctx.stdout).to.contains('No rule violations found.', 'Should be no violations found in the file.'); + // If there's a summary, then it'll be separated from the CSV by an empty line. Throw it away. + const [csv, _] = ctx.stdout.trim().split(/\n\r?\n/); + + // Confirm there are no violations. + // Since it's a CSV, the rows themselves are separated by newline characters. + // The header should not have any newline characters after it. There should be no violation rows. 
+ expect(csv.indexOf('\n')).to.equal(-1, "Should be no violations detected"); }); // TODO: THIS TEST WAS IMPLEMENTED FOR W-7791882. THE FIX FOR THAT BUG WAS SUB-OPTIMAL, AND WE NEED TO CHANGE IT IN 3.0. @@ -691,7 +763,13 @@ describe('scanner:run', function () { '--env', '{"qunit": true}' ]) .it('Providing qunit in the --env override should resolve errors about that framework', ctx => { - expect(ctx.stdout).to.contain('No rule violations found.', 'Should be no violations found in the file.'); + // If there's a summary, then it'll be separated from the CSV by an empty line. Throw it away. + const [csv, _] = ctx.stdout.trim().split(/\n\r?\n/); + + // Confirm there are no violations. + // Since it's a CSV, the rows themselves are separated by newline characters. + // The header should not have any newline characters after it. There should be no violation rows. + expect(csv.indexOf('\n')).to.equal(-1, "Should be no violations detected"); }); }); diff --git a/test/lib/RuleManager.test.ts b/test/lib/RuleManager.test.ts index f426f9881..1a5d03d3c 100644 --- a/test/lib/RuleManager.test.ts +++ b/test/lib/RuleManager.test.ts @@ -273,7 +273,7 @@ describe('RuleManager', () => { const {results} = await ruleManager.runRulesMatchingCriteria(filters, invalidTarget, runOptions, EMPTY_ENGINE_OPTIONS); - expect(results).to.equal(''); + expect(results).to.equal('[]'); Sinon.assert.calledWith(uxSpy, EVENTS.WARNING_ALWAYS, messages.getMessage('warning.targetSkipped', [invalidTarget.join(', ')])); Sinon.assert.callCount(telemetrySpy, 1); }); @@ -389,13 +389,13 @@ describe('RuleManager', () => { }) describe('Edge Cases', () => { - it('When no rules match the given criteria, an empty string is returned', async () => { + it('When no rules match the given criteria, an empty summary is returned', async () => { // Define our preposterous filter array. 
const filters = [new CategoryFilter(['beebleborp'])]; const {results} = await ruleManager.runRulesMatchingCriteria(filters, ['app'], runOptions, EMPTY_ENGINE_OPTIONS); expect(typeof results).to.equal('string', `Output ${results} should have been a string`); - expect(results).to.equal('', `Output ${results} should have been an empty string`); + expect(results).to.equal('[]', `Output ${results} should have been an empty summary (empty array)`); Sinon.assert.callCount(telemetrySpy, 1); }); @@ -407,7 +407,7 @@ describe('RuleManager', () => { const {results} = await ruleManager.runRulesMatchingCriteria(filters, invalidTarget, runOptions, EMPTY_ENGINE_OPTIONS); - expect(results).to.equal(''); + expect(results).to.equal('[]'); Sinon.assert.callCount(uxSpy, 1); Sinon.assert.calledWith(uxSpy, EVENTS.WARNING_ALWAYS, messages.getMessage("warning.targetSkipped", [invalidTarget.join(', ')])); Sinon.assert.callCount(telemetrySpy, 1); @@ -422,7 +422,7 @@ describe('RuleManager', () => { const {results} = await ruleManager.runRulesMatchingCriteria(filters, invalidTarget, runOptions, EMPTY_ENGINE_OPTIONS); - expect(results).to.equal(''); + expect(results).to.equal('[]'); Sinon.assert.callCount(uxSpy, 1); Sinon.assert.calledWith(uxSpy, EVENTS.WARNING_ALWAYS, messages.getMessage("warning.targetSkipped", [invalidTarget.join(', ')])); Sinon.assert.callCount(telemetrySpy, 1); @@ -436,7 +436,7 @@ describe('RuleManager', () => { const {results} = await ruleManager.runRulesMatchingCriteria(filters, invalidTargets, runOptions, EMPTY_ENGINE_OPTIONS); - expect(results).to.equal(''); + expect(results).to.equal('[]'); Sinon.assert.callCount(uxSpy, 1); Sinon.assert.calledWith(uxSpy, EVENTS.WARNING_ALWAYS, messages.getMessage("warning.targetsSkipped", [invalidTargets.join(', ')])); Sinon.assert.callCount(telemetrySpy, 1); diff --git a/test/lib/util/RunOutputProcessor.test.ts b/test/lib/util/RunOutputProcessor.test.ts index bfff52f8d..54e961b0c 100644 --- a/test/lib/util/RunOutputProcessor.test.ts 
+++ b/test/lib/util/RunOutputProcessor.test.ts @@ -62,6 +62,16 @@ const FAKE_TABLE_OUTPUT = { ] }; +const EMPTY_TABLE_OUTPUT = { + "columns": [ + "Location", + "Description", + "Category", + "URL" + ], + "rows": [] +}; + const FAKE_CSV_OUTPUT = `"Problem","File","Severity","Line","Column","Rule","Description","URL","Category","Engine" "1","/Users/jfeingold/ts-sample-project/src/file-with-problems.ts","2","3","7","no-unused-vars","'UNUSED' is assigned a value but never used.","https://eslint.org/docs/rules/no-unused-vars","Variables","eslint-typescript" "2","/Users/jfeingold/ts-sample-project/src/file-with-problems.ts","2","3","7","@typescript-eslint/no-unused-vars","'UNUSED' is assigned a value but never used.","https://github.com/typescript-eslint/typescript-eslint/blob/v2.33.0/packages/eslint-plugin/docs/rules/no-unused-vars.md","Variables","eslint-typescript" @@ -136,19 +146,18 @@ describe('RunOutputProcessor', () => { }; const rop = new RunOutputProcessor(opts, testUx); const summaryMap: Map<string, EngineExecutionSummary> = new Map(); - summaryMap.set('pmd', {fileCount: 0, violationCount: 0}); - summaryMap.set('eslint', {fileCount: 0, violationCount: 0}); - const fakeRes: RecombinedRuleResults = {minSev: 0, summaryMap, results: ''}; + const fakeTableResults: RecombinedRuleResults = {minSev: 1, results: EMPTY_TABLE_OUTPUT, summaryMap: summaryMap} // THIS IS THE PART BEING TESTED. 
- const output: AnyJson = rop.processRunOutput(fakeRes); + const output: AnyJson = rop.processRunOutput(fakeTableResults); - // We expect that the message logged to the console and the message returned should both be the default - const expectedMsg = processorMessages.getMessage('output.noViolationsDetected', ['pmd, eslint']); + const expectedTableSummary = `${processorMessages.getMessage('output.writtenToConsole')}`; + // We expect that an empty table should be logged to the console and that the output should be an empty array Sinon.assert.callCount(logSpy, 1); - Sinon.assert.callCount(tableSpy, 0); - Sinon.assert.calledWith(logSpy, expectedMsg); - expect(output).to.equal(expectedMsg, 'Should have returned expected message'); + Sinon.assert.callCount(tableSpy, 1); + Sinon.assert.calledWith(logSpy, expectedTableSummary); + // TODO is there a better way to check for empty array? output is an empty array, but typed as AnyJson. + expect(output.toString()).to.equal('', 'Should have returned empty results'); }); describe('Test Case: Table', () => { @@ -248,7 +257,7 @@ ${processorMessages.getMessage('output.writtenToConsole')}`; // NOTE: This next test is based on the implementation of run-summary messages in W-8388246, which was known // to be incomplete. When we flesh out that implementation with summaries for other formats, this test might // need to change. 
- it('JSON-type output should NOT be followed by summary', async () => { + it('JSON-type output with no violations should be an empty violation set', async () => { const opts: RunOutputOptions = { format: OUTPUT_FORMAT.JSON }; @@ -297,17 +306,20 @@ ${processorMessages.getMessage('output.writtenToConsole')}`; const summaryMap: Map<string, EngineExecutionSummary> = new Map(); summaryMap.set('pmd', {fileCount: 0, violationCount: 0}); summaryMap.set('eslint', {fileCount: 0, violationCount: 0}); - const fakeRes: RecombinedRuleResults = {minSev: 0, summaryMap, results: ''}; + const fakeRes: RecombinedRuleResults = {minSev: 0, summaryMap, results: '"Problem","Severity","File","Line","Column","Rule","Description","URL","Category","Engine"'}; // THIS IS THE PART BEING TESTED. const output: AnyJson = rop.processRunOutput(fakeRes); - // We expect that the message logged to the console and the message returned should both be the default - const expectedMsg = processorMessages.getMessage('output.noViolationsDetected', ['pmd, eslint']); + // We expect the default engine summaries and the written-to-file message to be logged to the console + const expectedMsg = `${processorMessages.getMessage('output.engineSummaryTemplate', ['pmd', 0, 0])} +${processorMessages.getMessage('output.engineSummaryTemplate', ['eslint', 0, 0])} +${processorMessages.getMessage('output.writtenToOutFile', [fakeFilePath])}`; + Sinon.assert.callCount(logSpy, 1); Sinon.assert.callCount(tableSpy, 0); Sinon.assert.calledWith(logSpy, expectedMsg); - expect(fakeFiles.length).to.equal(0, 'No files should be created'); + expect(fakeFiles.length).to.equal(1, 'A CSV file with only a header should be created'); expect(output).to.equal(expectedMsg, 'Should have returned expected message'); });