From a23c6b359f5c9944aea555006d707a700caaf462 Mon Sep 17 00:00:00 2001 From: Shane McLaughlin Date: Mon, 31 Oct 2022 13:27:45 -0500 Subject: [PATCH] feat: string replacements in deploy (#748) --- .github/workflows/test.yml | 18 +- HANDBOOK.md | 9 +- package.json | 5 +- src/client/metadataApiDeploy.ts | 35 ++- src/client/types.ts | 1 - src/convert/convertContext.ts | 13 +- src/convert/metadataConverter.ts | 21 +- src/convert/replacements.ts | 220 +++++++++++++++ src/convert/streams.ts | 58 +--- .../defaultMetadataTransformer.ts | 39 ++- .../staticResourceMetadataTransformer.ts | 34 ++- src/convert/types.ts | 43 ++- src/resolve/sourceComponent.ts | 16 +- test/convert/metadataConverter.test.ts | 40 ++- test/convert/replacements.test.ts | 262 ++++++++++++++++++ test/convert/streams.test.ts | 12 - .../local/replacements/replacements.nut.ts | 97 +++++++ .../testProj/config/project-scratch-def.json | 13 + .../main/default/classes/replaceStuff.cls | 14 + .../default/classes/replaceStuff.cls-meta.xml | 5 + .../TestObj__c/TestObj__c.object-meta.xml | 21 ++ .../fields/FieldA__c.field-meta.xml | 12 + .../staticresources/Test.resource-meta.xml | 7 + .../staticresources/Test/folder/test2.css | 1 + .../default/staticresources/Test/test.css | 1 + .../local/replacements/testProj/label.txt | 1 + .../replacements/testProj/replacements.txt | 1 + .../replacements/testProj/sfdx-project.json | 66 +++++ .../eda.json} | 10 +- .../lotsOfClasses.json | 10 +- .../lotsOfClassesOneDir.json} | 10 +- .../eda.json | 8 +- .../lotsOfClasses.json | 8 +- .../lotsOfClassesOneDir.json | 8 +- .../eda.json | 10 +- .../lotsOfClasses.json | 10 +- .../lotsOfClassesOneDir.json | 10 +- .../eda.json | 8 +- .../lotsOfClasses.json | 8 +- .../lotsOfClassesOneDir.json | 8 +- .../lotsOfClasses.json | 8 +- .../lotsOfClassesOneDir.json | 8 +- .../eda.json | 8 +- .../lotsOfClasses.json | 8 +- .../lotsOfClassesOneDir.json | 8 +- yarn.lock | 5 + 46 files changed, 981 insertions(+), 237 deletions(-) create mode 100644 
src/convert/replacements.ts create mode 100644 test/convert/replacements.test.ts create mode 100644 test/nuts/local/replacements/replacements.nut.ts create mode 100644 test/nuts/local/replacements/testProj/config/project-scratch-def.json create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls-meta.xml create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/TestObj__c.object-meta.xml create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/fields/FieldA__c.field-meta.xml create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test.resource-meta.xml create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/folder/test2.css create mode 100644 test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/test.css create mode 100644 test/nuts/local/replacements/testProj/label.txt create mode 100644 test/nuts/local/replacements/testProj/replacements.txt create mode 100644 test/nuts/local/replacements/testProj/sfdx-project.json rename test/nuts/perfResults/{x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClassesOneDir.json => x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/eda.json} (53%) rename test/nuts/perfResults/{x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz => x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz}/lotsOfClasses.json (53%) rename test/nuts/perfResults/{x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/eda.json => x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClassesOneDir.json} (52%) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ba8dea1767..3f4d2b5e7e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,12 +11,22 @@ on: jobs: unit-tests: uses: 
salesforcecli/github-workflows/.github/workflows/unitTest.yml@main + nuts: + uses: salesforcecli/github-workflows/.github/workflows/nut.yml@main + secrets: inherit + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + fail-fast: false + with: + os: ${{ matrix.os }} + perf-scale-nuts-linux: uses: ./.github/workflows/perfScaleNut.yml - needs: unit-tests + needs: [unit-tests, nuts] perf-scale-nuts-windows: uses: ./.github/workflows/perfScaleNut.yml - needs: unit-tests + needs: [unit-tests, nuts] with: os: 'windows-latest' @@ -24,7 +34,7 @@ jobs: # the following is highly duplicative to allow linux to start all the nuts without waiting for windows primer extNuts-primer-linux: name: extNUTs-linux-prime - needs: unit-tests + needs: [unit-tests, nuts] uses: salesforcecli/github-workflows/.github/workflows/externalNut.yml@main with: packageName: '@salesforce/source-deploy-retrieve' @@ -38,7 +48,7 @@ jobs: extNuts-primer-windows: name: extNUTs-windows-prime - needs: unit-tests + needs: [unit-tests, nuts] uses: salesforcecli/github-workflows/.github/workflows/externalNut.yml@main with: packageName: '@salesforce/source-deploy-retrieve' diff --git a/HANDBOOK.md b/HANDBOOK.md index 75039d18e0..d65377d0d6 100644 --- a/HANDBOOK.md +++ b/HANDBOOK.md @@ -20,7 +20,6 @@ - [Overview](#overview-2) - [Converting metadata](#converting-metadata) - [The conversion pipeline](#the-conversion-pipeline) - - [ComponentReader](#componentreader) - [ComponentConverter](#componentconverter) - [ComponentWriter](#componentwriter) - [ConvertContext](#convertcontext) @@ -214,7 +213,7 @@ A `TreeContainer` is an encapsulation of a file system that enables I/O against Clients can implement new tree containers by extending the `TreeContainer` base class and expanding functionality. Not all methods of a tree container have to be implemented, but an error will be thrown if the container is being used in a context that requires particular methods. 
-💡*The author, Brian, demonstrated the extensibility of tree containers for a side project by creating a* `GitTreeContainer`_. This enabled resolving components against a git object tree, allowing us to perform component diffs between git refs and analyze GitHub projects. See the [SFDX Badge Generator](https://sfdx-badge.herokuapp.com/). This could be expanded into a plugin of some sort._ +💡_The author, Brian, demonstrated the extensibility of tree containers for a side project by creating a_ `GitTreeContainer`_. This enabled resolving components against a git object tree, allowing us to perform component diffs between git refs and analyze GitHub projects. See the [SFDX Badge Generator](https://sfdx-badge.herokuapp.com/). This could be expanded into a plugin of some sort._ #### Creating mock components with the VirtualTreeContainer @@ -315,12 +314,6 @@ const converter = new MetadataConverter(); When `convert` is called, the method prepares the inputs for setting up the conversion pipeline. The pipeline consists of chaining three custom NodeJS stream, one for each stage of the copy operation. To more deeply understand what is happening in the conversion process, it’s recommended to familiarize yourself with streaming concepts and the NodeJS API. See [Stream NodeJS documentation](https://nodejs.org/api/stream.html) and [Understanding Streams in NodeJS](https://nodesource.com/blog/understanding-streams-in-nodejs/). -#### ComponentReader - -The reader is fairly simple, it takes a collection of source components and implements the stream API to push them out one-by-one. - -🧽 _When this aspect of the library was first written,_ `Readable.from(iterable)` _was not yet available. This simple API could probably replace the_ `ComponentReader`_._ - #### ComponentConverter Here is where file transformation is done, but without being written to the destination yet. 
Similar to how source resolution uses adapters to determine how to construct components for a type (see [The resolver constructs components based…](#resolving-from-metadata-files)), conversion uses `MetadataTransformer` implementations to describe the transformations. As you might guess, types are assigned a transformer, if they need one, in their metadata registry definition, otherwise the default one is used. Each transformer implements a `toSourceFormat` and a `toMetadataFormat` method, which are called by the `ComponentConverter` based on what the target format is. The methods will return a collection of `WriteInfo` objects, which as we’ve been touching on are “descriptions” of how to write a given file. diff --git a/package.json b/package.json index 8d125c9864..1d6dbfcdc7 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "graceful-fs": "^4.2.10", "ignore": "^5.2.0", "mime": "2.6.0", + "minimatch": "^5.1.0", "proxy-agent": "^5.0.0", "proxy-from-env": "^1.1.0", "unzipper": "0.10.11" @@ -47,6 +48,7 @@ "@types/archiver": "^5.3.1", "@types/deep-equal-in-any-order": "^1.0.1", "@types/mime": "2.0.3", + "@types/minimatch": "^5.1.2", "@types/proxy-from-env": "^1.0.1", "@types/shelljs": "^0.8.11", "@types/unzipper": "^0.10.5", @@ -98,6 +100,7 @@ "pretest": "sf-compile-test", "repl": "node --inspect ./scripts/repl.js", "test": "sf-test", + "test:nuts": "mocha \"test/nuts/local/**/*.nut.ts\" --timeout 500000", "test:nuts:scale": "mocha \"test/nuts/scale/eda.nut.ts\" --timeout 500000; mocha \"test/nuts/scale/lotsOfClasses.nut.ts\" --timeout 500000; mocha \"test/nuts/scale/lotsOfClassesOneDir.nut.ts\" --timeout 500000", "test:nuts:scale:record": "yarn test:nuts:scale && git add . 
&& git commit -m \"test: record perf [ci skip]\" --no-verify && git push --no-verify", "test:registry": "mocha ./test/registry/registryCompleteness.test.ts --timeout 50000", @@ -114,4 +117,4 @@ "yarn": "1.22.4" }, "config": {} -} \ No newline at end of file +} diff --git a/src/client/metadataApiDeploy.ts b/src/client/metadataApiDeploy.ts index 4dbf5c5930..f1fadbfa5d 100644 --- a/src/client/metadataApiDeploy.ts +++ b/src/client/metadataApiDeploy.ts @@ -10,6 +10,7 @@ import { create as createArchive } from 'archiver'; import * as fs from 'graceful-fs'; import { Lifecycle, Messages, SfError } from '@salesforce/core'; import { ensureArray } from '@salesforce/kit'; +import { ReplacementEvent } from '../convert/types'; import { MetadataConverter } from '../convert'; import { ComponentLike, SourceComponent } from '../resolve'; import { ComponentSet } from '../collections'; @@ -31,16 +32,15 @@ Messages.importMessagesDirectory(__dirname); const messages = Messages.load('@salesforce/source-deploy-retrieve', 'sdr', ['error_no_job_id']); export class DeployResult implements MetadataTransferResult { - public readonly response: MetadataApiDeployStatus; - public readonly components: ComponentSet; private readonly diagnosticUtil = new DiagnosticUtil('metadata'); private fileResponses: FileResponse[]; private readonly shouldConvertPaths = sep !== posix.sep; - public constructor(response: MetadataApiDeployStatus, components: ComponentSet) { - this.response = response; - this.components = components; - } + public constructor( + public readonly response: MetadataApiDeployStatus, + public readonly components: ComponentSet, + public readonly replacements: Map = new Map() + ) {} public getFileResponses(): FileResponse[] { // this involves FS operations, so only perform once! 
@@ -236,6 +236,7 @@ export class MetadataApiDeploy extends MetadataTransfer = new Map(); private orgId: string; // Keep track of rest deploys separately since Connection.deploy() removes it // from the apiOptions and we need it for telemetry. @@ -310,6 +311,7 @@ export class MetadataApiDeploy extends MetadataTransfer { + const LifecycleInstance = Lifecycle.getInstance(); const connection = await this.getConnection(); // store for use in the scopedPostDeploy event this.orgId = connection.getAuthInfoFields().orgId; @@ -320,11 +322,26 @@ export class MetadataApiDeploy extends MetadataTransfer + // lifecycle have to be async, so wrapped in a promise + new Promise((resolve) => { + if (!this.replacements.has(replacement.filename)) { + this.replacements.set(replacement.filename, [replacement.replaced]); + } else { + this.replacements.get(replacement.filename).push(replacement.replaced); + } + resolve(); + }) + ); + const [zipBuffer] = await Promise.all([this.getZipBuffer(), this.maybeSaveTempDirectory('metadata')]); // SDR modifies what the mdapi expects by adding a rest param const { rest, ...optionsWithoutRest } = this.options.apiOptions; @@ -370,7 +387,7 @@ export class MetadataApiDeploy extends MetadataTransfer, targetFormat: SfdxFileFormat, @@ -43,7 +46,7 @@ export class MetadataConverter { (comps instanceof ComponentSet ? 
Array.from(comps.getSourceComponents()) : comps) as SourceComponent[] ).filter((comp) => comp.type.isAddressable !== false); - const isSource = targetFormat === 'source'; + const targetFormatIsSource = targetFormat === 'source'; const tasks: Array> = []; let writer: StandardWriter | ZipWriter; @@ -59,7 +62,7 @@ export class MetadataConverter { packagePath = getPackagePath(output); defaultDirectory = packagePath; writer = new StandardWriter(packagePath); - if (!isSource) { + if (!targetFormatIsSource) { const manifestPath = join(packagePath, MetadataConverter.PACKAGE_XML_FILE); tasks.push( promises.writeFile(manifestPath, await cs.getPackageXml()), @@ -78,13 +81,16 @@ export class MetadataConverter { if (output.packageName) { cs.fullName = output.packageName; } + packagePath = getPackagePath(output); defaultDirectory = packagePath; writer = new ZipWriter(packagePath); - if (!isSource) { + if (!targetFormatIsSource) { writer.addToZip(await cs.getPackageXml(), MetadataConverter.PACKAGE_XML_FILE); + // for each of the destructive changes in the component set, convert and write the correct metadata // to each manifest + for (const destructiveChangeType of cs.getTypesOfDestructiveChanges()) { writer.addToZip( // TODO: can this be safely parallelized? @@ -96,7 +102,7 @@ export class MetadataConverter { } break; case 'merge': - if (!isSource) { + if (!targetFormatIsSource) { throw new SfError(messages.getMessage('error_merge_metadata_target_unsupported')); } defaultDirectory = output.defaultDirectory; @@ -111,7 +117,10 @@ export class MetadataConverter { } const conversionPipeline = pipeline( - new ComponentReader(components), + Readable.from(components), + !targetFormatIsSource && (process.env.SF_APPLY_REPLACEMENTS_ON_CONVERT === 'true' || output.type === 'zip') + ? (await getReplacementMarkingStream()) ?? 
new PassThrough({ objectMode: true }) + : new PassThrough({ objectMode: true }), new ComponentConverter(targetFormat, this.registry, mergeSet, defaultDirectory), writer ); diff --git a/src/convert/replacements.ts b/src/convert/replacements.ts new file mode 100644 index 0000000000..70e5a55734 --- /dev/null +++ b/src/convert/replacements.ts @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2020, salesforce.com, inc. + * All rights reserved. + * Licensed under the BSD 3-Clause license. + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +import { readFile } from 'fs/promises'; +import { Transform, Readable } from 'stream'; +import { Lifecycle, SfError, SfProject } from '@salesforce/core'; +import * as minimatch from 'minimatch'; +import { SourcePath } from '../common'; +import { SourceComponent } from '../resolve/sourceComponent'; +import { MarkedReplacement, ReplacementConfig, ReplacementEvent } from './types'; + +const fileContentsCache = new Map(); + +/** If a component has replacements, you get it piped through the replacementStream + * Otherwise, you'll get the original readable stream + */ +export const getReplacementStreamForReadable = ( + component: SourceComponent, + path: SourcePath +): Readable | ReplacementStream => + component.replacements?.[path] + ? component.tree.stream(path).pipe(new ReplacementStream(component.replacements?.[path])) + : component.tree.stream(path); + +/** + * A stream for replacing the contents of a single SourceComponent. 
+ * + */ +class ReplacementStream extends Transform { + public constructor(private readonly replacements: MarkedReplacement[]) { + super({ objectMode: true }); + } + + public async _transform( + chunk: Buffer, + encoding: string, + callback: (error?: Error, data?: Buffer) => void + ): Promise { + let error: Error; + // read and do the various replacements + callback(error, Buffer.from(await replacementIterations(chunk.toString(), this.replacements))); + } +} + +/** + * perform an array of replacements on a string + * emits warnings when an expected replacement target isn't found + */ +export const replacementIterations = async (input: string, replacements: MarkedReplacement[]): Promise => { + const lifecycleInstance = Lifecycle.getInstance(); + let output = input; + for (const replacement of replacements) { + // TODO: node 16+ has String.replaceAll for non-regex scenarios + const regex = + typeof replacement.toReplace === 'string' ? new RegExp(replacement.toReplace, 'g') : replacement.toReplace; + const replaced = output.replace(regex, replacement.replaceWith); + + if (replaced !== output) { + output = replaced; + // eslint-disable-next-line no-await-in-loop + await lifecycleInstance.emit('replacement', { + filename: replacement.matchedFilename, + replaced: replacement.toReplace.toString(), + } as ReplacementEvent); + } else if (replacement.singleFile) { + // replacements need to be done sequentially + // eslint-disable-next-line no-await-in-loop + await lifecycleInstance.emitWarning( + `Your sfdx-project.json specifies that ${replacement.toReplace.toString()} should be replaced in ${ + replacement.matchedFilename + }, but it was not found.` + ); + } + } + return output; +}; + +/** + * Reads the project, gets replacements, removes any that aren't applicable due to environment conditionals, and returns an instance of the ReplacementMarkingStream + */ +export const getReplacementMarkingStream = async (): Promise => { + // remove any that don't agree with current env 
+ const filteredReplacements = envFilter(await readReplacementsFromProject()); + if (filteredReplacements.length) { + return new ReplacementMarkingStream(filteredReplacements); + } +}; + +/** + * Stream for marking replacements on a component. + * Returns a mutated component with a `replacements` property if any replacements are found. + * Throws if any replacements reference a file or env that does not exist + */ +class ReplacementMarkingStream extends Transform { + public constructor(private readonly replacementConfigs: ReplacementConfig[]) { + super({ objectMode: true }); + } + + public async _transform( + chunk: SourceComponent, + encoding: string, + callback: (err: Error, data: SourceComponent) => void + ): Promise { + let err: Error; + // if deleting, or no configs, just pass through + if (!chunk.isMarkedForDelete() && this.replacementConfigs?.length) { + try { + chunk.replacements = await getReplacements(chunk, this.replacementConfigs); + } catch (e) { + if (!(e instanceof Error)) { + throw e; + } + err = e; + } + } + callback(err, chunk); + } +} + +export const getContentsOfReplacementFile = async (path: string): Promise => { + if (!fileContentsCache.has(path)) { + try { + fileContentsCache.set(path, (await readFile(path, 'utf8')).trim()); + } catch (e) { + throw new SfError( + `The file "${path}" specified in the "replacements" property of sfdx-project.json could not be read.` + ); + } + } + return fileContentsCache.get(path); +}; + +/** + * Build the replacements property for a sourceComponent + */ +export const getReplacements = async ( + cmp: SourceComponent, + replacementConfigs: ReplacementConfig[] = [] +): Promise => { + // all possible filenames for this component + const filenames = [cmp.xml, ...cmp.walkContent()].filter(Boolean); + const replacementsForComponent = ( + await Promise.all( + // build a nested array that can be run through Object.fromEntries + // one MarkedReplacement[] for each file in the component + filenames.map( + async (f): 
Promise<[string, MarkedReplacement[]]> => [ + f, + await Promise.all( + replacementConfigs + // filter out any that don't match the current file + .filter((r) => matchesFile(f, r)) + .map(async (r) => ({ + matchedFilename: f, + // used during replacement stream to limit warnings to explicit filenames, not globs + singleFile: Boolean(r.filename), + // Config is json which might use the regex. If so, turn it into an actual regex + toReplace: r.stringToReplace ? stringToRegex(r.stringToReplace) : new RegExp(r.regexToReplace, 'g'), + // get the literal replacement (either from env or file contents) + replaceWith: r.replaceWithEnv + ? getEnvValue(r.replaceWithEnv) + : await getContentsOfReplacementFile(r.replaceWithFile), + })) + ), + ] + ) + ) + ) + // filter out any that don't have any replacements + .filter(([, replacements]) => replacements.length > 0); + + if (replacementsForComponent.length) { + // turn into a Dictionary-style object so it's easier to lookup by filename + return Object.fromEntries(replacementsForComponent); + } +}; + +export const matchesFile = (f: string, r: ReplacementConfig): boolean => + // filenames will be absolute. We don't have convenient access to the pkgDirs, + // so we need to be more open than an exact match + f.endsWith(r.filename) || (r.glob && minimatch(f, `**/${r.glob}`)); + +/** + * Regardless of any components, return the ReplacementConfig that are valid with the current env. + * These can be checked globally and don't need to be checked per component. 
+ */ +const envFilter = (replacementConfigs: ReplacementConfig[] = []): ReplacementConfig[] => + replacementConfigs.filter( + (replacement) => + !replacement.replaceWhenEnv || + replacement.replaceWhenEnv.every((envConditional) => process.env[envConditional.env] === envConditional.value) + ); + +/** A "getter" for envs to throw an error when an expected env is not present */ +const getEnvValue = (env: string): string => { + if (process.env[env]) { + return process.env[env]; + } + throw new SfError( + `"${env}" is in sfdx-project.json as a value for "replaceWithEnv" property, but it's not set in your environment.` + ); +}; + +/** + * Read the `replacement` property from sfdx-project.json + */ +const readReplacementsFromProject = async (): Promise => { + const proj = await SfProject.resolve(); + const projJson = (await proj.resolveProjectConfig()) as { replacements?: ReplacementConfig[] }; + return projJson.replacements; +}; + +/** escape any special characters used in the string so it can be used as a regex */ +export const stringToRegex = (input: string): RegExp => + // being overly conservative + // eslint-disable-next-line no-useless-escape + new RegExp(input.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&'), 'g'); diff --git a/src/convert/streams.ts b/src/convert/streams.ts index 2d18c439fe..9358093699 100644 --- a/src/convert/streams.ts +++ b/src/convert/streams.ts @@ -36,51 +36,19 @@ export const stream2buffer = async (stream: Stream): Promise => // eslint-disable-next-line @typescript-eslint/restrict-template-expressions stream.on('error', (err) => reject(`error converting stream - ${err}`)); }); -export class ComponentReader extends Readable { - private iter: Iterator; - - public constructor(components: Iterable) { - super({ objectMode: true }); - this.iter = this.createIterator(components); - } - - public _read(): void { - let next = this.iter.next(); - while (!next.done) { - this.push(next.value); - next = this.iter.next(); - } - this.push(null); - } - - // 
preserved to isolate from other classes in this file - // componentReader should go away (see note in handbook) - // eslint-disable-next-line class-methods-use-this - private *createIterator(components: Iterable): Iterator { - for (const component of components) { - yield component; - } - } -} export class ComponentConverter extends Transform { public readonly context = new ConvertContext(); - private targetFormat: SfdxFileFormat; - private mergeSet: ComponentSet; private transformerFactory: MetadataTransformerFactory; - private defaultDirectory: string; public constructor( - targetFormat: SfdxFileFormat, + private targetFormat: SfdxFileFormat, registry: RegistryAccess, - mergeSet?: ComponentSet, - defaultDirectory?: string + private mergeSet?: ComponentSet, + private defaultDirectory?: string ) { super({ objectMode: true }); - this.targetFormat = targetFormat; - this.mergeSet = mergeSet; this.transformerFactory = new MetadataTransformerFactory(registry, this.context); - this.defaultDirectory = defaultDirectory; } public async _transform( @@ -154,12 +122,10 @@ export abstract class ComponentWriter extends Writable { export class StandardWriter extends ComponentWriter { public converted: SourceComponent[] = []; - private resolver: MetadataResolver; private logger: Logger; - public constructor(rootDestination: SourcePath, resolver = new MetadataResolver()) { + public constructor(rootDestination: SourcePath, private resolver = new MetadataResolver()) { super(rootDestination); - this.resolver = resolver; this.logger = Logger.childFromRoot(this.constructor.name); } @@ -239,13 +205,12 @@ export class ZipWriter extends ComponentWriter { if (chunk.component.type.folderType || chunk.component.type.folderContentType) { return this.addToZip(writeInfo.source, writeInfo.output); } - const streamAsBuffer = await stream2buffer(writeInfo.source); // everything else can be zipped immediately to reduce the number of open files (windows has a low limit!) 
and help perf - if (streamAsBuffer.length) { - return this.addToZip(streamAsBuffer, writeInfo.output); - } - // these will be zero-length files, which archiver supports via stream but not buffer - return this.addToZip(writeInfo.source, writeInfo.output); + const streamAsBuffer = await stream2buffer(writeInfo.source); + return streamAsBuffer.length + ? this.addToZip(streamAsBuffer, writeInfo.output) + : // these will be zero-length files, which archiver supports via stream but not buffer + this.addToZip(writeInfo.source, writeInfo.output); }) ); } catch (e) { @@ -289,11 +254,8 @@ export class ZipWriter extends ComponentWriter { * even though it's not beneficial in the typical way a stream is. */ export class JsToXml extends Readable { - private xmlObject: JsonMap; - - public constructor(xmlObject: JsonMap) { + public constructor(private xmlObject: JsonMap) { super(); - this.xmlObject = xmlObject; } public _read(): void { diff --git a/src/convert/transformers/defaultMetadataTransformer.ts b/src/convert/transformers/defaultMetadataTransformer.ts index 7d93d8b094..b9f0bd7cd0 100644 --- a/src/convert/transformers/defaultMetadataTransformer.ts +++ b/src/convert/transformers/defaultMetadataTransformer.ts @@ -9,6 +9,7 @@ import { META_XML_SUFFIX, SourcePath } from '../../common'; import { SfdxFileFormat, WriteInfo } from '../types'; import { SourceComponent } from '../../resolve'; import { extName, trimUntil } from '../../utils'; +import { getReplacementStreamForReadable } from '../replacements'; import { BaseMetadataTransformer } from './baseMetadataTransformer'; const ORIGINAL_SUFFIX_REGEX = new RegExp('(.)([a-zA-Z]+)(' + META_XML_SUFFIX + ')$'); @@ -36,27 +37,23 @@ const getWriteInfos = ( component: SourceComponent, targetFormat: SfdxFileFormat, mergeWith?: SourceComponent -): WriteInfo[] => { - const writeInfos: WriteInfo[] = []; - - if (component.content) { - for (const source of component.walkContent()) { - writeInfos.push({ - source: component.tree.stream(source), - 
output: getContentSourceDestination(source, targetFormat, component, mergeWith), - }); - } - } - - if (component.xml) { - writeInfos.push({ - source: component.tree.stream(component.xml), - output: getXmlDestination(targetFormat, component, mergeWith), - }); - } - - return writeInfos; -}; +): WriteInfo[] => + component + .walkContent() + .map((path) => ({ + source: getReplacementStreamForReadable(component, path), + output: getContentSourceDestination(path, targetFormat, component, mergeWith), + })) + .concat( + component.xml + ? [ + { + source: getReplacementStreamForReadable(component, component.xml), + output: getXmlDestination(targetFormat, component, mergeWith), + }, + ] + : [] + ); // assumes component has content const getContentSourceDestination = ( diff --git a/src/convert/transformers/staticResourceMetadataTransformer.ts b/src/convert/transformers/staticResourceMetadataTransformer.ts index bf62da992c..90a1afdc1e 100644 --- a/src/convert/transformers/staticResourceMetadataTransformer.ts +++ b/src/convert/transformers/staticResourceMetadataTransformer.ts @@ -4,9 +4,9 @@ * Licensed under the BSD 3-Clause license. 
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import { basename, dirname, isAbsolute, join } from 'path'; +import { basename, dirname, isAbsolute, join, relative } from 'path'; import { Readable } from 'stream'; -import { create as createArchive } from 'archiver'; +import { create as createArchive, Archiver } from 'archiver'; import { getExtension } from 'mime'; import { Open } from 'unzipper'; import { JsonMap } from '@salesforce/ts-types'; @@ -18,6 +18,7 @@ import { SourceComponent } from '../../resolve'; import { SourcePath } from '../../common'; import { ensureFileExists } from '../../utils/fileSystemHandler'; import { pipeline } from '../streams'; +import { getReplacementStreamForReadable } from '../replacements'; import { BaseMetadataTransformer } from './baseMetadataTransformer'; Messages.importMessagesDirectory(__dirname); @@ -38,26 +39,33 @@ export class StaticResourceMetadataTransformer extends BaseMetadataTransformer { public async toMetadataFormat(component: SourceComponent): Promise { const { content, type, xml } = component; - let contentSource: Readable; - - if (await componentIsExpandedArchive(component)) { + const zipIt = async (): Promise => { // toolbelt was using level 9 for static resources, so we'll do the same. // Otherwise, you'll see errors like https://github.com/forcedotcom/cli/issues/1098 const zip = createArchive('zip', { zlib: { level: 9 } }); - zip.directory(content, false); - void zip.finalize(); - contentSource = zip; - } else { - contentSource = component.tree.stream(content); - } + if (!component.replacements) { + // the easy way...no replacements required + zip.directory(content, false); + } else { + // the hard way--we have to walk the content and do replacements on each of the files. 
+ for (const path of component.walkContent()) { + const replacementStream = getReplacementStreamForReadable(component, path); + zip.append(replacementStream, { name: relative(content, path) }); + } + } + await zip.finalize(); + return zip; + }; return [ { - source: contentSource, + source: (await componentIsExpandedArchive(component)) + ? await zipIt() + : getReplacementStreamForReadable(component, content), output: join(type.directoryName, `${baseName(content)}.${type.suffix}`), }, { - source: component.tree.stream(xml), + source: getReplacementStreamForReadable(component, xml), output: join(type.directoryName, basename(xml)), }, ]; diff --git a/src/convert/types.ts b/src/convert/types.ts index d63e6d28f8..cb5df08830 100644 --- a/src/convert/types.ts +++ b/src/convert/types.ts @@ -85,7 +85,7 @@ export interface MetadataTransformer { * * `metadata` - Structure for use with the metadata api. * - * `source` - Friendly for local editing and comitting files to source control. + * `source` - Friendly for local editing and committing files to source control. */ export type SfdxFileFormat = 'metadata' | 'source'; @@ -101,7 +101,46 @@ export type ConvertResult = { */ zipBuffer?: Buffer; /** - * Converted source components. Not set if archving the package. + * Converted source components. Not set if archiving the package. 
 */ converted?: SourceComponent[]; }; + +/** Stored by file on SourceComponent for stream processing */ +export type MarkedReplacement = { + toReplace: RegExp; + replaceWith: string; + matchedFilename: string; + singleFile?: boolean; +}; + +// TODO: what's the right way to get this into core/sfdxProjectJson +export type ReplacementConfig = ReplacementLocation & + ReplacementSource & + ReplacementTarget & { + /** Only do the replacement if ALL of the environment values in this array match */ + replaceWhenEnv?: [ + { + env: string; + value: string | number | boolean; + } + ]; + }; + +type ReplacementLocation = { filename: string; glob?: never } | { filename?: never; glob: string }; +type ReplacementSource = + | { replaceWithEnv: string; replaceWithFile?: never } + | { replaceWithEnv?: never; replaceWithFile: string }; + +type ReplacementTarget = + | { stringToReplace: string; regexToReplace?: never } + | { + stringToReplace?: never; + /** When putting regex into json, you have to use an extra backslash to escape your regex backslashes because JSON also treats backslash as an escape character */ + regexToReplace: string; + }; + +export type ReplacementEvent = { + filename: string; + replaced: string; +}; diff --git a/src/resolve/sourceComponent.ts b/src/resolve/sourceComponent.ts index 90f100b754..66498e1ae4 100644 --- a/src/resolve/sourceComponent.ts +++ b/src/resolve/sourceComponent.ts @@ -9,13 +9,16 @@ import { Messages, SfError } from '@salesforce/core'; import { parse, validate } from 'fast-xml-parser'; import { get, getString, JsonMap } from '@salesforce/ts-types'; import { ensureArray } from '@salesforce/kit'; +import { replacementIterations } from '../convert/replacements'; import { baseName, parseMetadataXml, trimUntil } from '../utils'; import { DEFAULT_PACKAGE_ROOT_SFDX } from '../common'; import { SfdxFileFormat } from '../convert'; import { MetadataType } from '../registry'; import { DestructiveChangesType } from '../collections'; import {
filePathsFromMetadataComponent } from '../utils/filePathGenerator'; +import { MarkedReplacement } from '../convert/types'; import { MetadataComponent, VirtualDirectory } from './types'; + import { NodeFSTreeContainer, TreeContainer, VirtualTreeContainer } from './treeContainers'; import { ForceIgnore } from './forceIgnore'; @@ -44,6 +47,7 @@ export class SourceComponent implements MetadataComponent { public readonly parent?: SourceComponent; public parentType?: MetadataType; public content?: string; + public replacements: Record; private treeContainer: TreeContainer; private forceIgnore: ForceIgnore; private markedForDelete = false; @@ -159,7 +163,11 @@ export class SourceComponent implements MetadataComponent { const xml = xmlFilePath ?? this.xml; if (xml) { const contents = (await this.tree.readFile(xml)).toString(); - return this.parseAndValidateXML(contents, xml); + const replacements = this.replacements?.[xml] ?? this.parent?.replacements?.[xml]; + return this.parseAndValidateXML( + replacements ? await replacementIterations(contents, replacements) : contents, + xml + ); } return {} as T; } @@ -179,11 +187,7 @@ export class SourceComponent implements MetadataComponent { * @return ForceIgnore */ public getForceIgnore(): ForceIgnore { - if (this.forceIgnore) { - return this.forceIgnore; - } else { - return ForceIgnore.findAndCreate(this.content); - } + return this.forceIgnore ?? 
ForceIgnore.findAndCreate(this.content); } /** diff --git a/test/convert/metadataConverter.test.ts b/test/convert/metadataConverter.test.ts index 66bdece395..60a59562e6 100644 --- a/test/convert/metadataConverter.test.ts +++ b/test/convert/metadataConverter.test.ts @@ -13,7 +13,6 @@ import { assert, expect } from 'chai'; import { TestContext } from '@salesforce/core/lib/testSetup'; import { xmlInFolder } from '../mock'; import * as streams from '../../src/convert/streams'; -import { ComponentReader } from '../../src/convert/streams'; import * as fsUtil from '../../src/utils/fileSystemHandler'; import { COMPONENTS } from '../mock/type-constants/documentFolderConstant'; import { ComponentSet, DestructiveChangesType, MetadataConverter, registry, SourceComponent } from '../../src'; @@ -45,10 +44,9 @@ describe('MetadataConverter', () => { /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ function validatePipelineArgs(pipelineArgs: any[], targetFormat = 'metadata'): void { - expect(pipelineArgs[0] instanceof streams.ComponentReader).to.be.true; - expect(pipelineArgs[1] instanceof streams.ComponentConverter).to.be.true; - expect(pipelineArgs[1].targetFormat).to.equal(targetFormat); - expect(pipelineArgs[2] instanceof streams.ComponentWriter).to.be.true; + expect(pipelineArgs[2] instanceof streams.ComponentConverter).to.be.true; + expect(pipelineArgs[2].targetFormat).to.equal(targetFormat); + expect(pipelineArgs[3] instanceof streams.ComponentWriter).to.be.true; } beforeEach(() => { @@ -69,7 +67,7 @@ describe('MetadataConverter', () => { outputDirectory, }); - expect(pipelineStub.firstCall.args[2].rootDestination).to.equal(packagePath); + expect(pipelineStub.firstCall.args[3].rootDestination).to.equal(packagePath); }); it('should convert to specified output dir', async () => { @@ -79,7 +77,7 @@ describe('MetadataConverter', () => { genUniqueDir: false, }); - expect(pipelineStub.firstCall.args[2].rootDestination).to.equal(outputDirectory); + 
expect(pipelineStub.firstCall.args[3].rootDestination).to.equal(outputDirectory); }); it('should throw ConversionError when an error occurs', async () => { @@ -125,8 +123,8 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.firstCall.args; validatePipelineArgs(pipelineArgs); - expect(pipelineArgs[2] instanceof streams.StandardWriter).to.be.true; - expect(pipelineArgs[2].rootDestination).to.equal(packageOutput); + expect(pipelineArgs[3] instanceof streams.StandardWriter).to.be.true; + expect(pipelineArgs[3].rootDestination).to.equal(packageOutput); }); it('should create conversion pipeline with normalized output directory', async () => { @@ -138,8 +136,8 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.firstCall.args; validatePipelineArgs(pipelineArgs); - expect(pipelineArgs[2] instanceof streams.StandardWriter).to.be.true; - expect(pipelineArgs[2].rootDestination).to.equal(packageName); + expect(pipelineArgs[3] instanceof streams.StandardWriter).to.be.true; + expect(pipelineArgs[3].rootDestination).to.equal(packageName); }); it('should return packagePath in result', async () => { @@ -314,8 +312,8 @@ describe('MetadataConverter', () => { // secondCall is used because ZipWriter uses pipeline upon construction const pipelineArgs = pipelineStub.secondCall.args; validatePipelineArgs(pipelineArgs); - expect(pipelineArgs[2] instanceof streams.ZipWriter).to.be.true; - expect(pipelineArgs[2].rootDestination).to.equal(`${packageOutput}.zip`); + expect(pipelineArgs[3] instanceof streams.ZipWriter).to.be.true; + expect(pipelineArgs[3].rootDestination).to.equal(`${packageOutput}.zip`); }); it('should create conversion pipeline with in-memory configuration', async () => { @@ -323,8 +321,8 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.secondCall.args; validatePipelineArgs(pipelineArgs); - expect(pipelineArgs[2] instanceof streams.ZipWriter).to.be.true; - 
expect(pipelineArgs[2].rootDestination).to.be.undefined; + expect(pipelineArgs[3] instanceof streams.ZipWriter).to.be.true; + expect(pipelineArgs[3].rootDestination).to.be.undefined; }); it('should return zipBuffer result for in-memory configuration', async () => { @@ -457,13 +455,11 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.firstCall.args; validatePipelineArgs(pipelineArgs, 'source'); - expect(pipelineArgs[1].mergeSet).to.deep.equal(new ComponentSet(COMPONENTS)); - expect(pipelineArgs[2].rootDestination).to.equal(defaultDirectory); + expect(pipelineArgs[2].mergeSet).to.deep.equal(new ComponentSet(COMPONENTS)); + expect(pipelineArgs[3].rootDestination).to.equal(defaultDirectory); }); it('should create conversion pipeline with addressable components', async () => { - // @ts-ignore private - const componentReaderSpy = $$.SANDBOX.spy(ComponentReader.prototype, 'createIterator'); components.push({ type: registry.types.customobjecttranslation.children.types.customfieldtranslation, name: 'myFieldTranslation', @@ -479,11 +475,9 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.firstCall.args; validatePipelineArgs(pipelineArgs, 'source'); - expect(componentReaderSpy.firstCall.args[0].length).to.equal(3); // pop off the CFT that should be filtered off for the assertion components.pop(); - expect(componentReaderSpy.firstCall.args[0]).to.deep.equal(components); - expect(pipelineArgs[2].rootDestination).to.equal(defaultDirectory); + expect(pipelineArgs[3].rootDestination).to.equal(defaultDirectory); }); it('should ensure merge set contains parents of child components instead of the children themselves', async () => { @@ -495,7 +489,7 @@ describe('MetadataConverter', () => { const pipelineArgs = pipelineStub.firstCall.args; validatePipelineArgs(pipelineArgs, 'source'); - expect(pipelineArgs[1].mergeSet).to.deep.equal(new ComponentSet([DECOMPOSED_CHILD_COMPONENT_1.parent])); + 
expect(pipelineArgs[2].mergeSet).to.deep.equal(new ComponentSet([DECOMPOSED_CHILD_COMPONENT_1.parent])); }); }); }); diff --git a/test/convert/replacements.test.ts b/test/convert/replacements.test.ts new file mode 100644 index 0000000000..dbd892b26c --- /dev/null +++ b/test/convert/replacements.test.ts @@ -0,0 +1,262 @@ +/* + * Copyright (c) 2020, salesforce.com, inc. + * All rights reserved. + * Licensed under the BSD 3-Clause license. + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +import * as path from 'path'; +import { expect } from 'chai'; +import Sinon = require('sinon'); +import { Lifecycle } from '@salesforce/core'; +import { getReplacements, matchesFile, replacementIterations, stringToRegex } from '../../src/convert/replacements'; +import { matchingContentFile } from '../mock'; +import * as replacementsForMock from '../../src/convert/replacements'; + +describe('file matching', () => { + const base = { replaceWithEnv: 'foo', stringToReplace: 'foo' }; + it('file matches string', () => { + expect(matchesFile('foo', { filename: 'foo', ...base })).to.be.true; + expect(matchesFile('bar', { filename: 'foo', ...base })).to.not.be.true; + }); + it('file matches glob (posix paths)', () => { + expect(matchesFile('foo/bar', { glob: 'foo/**', ...base })).to.be.true; + expect(matchesFile('foo/bar', { glob: 'foo/*', ...base })).to.be.true; + expect(matchesFile('foo/bar', { glob: 'foo', ...base })).to.be.false; + expect(matchesFile('foo/bar', { glob: '**/*', ...base })).to.be.true; + }); + it('file matches glob (os-dependent paths)', () => { + expect(matchesFile(path.join('foo', 'bar'), { glob: 'foo/**', ...base })).to.be.true; + expect(matchesFile(path.join('foo', 'bar'), { glob: 'foo/*', ...base })).to.be.true; + expect(matchesFile(path.join('foo', 'bar'), { glob: 'foo', ...base })).to.be.false; + expect(matchesFile(path.join('foo', 'bar'), { glob: '**/*', ...base })).to.be.true; + }); + it('test 
absolute vs. relative paths'); +}); + +describe('env filters', () => {}); + +describe('marking replacements on a component', () => { + before(() => { + // replaceFromFile uses the contents of a file. This prevents the test from hitting real FS for that. + Sinon.stub(replacementsForMock, 'getContentsOfReplacementFile').resolves('bar'); + }); + + after(() => { + Sinon.restore(); + }); + + process.env.FOO_REPLACEMENT = 'bar'; + const cmp = matchingContentFile.COMPONENT; + + beforeEach(() => { + delete cmp.replacements; + }); + + it('marks no replacements when passed no configs', async () => { + expect(await getReplacements(cmp)).to.be.undefined; + expect(await getReplacements(cmp, [])).to.be.undefined; + }); + it('marks a string replacement from env', async () => { + const result = await getReplacements(cmp, [ + { filename: cmp.xml, stringToReplace: 'foo', replaceWithEnv: 'FOO_REPLACEMENT' }, + ]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: true, + }, + ], + }); + }); + it('marks string replacements from file', async () => { + const result = await getReplacements(cmp, [{ filename: cmp.xml, stringToReplace: 'foo', replaceWithFile: 'bar' }]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: true, + }, + ], + }); + }); + + it('marks regex replacements on a matching file', async () => { + const result = await getReplacements(cmp, [ + { filename: cmp.xml, regexToReplace: '.*foo.*', replaceWithEnv: 'FOO_REPLACEMENT' }, + ]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: /.*foo.*/g, + replaceWith: 'bar', + singleFile: true, + }, + ], + }); + }); + it('marks 2 replacements on one file', async () => { + const result = await getReplacements(cmp, [ + { filename: cmp.xml, stringToReplace: 'foo', replaceWithEnv: 
'FOO_REPLACEMENT' }, + { filename: cmp.xml, stringToReplace: 'baz', replaceWithEnv: 'FOO_REPLACEMENT' }, + ]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: true, + }, + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('baz'), + replaceWith: 'bar', + singleFile: true, + }, + ], + }); + }); + it('marks two files with 1 replacement each for greedy glob', async () => { + const result = await getReplacements(cmp, [ + { glob: '**/*', stringToReplace: 'foo', replaceWithEnv: 'FOO_REPLACEMENT' }, + ]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: false, + }, + ], + [cmp.content]: [ + { + matchedFilename: cmp.content, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: false, + }, + ], + }); + }); + it('marks replacement on multiple files from multiple configs', async () => { + const result = await getReplacements(cmp, [ + { filename: cmp.xml, stringToReplace: 'foo', replaceWithEnv: 'FOO_REPLACEMENT' }, + { filename: cmp.content, stringToReplace: 'foo', replaceWithEnv: 'FOO_REPLACEMENT' }, + ]); + expect(result).to.deep.equal({ + [cmp.xml]: [ + { + matchedFilename: cmp.xml, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: true, + }, + ], + [cmp.content]: [ + { + matchedFilename: cmp.content, + toReplace: stringToRegex('foo'), + replaceWith: 'bar', + singleFile: true, + }, + ], + }); + }); + it('throws when env is missing'); +}); + +describe('executes replacements on a string', () => { + const matchedFilename = 'foo'; + describe('string', () => { + it('basic replacement', async () => { + expect( + await replacementIterations('ThisIsATest', [ + { matchedFilename, toReplace: stringToRegex('This'), replaceWith: 'That', singleFile: true }, + ]) + ).to.equal('ThatIsATest'); + }); + it('same replacement occuring multiple 
times', async () => { + expect( + await replacementIterations('ThisIsATestWithThisAndThis', [ + { matchedFilename, toReplace: stringToRegex('This'), replaceWith: 'That', singleFile: true }, + ]) + ).to.equal('ThatIsATestWithThatAndThat'); + }); + it('multiple replacements', async () => { + expect( + await replacementIterations('ThisIsATestWithThisAndThis', [ + { matchedFilename, toReplace: stringToRegex('This'), replaceWith: 'That' }, + { matchedFilename, toReplace: stringToRegex('ATest'), replaceWith: 'AnAwesomeTest' }, + ]) + ).to.equal('ThatIsAnAwesomeTestWithThatAndThat'); + }); + }); + describe('regex', () => { + it('basic replacement', async () => { + expect( + await replacementIterations('ThisIsATest', [ + { toReplace: /Is/g, replaceWith: 'IsNot', singleFile: true, matchedFilename }, + ]) + ).to.equal('ThisIsNotATest'); + }); + it('same replacement occuring multiple times', async () => { + expect( + await replacementIterations('ThisIsATestWithThisAndThis', [ + { toReplace: /s/g, replaceWith: 'S', singleFile: true, matchedFilename }, + ]) + ).to.equal('ThiSISATeStWithThiSAndThiS'); + }); + it('multiple replacements', async () => { + expect( + await replacementIterations('This Is A Test With This And This', [ + { toReplace: /^T.{2}s/, replaceWith: 'That', singleFile: false, matchedFilename }, + { toReplace: /T.{2}s$/, replaceWith: 'Stuff', singleFile: false, matchedFilename }, + ]) + ).to.equal('That Is A Test With This And Stuff'); + }); + }); + + describe('warning when no replacement happened', () => { + let warnSpy: Sinon.SinonSpy; + let emitSpy: Sinon.SinonSpy; + + beforeEach(() => { + // everything is an emit. Warn calls emit, too. 
+ warnSpy = Sinon.spy(Lifecycle.getInstance(), 'emitWarning'); + emitSpy = Sinon.spy(Lifecycle.getInstance(), 'emit'); + }); + afterEach(() => { + warnSpy.restore(); + emitSpy.restore(); + }); + it('emits warning only when no change', async () => { + await replacementIterations('ThisIsATest', [ + { toReplace: stringToRegex('Nope'), replaceWith: 'Nah', singleFile: true, matchedFilename }, + ]); + expect(warnSpy.callCount).to.equal(1); + expect(emitSpy.callCount).to.equal(1); + }); + it('no warning when string is replaced', async () => { + await replacementIterations('ThisIsATest', [ + { toReplace: stringToRegex('Test'), replaceWith: 'SpyTest', singleFile: true, matchedFilename }, + ]); + expect(warnSpy.callCount).to.equal(0); + // because it emits the replacement event + expect(emitSpy.callCount).to.equal(1); + }); + it('no warning when no replacement but not a single file (ex: glob)', async () => { + await replacementIterations('ThisIsATest', [ + { toReplace: stringToRegex('Nope'), replaceWith: 'Nah', singleFile: false, matchedFilename }, + ]); + expect(warnSpy.callCount).to.equal(0); + expect(emitSpy.callCount).to.equal(0); + }); + }); +}); diff --git a/test/convert/streams.test.ts b/test/convert/streams.test.ts index 613abc129c..082d038ec4 100644 --- a/test/convert/streams.test.ts +++ b/test/convert/streams.test.ts @@ -44,18 +44,6 @@ class TestTransformer extends BaseMetadataTransformer { describe('Streams', () => { afterEach(() => env.restore()); - describe('ComponentReader', () => { - it('should read metadata components one at a time', async () => { - const reader = new streams.ComponentReader(COMPONENTS); - let currentIndex = 0; - for await (const component of reader) { - expect(component).to.deep.equal(COMPONENTS[currentIndex]); - currentIndex += 1; - } - expect(currentIndex).to.equal(COMPONENTS.length); - }); - }); - /** * NOTE: tests that call _transform methods must utilize Mocha.done to signal * when a test has finished and to pass on any assertion 
failures to the test diff --git a/test/nuts/local/replacements/replacements.nut.ts b/test/nuts/local/replacements/replacements.nut.ts new file mode 100644 index 0000000000..d109a21a79 --- /dev/null +++ b/test/nuts/local/replacements/replacements.nut.ts @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2020, salesforce.com, inc. + * All rights reserved. + * Licensed under the BSD 3-Clause license. + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +import * as path from 'path'; +import * as fs from 'fs'; +import { Open } from 'unzipper'; +import { TestSession } from '@salesforce/cli-plugins-testkit'; +import { expect } from 'chai'; +import { ComponentSetBuilder, MetadataConverter } from '../../../../src'; + +describe('e2e replacements test', () => { + let session: TestSession; + + before(async () => { + session = await TestSession.create({ + project: { + sourceDir: path.join('test', 'nuts', 'local', 'replacements', 'testProj'), + }, + authStrategy: 'NONE', + }); + // Hack: rewrite the file replacement locations relative to the project + const projectJsonPath = path.join(session.project.dir, 'sfdx-project.json'); + const original = await fs.promises.readFile(projectJsonPath, 'utf8'); + await fs.promises.writeFile( + projectJsonPath, + original + // we're putting this in a json file which doesnt like windows backslashes. 
The file will require posix paths + .replace( + 'replacements.txt', + path.join(session.project.dir, 'replacements.txt').split(path.sep).join(path.posix.sep) + ) + .replace('label.txt', path.join(session.project.dir, 'label.txt').split(path.sep).join(path.posix.sep)) + ); + }); + + after(async () => { + await session?.clean(); + }); + + describe('various types of replacements', () => { + it('converts a componentSet built from the testProj to a zip', async () => { + process.env.THE_REPLACEMENT = 'foo'; + const converter = new MetadataConverter(); + const cs = await ComponentSetBuilder.build({ sourcepath: [path.join(session.project.dir, 'force-app')] }); + const { zipBuffer } = await converter.convert(cs, 'metadata', { + type: 'zip', + }); + expect(zipBuffer).to.not.be.undefined; + await (await Open.buffer(zipBuffer)).extract({ path: path.join(session.project.dir, 'unzipped') }); + }); + + it('class replacements as expected', async () => { + const classContents = await fs.promises.readFile( + path.join(session.project.dir, 'unzipped', 'classes', 'replaceStuff.cls'), + 'utf8' + ); + expect(classContents).to.not.include('replaceMeWithEnv'); + expect(classContents).to.not.include('replaceMeWithFile'); + expect(classContents).to.not.include('replaceEachOfTheseValuesWithAValueFromTheEnvUsingRegex'); + expect(classContents).to.include('foo'); + expect(classContents).to.include( + (await fs.promises.readFile(path.join(session.project.dir, 'replacements.txt'), 'utf8')).trim() + ); + expect(classContents).to.include('foo'); + + expect(classContents).to.include('doNotReplaceThis'); + expect(classContents).to.not.include('conditionallyReplaceThis'); + }); + it('decomposed object replacements as expected', async () => { + const objectContents = await fs.promises.readFile( + path.join(session.project.dir, 'unzipped', 'objects', 'TestObj__c.object'), + 'utf8' + ); + expect(objectContents).to.not.include('placeholder'); + expect(objectContents).to.include('foo'); + 
expect(objectContents).to.include( + (await fs.promises.readFile(path.join(session.project.dir, 'label.txt'), 'utf8')).trim() + ); + }); + it('static resource object replacements as expected', async () => { + const files = ( + await Open.file(path.join(session.project.dir, 'unzipped', 'staticresources', 'Test.resource')) + ).files.filter((f) => f.type === 'File'); + + const buffers = await Promise.all(files.map(async (f) => f.buffer())); + buffers + .map((b) => b.toString()) + .map((contents) => { + expect(contents).to.not.include('placeholder'); + expect(contents).to.include('foo'); + }); + }); + }); +}); diff --git a/test/nuts/local/replacements/testProj/config/project-scratch-def.json b/test/nuts/local/replacements/testProj/config/project-scratch-def.json new file mode 100644 index 0000000000..760f654ba0 --- /dev/null +++ b/test/nuts/local/replacements/testProj/config/project-scratch-def.json @@ -0,0 +1,13 @@ +{ + "orgName": "shane.mclaughlin company", + "edition": "Developer", + "features": ["EnableSetPasswordInApi"], + "settings": { + "lightningExperienceSettings": { + "enableS1DesktopEnabled": true + }, + "mobileSettings": { + "enableS1EncryptedStoragePref2": false + } + } +} diff --git a/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls b/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls new file mode 100644 index 0000000000..b45f737881 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls @@ -0,0 +1,14 @@ +public with sharing class replaceStuff { + // replaceMeWithEnv + // replaceMeWithFile + + // replaceEachOfTheseValuesWithAValueFromTheEnvUsingRegex + // replaceEachOfTheseValuesWithAValueFromTheEnvUsingRegex + // replaceEachOfTheseValuesWithAValueFromTheEnvUsingRegex + + // doNotReplaceThis + // conditionallyReplaceThis + public replaceStuff() { + + } +} diff --git 
a/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls-meta.xml b/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls-meta.xml new file mode 100644 index 0000000000..4b0bc9f387 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/classes/replaceStuff.cls-meta.xml @@ -0,0 +1,5 @@ + + + 55.0 + Active + diff --git a/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/TestObj__c.object-meta.xml b/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/TestObj__c.object-meta.xml new file mode 100644 index 0000000000..5d5498adb5 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/TestObj__c.object-meta.xml @@ -0,0 +1,21 @@ + + + true + true + true + true + true + true + true + true + TestObjs + TestObj__c + placeholder + ReadWrite + + Text + + + Deployed + + diff --git a/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/fields/FieldA__c.field-meta.xml b/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/fields/FieldA__c.field-meta.xml new file mode 100644 index 0000000000..6f0dc262d5 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/objects/TestObj__c/fields/FieldA__c.field-meta.xml @@ -0,0 +1,12 @@ + + + FieldA__c + Text + 255 + placeholder + + false + false + Public + + diff --git a/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test.resource-meta.xml b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test.resource-meta.xml new file mode 100644 index 0000000000..2d18c2446f --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test.resource-meta.xml @@ -0,0 +1,7 @@ + + + Private + application/zip + added from sfdx plugin + Test + \ No newline at end of file diff --git 
a/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/folder/test2.css b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/folder/test2.css new file mode 100644 index 0000000000..d053e6fda2 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/folder/test2.css @@ -0,0 +1 @@ +/* This is a placeholder */ diff --git a/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/test.css b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/test.css new file mode 100644 index 0000000000..d053e6fda2 --- /dev/null +++ b/test/nuts/local/replacements/testProj/force-app/main/default/staticresources/Test/test.css @@ -0,0 +1 @@ +/* This is a placeholder */ diff --git a/test/nuts/local/replacements/testProj/label.txt b/test/nuts/local/replacements/testProj/label.txt new file mode 100644 index 0000000000..d82154b7ef --- /dev/null +++ b/test/nuts/local/replacements/testProj/label.txt @@ -0,0 +1 @@ + diff --git a/test/nuts/local/replacements/testProj/replacements.txt b/test/nuts/local/replacements/testProj/replacements.txt new file mode 100644 index 0000000000..91f2add8fc --- /dev/null +++ b/test/nuts/local/replacements/testProj/replacements.txt @@ -0,0 +1 @@ +this is a longer comment that's going to replace something in a metadata file diff --git a/test/nuts/local/replacements/testProj/sfdx-project.json b/test/nuts/local/replacements/testProj/sfdx-project.json new file mode 100644 index 0000000000..30a894d891 --- /dev/null +++ b/test/nuts/local/replacements/testProj/sfdx-project.json @@ -0,0 +1,66 @@ +{ + "packageDirectories": [ + { + "path": "force-app", + "default": true + } + ], + "name": "replacementTest", + "namespace": "", + "sfdcLoginUrl": "https://login.salesforce.com", + "sourceApiVersion": "55.0", + "replacements": [ + { + "glob": "force-app/**/*.cls", + "stringToReplace": "replaceMeWithEnv", + 
"replaceWithEnv": "THE_REPLACEMENT" + }, + { + "glob": "force-app/**/*.cls", + "stringToReplace": "doNotReplaceThis", + "replaceWithEnv": "THE_REPLACEMENT", + "replaceWhenEnv": [ + { + "env": "THE_REPLACEMENT", + "value": "bar" + } + ] + }, + { + "glob": "force-app/**/*.cls", + "stringToReplace": "conditionallyReplaceThis", + "replaceWithEnv": "THE_REPLACEMENT", + "replaceWhenEnv": [ + { + "env": "THE_REPLACEMENT", + "value": "foo" + } + ] + }, + { + "glob": "force-app/**/*.cls", + "stringToReplace": "replaceMeWithFile", + "replaceWithFile": "replacements.txt" + }, + { + "glob": "force-app/**/*.cls", + "regexToReplace": "\\b.*Regex", + "replaceWithEnv": "THE_REPLACEMENT" + }, + { + "glob": "force-app/main/default/objects/**/*", + "stringToReplace": "placeholder", + "replaceWithEnv": "THE_REPLACEMENT" + }, + { + "glob": "**/*.field-meta.xml", + "stringToReplace": "", + "replaceWithFile": "label.txt" + }, + { + "glob": "**/*.css", + "stringToReplace": "placeholder", + "replaceWithEnv": "THE_REPLACEMENT" + } + ] +} diff --git a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClassesOneDir.json b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/eda.json similarity index 53% rename from test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClassesOneDir.json rename to test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/eda.json index 7da90fd399..1658a7e458 100644 --- a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/eda.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 648.0908780097961 + "duration": 493.49703399837017 }, { "name": "sourceToMdapi", - "duration": 12539.2236790061 + "duration": 5436.327586993575 }, { "name": "sourceToZip", - "duration": 8442.249673008919 + "duration": 4556.773661002517 }, { "name": "mdapiToSource", - "duration": 
12450.891524016857 + "duration": 4874.957931995392 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClasses.json b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClasses.json similarity index 53% rename from test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClasses.json rename to test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClasses.json index 5a2e458616..a06d81e129 100644 --- a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 388.5986630022526 + "duration": 433.693730995059 }, { "name": "sourceToMdapi", - "duration": 8307.766464978456 + "duration": 8342.872605994344 }, { "name": "sourceToZip", - "duration": 6753.930882006884 + "duration": 7030.860814988613 }, { "name": "mdapiToSource", - "duration": 7472.047949999571 + "duration": 5282.1073610037565 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/eda.json b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClassesOneDir.json similarity index 52% rename from test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/eda.json rename to test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClassesOneDir.json index 2527931b59..e535353bc8 100644 --- a/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU--2-40GHz/eda.json +++ b/test/nuts/perfResults/x64-darwin-16xIntel-Core-i9-9980HK-CPU-2-40GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 492.98764300346375 + "duration": 760.8551979959011 }, { "name": "sourceToMdapi", - "duration": 5277.533275008202 + "duration": 15508.725342988968 }, { 
"name": "sourceToZip", - "duration": 3976.5643639862537 + "duration": 11001.649205997586 }, { "name": "mdapiToSource", - "duration": 7064.533327996731 + "duration": 10145.04680301249 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/eda.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/eda.json index 1b43fc0144..10323c33c2 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/eda.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/eda.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 328.08031600000686 + "duration": 356.4171700000006 }, { "name": "sourceToMdapi", - "duration": 7152.719090000028 + "duration": 7073.228318999987 }, { "name": "sourceToZip", - "duration": 6235.375627000001 + "duration": 6332.181806000008 }, { "name": "mdapiToSource", - "duration": 5907.137515000009 + "duration": 6008.502924 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClasses.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClasses.json index d9990b0787..d1d4cdf8c7 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 666.5169450000394 + "duration": 679.544412999996 }, { "name": "sourceToMdapi", - "duration": 12308.666303000005 + "duration": 13732.503479000006 }, { "name": "sourceToZip", - "duration": 9254.614367999951 + "duration": 10400.92113599999 }, { "name": "mdapiToSource", - "duration": 10135.46397099999 + "duration": 8115.602960999997 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClassesOneDir.json 
b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClassesOneDir.json index aa6f4e3bca..19c86c028c 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v3-2-40GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 1145.7118510000291 + "duration": 1184.3568459999806 }, { "name": "sourceToMdapi", - "duration": 17694.542747 + "duration": 17854.442958 }, { "name": "sourceToZip", - "duration": 15521.322923999978 + "duration": 17075.238639000017 }, { "name": "mdapiToSource", - "duration": 13688.250294000027 + "duration": 14847.72293400002 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/eda.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/eda.json index d9c9c02db7..8bf5d980a7 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/eda.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/eda.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 329.62463900004514 + "duration": 324.71387500001583 }, { "name": "sourceToMdapi", - "duration": 7505.23844700004 + "duration": 7122.961764000007 }, { "name": "sourceToZip", - "duration": 6470.010548999999 + "duration": 6274.939941999997 }, { "name": "mdapiToSource", - "duration": 5877.771837000037 + "duration": 5758.478280999989 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClasses.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClasses.json index 89695e54d3..1a280577eb 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": 
"componentSetCreate", - "duration": 644.1402209999505 + "duration": 651.9775850000151 }, { "name": "sourceToMdapi", - "duration": 11856.592849999957 + "duration": 10889.311952999997 }, { "name": "sourceToZip", - "duration": 10338.099250999978 + "duration": 9392.743359000015 }, { "name": "mdapiToSource", - "duration": 8291.250971000001 + "duration": 7095.85508899999 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClassesOneDir.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClassesOneDir.json index da21a2c969..a41ec885e7 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-CPU-E5-2673-v4-2-30GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 1090.1220250000479 + "duration": 1033.0352329999732 }, { "name": "sourceToMdapi", - "duration": 17970.10952900001 + "duration": 16734.752318999992 }, { "name": "sourceToZip", - "duration": 15943.596995999978 + "duration": 15027.624977999978 }, { "name": "mdapiToSource", - "duration": 14545.061661000014 + "duration": 12297.485982999991 } -] +] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/eda.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/eda.json index 02f05fb8c1..5cb5d8eac3 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/eda.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/eda.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 256.72601899999427 + "duration": 258.7272329999978 }, { "name": "sourceToMdapi", - "duration": 6796.90185200001 + "duration": 7064.5000979999895 }, { "name": "sourceToZip", - "duration": 5362.048718999984 + "duration": 5043.188347000003 }, { "name": "mdapiToSource", - 
"duration": 5302.37870999999 + "duration": 4514.134340000019 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClasses.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClasses.json index 3130730968..3bb8d75dad 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 552.7289729999902 + "duration": 479.82419299997855 }, { "name": "sourceToMdapi", - "duration": 11819.851255999994 + "duration": 8941.918268000009 }, { "name": "sourceToZip", - "duration": 8429.234218000027 + "duration": 10533.41592499998 }, { "name": "mdapiToSource", - "duration": 6296.675203000021 + "duration": 5452.256045999995 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClassesOneDir.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClassesOneDir.json index 0d2a950086..44ddb1dd6e 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8171M-CPU-2-60GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 946.1013220000314 + "duration": 830.0290470000182 }, { "name": "sourceToMdapi", - "duration": 15312.147095999971 + "duration": 12821.065571999992 }, { "name": "sourceToZip", - "duration": 13901.202661000018 + "duration": 11678.985925000015 }, { "name": "mdapiToSource", - "duration": 12138.095666000037 + "duration": 13113.897208999988 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClasses.json 
b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClasses.json index 36ee947ad5..e36fe8fc68 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 411.68149899999844 + "duration": 470.6312699999835 }, { "name": "sourceToMdapi", - "duration": 8466.343819000002 + "duration": 7751.746233000013 }, { "name": "sourceToZip", - "duration": 6411.866650000011 + "duration": 6782.484058000002 }, { "name": "mdapiToSource", - "duration": 5187.5936060000095 + "duration": 4625.043036999996 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClassesOneDir.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClassesOneDir.json index 008046be09..b0a92e7c9d 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8272CL-CPU-2-60GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 734.0428379999939 + "duration": 729.9692550000036 }, { "name": "sourceToMdapi", - "duration": 13081.072366000008 + "duration": 9952.340448999981 }, { "name": "sourceToZip", - "duration": 10285.210365000006 + "duration": 10424.467409000004 }, { "name": "mdapiToSource", - "duration": 12955.521213 + "duration": 7663.856468000013 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/eda.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/eda.json index 930d69966d..384e83d52f 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/eda.json +++ 
b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/eda.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 198.5818020000006 + "duration": 199.40888699999778 }, { "name": "sourceToMdapi", - "duration": 4836.452738000022 + "duration": 5154.8034650000045 }, { "name": "sourceToZip", - "duration": 3890.752925000037 + "duration": 4552.947791000013 }, { "name": "mdapiToSource", - "duration": 3206.40159199998 + "duration": 3261.958102000004 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClasses.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClasses.json index 7792311d70..a379e75f44 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClasses.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClasses.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 375.96476499998244 + "duration": 389.6657969999942 }, { "name": "sourceToMdapi", - "duration": 7423.678190000006 + "duration": 7406.009941000026 }, { "name": "sourceToZip", - "duration": 6435.257741000038 + "duration": 9112.750708999985 }, { "name": "mdapiToSource", - "duration": 4012.0586630000034 + "duration": 3721.0864500000025 } ] \ No newline at end of file diff --git a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClassesOneDir.json b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClassesOneDir.json index ff4e1f1dd8..50502c68d2 100644 --- a/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClassesOneDir.json +++ b/test/nuts/perfResults/x64-linux-2xIntel-Xeon-Platinum-8370C-CPU-2-80GHz/lotsOfClassesOneDir.json @@ -1,18 +1,18 @@ [ { "name": "componentSetCreate", - "duration": 697.4607039999682 + "duration": 676.9866699999548 }, { "name": "sourceToMdapi", - "duration": 10567.754053000011 
+ "duration": 10331.149006000021 }, { "name": "sourceToZip", - "duration": 8675.471619000018 + "duration": 8735.021562999988 }, { "name": "mdapiToSource", - "duration": 7082.036690000037 + "duration": 7454.696846999985 } ] \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index fbed5b78df..b7bc21c4cf 100644 --- a/yarn.lock +++ b/yarn.lock @@ -943,6 +943,11 @@ resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== +"@types/minimatch@^5.1.2": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== + "@types/minimist@^1.2.0": version "1.2.2" resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c"