diff --git a/METADATA_SUPPORT.md b/METADATA_SUPPORT.md
index edd21f2e11..1d5eea3226 100644
--- a/METADATA_SUPPORT.md
+++ b/METADATA_SUPPORT.md
@@ -488,9 +488,11 @@ v55 introduces the following new types. Here's their current level of support
 |ExternalDataSrcDescriptor|❌|Not supported, but support could be added|
 |ExternalDataTranField|❌|Not supported, but support could be added|
 |ExternalDataTranObject|❌|Not supported, but support could be added|
+|FavoriteTransferDestination|❌|Not supported, but support could be added|
 |IndustriesAutomotiveSettings|✅||
 |IndustriesMfgServiceSettings|✅||
 |InvLatePymntRiskCalcSettings|✅||
+|LiveChatObjectAccessDefinition|❌|Not supported, but support could be added|
 |PaymentsManagementEnabledSettings|✅||
 |RegisteredExternalService|❌|Not supported, but support could be added|
 |StreamingAppDataConnector|❌|Not supported, but support could be added|
diff --git a/src/convert/streams.ts b/src/convert/streams.ts
index b9416b71b9..2723f057b9 100644
--- a/src/convert/streams.ts
+++ b/src/convert/streams.ts
@@ -5,7 +5,7 @@
  * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
  */
 import { basename, dirname, isAbsolute, join } from 'path';
-import { pipeline as cbPipeline, Readable, Transform, Writable, Stream } from 'stream';
+import { pipeline as cbPipeline, Readable, Stream, Transform, Writable } from 'stream';
 import { promisify } from 'util';
 import { Archiver, create as createArchive } from 'archiver';
 import { createWriteStream, existsSync } from 'graceful-fs';
@@ -13,11 +13,11 @@ import { JsonMap } from '@salesforce/ts-types';
 import { j2xParser } from 'fast-xml-parser';
 import { Logger } from '@salesforce/core';
 import { MetadataResolver, SourceComponent } from '../resolve';
-import { ensureFileExists } from '../utils/fileSystemHandler';
 import { SourcePath, XML_DECL } from '../common';
 import { ComponentSet } from '../collections';
 import { LibraryError } from '../errors';
 import { RegistryAccess } from '../registry';
+import { ensureFileExists } from '../utils/fileSystemHandler';
 import { MetadataTransformerFactory } from './transformers';
 import { ConvertContext } from './convertContext';
 import { SfdxFileFormat, WriteInfo, WriterFormat } from './types';
@@ -92,6 +92,7 @@ export class ComponentConverter extends Transform {
     try {
       const converts: Array<Promise<WriterFormat>> = [];
       const transformer = this.transformerFactory.getTransformer(chunk);
+      transformer.defaultDirectory = this.defaultDirectory;
       const mergeWith = this.mergeSet?.getSourceComponents(chunk);
       switch (this.targetFormat) {
         case 'source':
@@ -163,43 +164,43 @@ export class StandardWriter extends ComponentWriter {
     if (chunk.writeInfos.length !== 0) {
       try {
         const toResolve: string[] = [];
-        const writeTasks = chunk.writeInfos.map((info: WriteInfo) => {
-          const fullDest = isAbsolute(info.output) ? info.output : join(this.rootDestination, info.output);
-          if (!existsSync(fullDest)) {
-            for (const ignoredPath of this.forceIgnoredPaths) {
-              if (
-                dirname(ignoredPath).includes(dirname(fullDest)) &&
-                basename(ignoredPath).includes(basename(fullDest))
-              ) {
-                return;
+        // it is a reasonable expectation that when a conversion call exits, the files of
+        // every component have been written to the destination. This await ensures the microtask
+        // queue is empty when that call exits and overall less memory is consumed.
+        await Promise.all(
+          chunk.writeInfos.map((info: WriteInfo) => {
+            const fullDest = isAbsolute(info.output) ? info.output : join(this.rootDestination, info.output);
+            if (!existsSync(fullDest)) {
+              for (const ignoredPath of this.forceIgnoredPaths) {
+                if (
+                  dirname(ignoredPath).includes(dirname(fullDest)) &&
+                  basename(ignoredPath).includes(basename(fullDest))
+                ) {
+                  return;
+                }
               }
             }
-          }
-          if (this.forceIgnoredPaths.has(fullDest)) {
-            return;
-          }
-          // if there are children, resolve each file. o/w just pick one of the files to resolve
-          if (toResolve.length === 0 || chunk.component.type.children) {
-            // This is a workaround for a server side ListViews bug where
-            // duplicate components are sent. W-9614275
-            if (toResolve.includes(fullDest)) {
-              this.logger.debug(`Ignoring duplicate metadata for: ${fullDest}`);
+            if (this.forceIgnoredPaths.has(fullDest)) {
               return;
             }
-            toResolve.push(fullDest);
-          }
-          ensureFileExists(fullDest);
-          return pipeline(info.source, createWriteStream(fullDest));
-        });
-
-        // it is a reasonable expectation that when a conversion call exits, the files of
-        // every component has been written to the destination. This await ensures the microtask
-        // queue is empty when that call exits and overall less memory is consumed.
-        await Promise.all(writeTasks);
+            // if there are children, resolve each file. o/w just pick one of the files to resolve
+            if (toResolve.length === 0 || chunk.component.type.children) {
+              // This is a workaround for a server side ListViews bug where
+              // duplicate components are sent. W-9614275
+              if (toResolve.includes(fullDest)) {
+                this.logger.debug(`Ignoring duplicate metadata for: ${fullDest}`);
+                return;
+              }
+              toResolve.push(fullDest);
+            }
+            ensureFileExists(fullDest);
+            return pipeline(info.source, createWriteStream(fullDest));
+          })
+        );
 
-        for (const fsPath of toResolve) {
+        toResolve.map((fsPath) => {
           this.converted.push(...this.resolver.getComponentsFromPath(fsPath));
-        }
+        });
       } catch (e) {
         err = e as Error;
       }
diff --git a/src/convert/transformers/baseMetadataTransformer.ts b/src/convert/transformers/baseMetadataTransformer.ts
index d444195500..59656b8a3a 100644
--- a/src/convert/transformers/baseMetadataTransformer.ts
+++ b/src/convert/transformers/baseMetadataTransformer.ts
@@ -11,6 +11,7 @@ import { RegistryAccess } from '../../registry';
 
 export abstract class BaseMetadataTransformer implements MetadataTransformer {
   public readonly context: ConvertContext;
+  public defaultDirectory?: string;
   protected registry: RegistryAccess;
 
   public constructor(registry = new RegistryAccess(), context = new ConvertContext()) {
diff --git a/src/convert/transformers/staticResourceMetadataTransformer.ts b/src/convert/transformers/staticResourceMetadataTransformer.ts
index 1508863c56..2db82444ec 100644
--- a/src/convert/transformers/staticResourceMetadataTransformer.ts
+++ b/src/convert/transformers/staticResourceMetadataTransformer.ts
@@ -4,17 +4,20 @@
  * Licensed under the BSD 3-Clause license.
  * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
  */
-import { basename, dirname, join } from 'path';
+import { basename, dirname, isAbsolute, join } from 'path';
 import { Readable } from 'stream';
 import { create as createArchive } from 'archiver';
 import { getExtension } from 'mime';
 import { Open } from 'unzipper';
 import { JsonMap } from '@salesforce/ts-types';
+import { createWriteStream } from 'graceful-fs';
 import { baseName } from '../../utils';
 import { WriteInfo } from '..';
 import { LibraryError } from '../../errors';
 import { SourceComponent } from '../../resolve';
 import { SourcePath } from '../../common';
+import { ensureFileExists } from '../../utils/fileSystemHandler';
+import { pipeline } from '../streams';
 import { BaseMetadataTransformer } from './baseMetadataTransformer';
 
 export class StaticResourceMetadataTransformer extends BaseMetadataTransformer {
@@ -63,38 +66,65 @@ export class StaticResourceMetadataTransformer {
   public async toSourceFormat(component: SourceComponent, mergeWith?: SourceComponent): Promise<WriteInfo[]> {
     const { xml, content } = component;
-    const writeInfos: WriteInfo[] = [];
-
-    if (content) {
-      const componentContentType = await this.getContentType(component);
-      const mergeContentPath = mergeWith?.content;
-      const baseContentPath = this.getBaseContentPath(component, mergeWith);
-
-      // only unzip an archive component if there isn't a merge component, or the merge component is itself expanded
-      const shouldUnzipArchive =
-        StaticResourceMetadataTransformer.ARCHIVE_MIME_TYPES.has(componentContentType) &&
-        (!mergeWith || mergeWith.tree.isDirectory(mergeContentPath));
-
-      if (shouldUnzipArchive) {
-        const zipBuffer = await component.tree.readFile(content);
-        for await (const info of this.createWriteInfosFromArchive(zipBuffer, baseContentPath)) {
-          writeInfos.push(info);
-        }
-      } else {
-        const extension = this.getExtensionFromType(componentContentType);
-        writeInfos.push({
-          source: component.tree.stream(content),
-          output: `${baseContentPath}.${extension}`,
-        });
-      }
-      writeInfos.push({
+    if (!content) {
+      return [];
+    }
+    const componentContentType = await this.getContentType(component);
+    const mergeContentPath = mergeWith?.content;
+    const baseContentPath = this.getBaseContentPath(component, mergeWith);
+
+    // only unzip an archive component if there isn't a merge component, or the merge component is itself expanded
+    const shouldUnzipArchive =
+      StaticResourceMetadataTransformer.ARCHIVE_MIME_TYPES.has(componentContentType) &&
+      (!mergeWith || mergeWith.tree.isDirectory(mergeContentPath));
+
+    if (shouldUnzipArchive) {
+      // for the bulk of static resource writing we'll start writing ASAP
+      // we'll still defer writing the resource-meta.xml file by pushing it onto the writeInfos
+      await Promise.all(
+        (
+          await Open.buffer(await component.tree.readFile(content))
+        ).files
+          .filter((f) => f.type === 'File')
+          .map(async (f) => {
+            const path = join(baseContentPath, f.path);
+            const fullDest = isAbsolute(path)
+              ? path
+              : join(this.defaultDirectory || component.getPackageRelativePath('', 'source'), path);
+            // push onto the pipeline and start writing now
+            return this.pipeline(f.stream(), fullDest);
+          })
+      );
+    }
+    return [
+      {
         source: component.tree.stream(xml),
         output: mergeWith?.xml || component.getPackageRelativePath(basename(xml), 'source'),
-      });
-    }
+      },
+    ].concat(
+      shouldUnzipArchive
+        ? []
+        : [
+            {
+              source: component.tree.stream(content),
+              output: `${baseContentPath}.${this.getExtensionFromType(componentContentType)}`,
+            },
+          ]
+    );
+  }
 
-    return writeInfos;
+  /**
+   * Only separated into its own method for unit testing purposes
+   * I was unable to find a way to stub/spy a pipeline() call
+   *
+   * @param stream the data to be written
+   * @param destination the destination path to be written
+   * @private
+   */
+  private async pipeline(stream: Readable, destination: string): Promise<void> {
+    ensureFileExists(destination);
+    await pipeline(stream, createWriteStream(destination));
   }
 
   private getBaseContentPath(component: SourceComponent, mergeWith?: SourceComponent): SourcePath {
@@ -118,18 +148,6 @@ export class StaticResourceMetadataTransformer {
     return false;
   }
 
-  private async *createWriteInfosFromArchive(zipBuffer: Buffer, baseDir: string): AsyncIterable<WriteInfo> {
-    const directory = await Open.buffer(zipBuffer);
-    for (const entry of directory.files) {
-      if (entry.type === 'File') {
-        yield {
-          source: entry.stream(),
-          output: join(baseDir, entry.path),
-        };
-      }
-    }
-  }
-
   private async getContentType(component: SourceComponent): Promise<string> {
     const resource = (await component.parseXml()).StaticResource as JsonMap;
 
diff --git a/src/convert/types.ts b/src/convert/types.ts
index 377082a9d8..d63e6d28f8 100644
--- a/src/convert/types.ts
+++ b/src/convert/types.ts
@@ -71,6 +71,7 @@ export type MergeConfig = {
  * Transforms metadata component files into different SFDX file formats
 */
 export interface MetadataTransformer {
+  defaultDirectory?: string;
   toMetadataFormat(component: SourceComponent): Promise<WriteInfo[]>;
   toSourceFormat(component: SourceComponent, mergeWith?: SourceComponent): Promise<WriteInfo[]>;
 }
diff --git a/test/convert/transformers/staticResourceMetadataTransformer.test.ts b/test/convert/transformers/staticResourceMetadataTransformer.test.ts
index 3e41e64b79..d25056661e 100644
--- a/test/convert/transformers/staticResourceMetadataTransformer.test.ts
+++ b/test/convert/transformers/staticResourceMetadataTransformer.test.ts
@@ -5,10 +5,13 @@
  * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
  */
 import { basename, join } from 'path';
+import deepEqualInAnyOrder = require('deep-equal-in-any-order');
+
 import * as archiver from 'archiver';
 import { expect } from 'chai';
 import { createSandbox } from 'sinon';
 import { CentralDirectory, Entry, Open } from 'unzipper';
+import chai = require('chai');
 import { registry, SourceComponent, VirtualTreeContainer, WriteInfo } from '../../../src';
 import { StaticResourceMetadataTransformer } from '../../../src/convert/transformers/staticResourceMetadataTransformer';
 import { LibraryError } from '../../../src/errors';
@@ -22,14 +25,20 @@ import {
 import { TestReadable } from '../../mock/convert/readables';
 import { DEFAULT_PACKAGE_ROOT_SFDX } from '../../../src/common';
 
+chai.use(deepEqualInAnyOrder);
+
 const env = createSandbox();
 
 describe('StaticResourceMetadataTransformer', () => {
   const transformer = new StaticResourceMetadataTransformer();
+  transformer.defaultDirectory = 'test';
+  let pipelineStub;
 
-  beforeEach(() =>
-    env.stub(VirtualTreeContainer.prototype, 'stream').callsFake((fsPath: string) => new TestReadable(fsPath))
-  );
+  beforeEach(() => {
+    env.stub(VirtualTreeContainer.prototype, 'stream').callsFake((fsPath: string) => new TestReadable(fsPath));
+    // @ts-ignore private method stub
+    pipelineStub = env.stub(transformer, 'pipeline').resolves();
+  });
 
   afterEach(() => env.restore());
 
@@ -119,7 +128,7 @@ describe('StaticResourceMetadataTransformer', () => {
       try {
         await transformer.toMetadataFormat(component);
       } catch (e) {
-        expect(e.message).to.equal(
+        expect(e.message).to.deep.equalInAnyOrder(
          nls.localize('error_static_resource_missing_resource_file', [join('staticresources', component.name)])
         );
       }
@@ -166,7 +175,7 @@ describe('StaticResourceMetadataTransformer', () => {
         },
       ];
 
-      expect(await transformer.toSourceFormat(component)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component)).to.deep.equalInAnyOrder(expectedInfos);
     });
 
     it('should rename extension from .resource for a fallback mime extension', async () => {
@@ -189,7 +198,7 @@ describe('StaticResourceMetadataTransformer', () => {
         },
       ];
 
-      expect(await transformer.toSourceFormat(component)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component)).to.deep.equalInAnyOrder(expectedInfos);
     });
 
     it('should rename extension from .resource for an unsupported mime extension', async () => {
@@ -212,7 +221,7 @@ describe('StaticResourceMetadataTransformer', () => {
         },
       ];
 
-      expect(await transformer.toSourceFormat(component)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component)).to.deep.equalInAnyOrder(expectedInfos);
     });
 
     it('should ignore components without content', async () => {
@@ -232,23 +241,24 @@ describe('StaticResourceMetadataTransformer', () => {
       });
       env.stub(Open, 'buffer').resolves(mockCentralDirectory);
       const expectedInfos: WriteInfo[] = [
-        {
-          source: null,
-          output: join(
-            DEFAULT_PACKAGE_ROOT_SFDX,
-            type.directoryName,
-            mixedContentSingleFile.COMPONENT_NAMES[0],
-            'b',
-            'c.css'
-          ),
-        },
         {
           source: component.tree.stream(xml),
           output: join(DEFAULT_PACKAGE_ROOT_SFDX, type.directoryName, basename(xml)),
         },
       ];
 
-      expect(await transformer.toSourceFormat(component)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component)).to.deep.equalInAnyOrder(expectedInfos);
+      expect(pipelineStub.callCount).to.equal(1);
+      expect(pipelineStub.firstCall.args[1]).to.equal(
+        join(
+          transformer.defaultDirectory,
+          DEFAULT_PACKAGE_ROOT_SFDX,
+          type.directoryName,
+          mixedContentSingleFile.COMPONENT_NAMES[0],
+          'b',
+          'c.css'
+        )
+      );
     });
 
     it('should work well for null contentType', async () => {
@@ -270,7 +280,7 @@ describe('StaticResourceMetadataTransformer', () => {
         },
       ];
 
-      expect(await transformer.toSourceFormat(component)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component)).to.deep.equalInAnyOrder(expectedInfos);
     });
 
     it('should merge output with merge component when content is archive', async () => {
@@ -301,10 +311,6 @@ describe('StaticResourceMetadataTransformer', () => {
       });
       env.stub(Open, 'buffer').resolves(mockCentralDirectory);
       const expectedInfos: WriteInfo[] = [
-        {
-          source: null,
-          output: join(mergeComponent.content, 'b', 'c.css'),
-        },
         {
           source: component.tree.stream(component.xml),
           output: mergeComponent.xml,
@@ -312,6 +318,10 @@ describe('StaticResourceMetadataTransformer', () => {
       ];
 
       expect(await transformer.toSourceFormat(component, mergeComponent)).to.deep.equal(expectedInfos);
+      expect(pipelineStub.callCount).to.equal(1);
+      expect(pipelineStub.firstCall.args[1]).to.deep.equal(
+        join(transformer.defaultDirectory, mergeComponent.content, 'b', 'c.css')
+      );
     });
 
     it('should merge output with merge component when content is single file', async () => {
@@ -351,7 +361,7 @@ describe('StaticResourceMetadataTransformer', () => {
         },
       ];
 
-      expect(await transformer.toSourceFormat(component, mergeComponent)).to.deep.equal(expectedInfos);
+      expect(await transformer.toSourceFormat(component, mergeComponent)).to.deep.equalInAnyOrder(expectedInfos);
     });
   });
 });
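
Reviewer note (not part of the patch): the StandardWriter hunk above now awaits every per-file write inside _write before signalling completion, so the overall conversion pipeline only resolves once each component's files are on disk and the microtask queue stays small. A minimal standalone sketch of that pattern, using a hypothetical AwaitingWriter class and a simplified FakeWriteInfo shape in place of the library's WriterFormat/WriteInfo types:

import { createWriteStream } from 'fs';
import { pipeline as cbPipeline, Readable, Writable } from 'stream';
import { promisify } from 'util';

const pipeline = promisify(cbPipeline);

// simplified stand-in for the library's WriteInfo
type FakeWriteInfo = { source: Readable; output: string };

class AwaitingWriter extends Writable {
  public constructor() {
    super({ objectMode: true });
  }

  public async _write(chunk: FakeWriteInfo[], _encoding: string, callback: (err?: Error) => void): Promise<void> {
    let err: Error | undefined;
    try {
      // start every write immediately, but only signal completion once all of them have finished
      await Promise.all(chunk.map((info) => pipeline(info.source, createWriteStream(info.output))));
    } catch (e) {
      err = e as Error;
    }
    callback(err);
  }
}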
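Reviewer note (not part of the patch): StaticResourceMetadataTransformer now streams archive entries straight to disk instead of returning one WriteInfo per entry; only the resource-meta.xml WriteInfo is still handed back to the caller. A rough sketch of that unzip-and-stream pattern in isolation, assuming a hypothetical extractArchive helper and using fs.mkdirSync in place of the repo's internal ensureFileExists utility:

import { createWriteStream, mkdirSync } from 'fs';
import { dirname, isAbsolute, join } from 'path';
import { pipeline as cbPipeline } from 'stream';
import { promisify } from 'util';
import { Open } from 'unzipper';

const pipeline = promisify(cbPipeline);

// stream every file entry of a zipped static resource buffer directly to disk,
// resolving relative entry paths against a default output directory
async function extractArchive(zipBuffer: Buffer, baseContentPath: string, defaultDirectory: string): Promise<void> {
  const directory = await Open.buffer(zipBuffer);
  await Promise.all(
    directory.files
      .filter((f) => f.type === 'File')
      .map((f) => {
        const path = join(baseContentPath, f.path);
        const fullDest = isAbsolute(path) ? path : join(defaultDirectory, path);
        mkdirSync(dirname(fullDest), { recursive: true });
        // begin writing as soon as the entry stream is available
        return pipeline(f.stream(), createWriteStream(fullDest));
      })
  );
}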
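Reviewer note (not part of the patch): the test file switches several assertions to deep.equalInAnyOrder, presumably so WriteInfo array comparisons no longer depend on write order. A minimal sketch of how the chai plugin is wired up; the sample arrays are arbitrary:

import chai = require('chai');
import deepEqualInAnyOrder = require('deep-equal-in-any-order');

chai.use(deepEqualInAnyOrder);

// passes: same members, different order
chai.expect([{ a: 1 }, { b: 2 }]).to.deep.equalInAnyOrder([{ b: 2 }, { a: 1 }]);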