diff --git a/Classes/Archival/Archive.swift b/Classes/Archival/Archive.swift new file mode 100644 index 000000000..2c145d127 --- /dev/null +++ b/Classes/Archival/Archive.swift @@ -0,0 +1,52 @@ +// +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. +// + +/// The class containing an image or video and associated data (an encoded representation of the edits). +class Archive: NSObject, NSSecureCoding { + static var supportsSecureCoding: Bool = true + + let image: UIImage? + let video: URL? + let data: Data? + + init(image: UIImage, data: Data?) { + self.image = image + self.data = data + self.video = nil + } + + init(video: URL, data: Data?) { + self.video = video + self.data = data + self.image = nil + } + + private enum CodingKeys: String { + case image + case video + case data + } + + func encode(with coder: NSCoder) { + coder.encode(image, forKey: CodingKeys.image.rawValue) + coder.encode(video?.absoluteString, forKey: CodingKeys.video.rawValue) + coder.encode(data?.base64EncodedString(), forKey: CodingKeys.data.rawValue) + } + + required init?(coder: NSCoder) { + image = coder.decodeObject(of: UIImage.self, forKey: CodingKeys.image.rawValue) + if let urlString = coder.decodeObject(of: NSString.self, forKey: CodingKeys.video.rawValue) as String? { + video = URL(string: urlString) + } else { + video = nil + } + if let dataString = coder.decodeObject(of: NSString.self, forKey: CodingKeys.data.rawValue) as String? { + data = Data(base64Encoded: dataString) + } else { + data = nil + } + } +} diff --git a/Classes/Camera/CameraController.swift b/Classes/Camera/CameraController.swift index 2c828bd5b..5d7ba51c8 100644 --- a/Classes/Camera/CameraController.swift +++ b/Classes/Camera/CameraController.swift @@ -14,34 +14,39 @@ public struct KanvasMedia { public let output: URL public let info: MediaInfo public let size: CGSize + public let archive: URL? public let type: MediaType init(unmodified: URL?, output: URL, info: MediaInfo, size: CGSize, + archive: URL?, type: MediaType) { self.unmodified = unmodified self.output = output self.info = info self.size = size + self.archive = archive self.type = type } - init(asset: AVURLAsset, original: URL?, info: MediaInfo) { + init(asset: AVURLAsset, original: URL?, info: MediaInfo, archive: URL?) { self.init(unmodified: original, output: asset.url, info: info, size: asset.videoScreenSize ?? .zero, + archive: archive, type: .video ) } - init(image: UIImage, url: URL, original: URL?, info: MediaInfo) { + init(image: UIImage, url: URL, original: URL?, info: MediaInfo, archive: URL?) { self.init(unmodified: original, output: url, info: info, size: image.size, + archive: archive, type: .image ) } @@ -87,7 +92,7 @@ public protocol CameraControllerDelegate: class { func tagButtonPressed() /// Called when the editor is dismissed - func editorDismissed() + func editorDismissed(_ cameraController: CameraController) /// Called after the welcome tooltip is dismissed func didDismissWelcomeTooltip() @@ -133,14 +138,10 @@ public protocol CameraControllerDelegate: class { } // A controller that contains and layouts all camera handling views and controllers (mode selector, input, etc). 
-public class CameraController: UIViewController, MediaClipsEditorDelegate, CameraPreviewControllerDelegate, EditorControllerDelegate, CameraZoomHandlerDelegate, OptionsControllerDelegate, ModeSelectorAndShootControllerDelegate, CameraViewDelegate, CameraInputControllerDelegate, FilterSettingsControllerDelegate, CameraPermissionsViewControllerDelegate, KanvasMediaPickerViewControllerDelegate, MediaPickerThumbnailFetcherDelegate, MultiEditorComposerDelegate { - public func show(media: [(CameraSegment, Data?)]) { - showPreview = true - self.segments = media.map({ return $0.0 }) +open class CameraController: UIViewController, MediaClipsEditorDelegate, CameraPreviewControllerDelegate, EditorControllerDelegate, CameraZoomHandlerDelegate, OptionsControllerDelegate, ModeSelectorAndShootControllerDelegate, CameraViewDelegate, CameraInputControllerDelegate, FilterSettingsControllerDelegate, CameraPermissionsViewControllerDelegate, KanvasMediaPickerViewControllerDelegate, MediaPickerThumbnailFetcherDelegate, MultiEditorComposerDelegate { - if view.superview != nil { - showPreviewWithSegments(segments, selected: segments.startIndex, edits: nil, animated: false) - } + enum ArchiveErrors: Error { + case unknownMedia } public func hideLoading() { @@ -173,6 +174,8 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer controller.delegate = self return controller }() + + private var clips = [MediaClip]() private lazy var cameraInputController: CameraInputController = { let controller = CameraInputController(settings: self.settings, recorderClass: self.recorderClass, segmentsHandler: self.segmentsHandler, delegate: self) @@ -330,7 +333,7 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer return } if segments.isEmpty == false && showPreview { - showPreviewWithSegments(segments, selected: segments.startIndex, animated: false) + showPreviewWithSegments(segments, selected: segments.startIndex, edits: edits, animated: false) showPreview = false } if delegate?.cameraShouldShowWelcomeTooltip() == true && cameraPermissionsViewController.hasFullAccess() { @@ -339,7 +342,9 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer } // MARK: - navigation + private var segments: [CameraSegment] = [] + private var edits: [Data?]? private var showPreview: Bool = false private func showPreviewWithSegments(_ segments: [CameraSegment], selected: Array.Index, edits: [Data?]? = nil, animated: Bool = true) { @@ -366,7 +371,8 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer multiEditorViewController = controller as? MultiEditorViewController } else if settings.features.editor { - controller = createEditorViewController(segments, selected: selected) + let existing = existingEditor + controller = existing ?? createEditorViewController(segments, selected: selected, cache: nil) } else { controller = createPreviewViewController(segments) @@ -376,7 +382,7 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer return controller } - private func createEditorViewController(_ segments: [CameraSegment], selected: Array.Index) -> EditorViewController { + private func createEditorViewController(_ segments: [CameraSegment], selected: Array.Index, canvas: MovableViewCanvas? = nil, drawing: IgnoreTouchesView? = nil, cache: NSCache?) 
-> EditorViewController { let controller = EditorViewController(settings: settings, segments: segments, assetsHandler: segmentsHandler, @@ -386,14 +392,30 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer stickerProvider: stickerProvider, analyticsProvider: analyticsProvider, quickBlogSelectorCoordinator: quickBlogSelectorCoordinator, - tagCollection: tagCollection) + canvas: canvas, + tagCollection: tagCollection, + cache: cache) controller.delegate = self + canvas?.delegate = controller.editorView return controller } + private func frames(segments: [CameraSegment], edits: [Data?]?) -> [MultiEditorViewController.Frame] { + if let edits = edits { + return zip(segments, edits).map { (segment, data) in + return MultiEditorViewController.Frame(segment: segment, edit: MultiEditorViewController.Edit(data: data, cache: nil)) + } + } else { + return segments.map({ segment in + return MultiEditorViewController.Frame(segment: segment, edit: nil) + }) + } + } + private func createStoryViewController(_ segments: [CameraSegment], selected: Int, edits: [Data?]?) -> MultiEditorViewController { + let controller = MultiEditorViewController(settings: settings, - segments: segments, + frames: frames(segments: segments, edits: edits), delegate: self, selected: selected) return controller @@ -430,27 +452,13 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer } // MARK: - Media Content Creation - - class func save(image: UIImage?, info: MediaInfo) -> URL? { - do { - guard let image = image, let jpgImageData = image.jpegData(compressionQuality: 1.0) else { - return nil - } - let fileURL = try save(data: jpgImageData, to: "kanvas-image", ext: "jpg") - info.write(toImage: fileURL) - return fileURL - } catch { - print("Failed to save to file. \(error)") - return nil - } - } class func save(data: Data, to filename: String, ext fileExtension: String) throws -> URL { let fileURL = try URL(filename: filename, fileExtension: fileExtension, unique: false, removeExisting: true) try data.write(to: fileURL, options: .atomic) return fileURL } - + private func durationStringForAssetAtURL(_ url: URL?) -> String { var text = "" if let url = url { @@ -581,7 +589,7 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer if isRecording { modeAndShootController.hideModeButton() } - else if !isRecording && !clipsController.hasClips && settings.enabledModes.count > 1 { + else if !isRecording && !clipsController.hasClips && !clips.isEmpty && settings.enabledModes.count > 1 { modeAndShootController.showModeButton() } } @@ -641,7 +649,7 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer // Let's prompt for losing clips if they have clips and it's the "x" button, rather than the ">" button. if clipsController.hasClips && !settings.topButtonsSwapped { showDismissTooltip() - } else if clipsController.hasClips && multiEditorViewController != nil { + } else if clips.isEmpty == false && multiEditorViewController != nil { showPreviewWithSegments([], selected: multiEditorViewController?.selected ?? 0) } else { @@ -853,18 +861,19 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer dismiss(animated: false, completion: nil) } - func editor(segment: CameraSegment) -> EditorViewController { + func editor(segment: CameraSegment, canvas: MovableViewCanvas?, cache: NSCache?) 
-> EditorViewController { let segments = [segment] - - return createEditorViewController(segments, selected: segments.startIndex) + return createEditorViewController(segments, selected: segments.startIndex, canvas: canvas, cache: cache) } + + // MARK: - CameraPreviewControllerDelegate & EditorControllerDelegate & StoryComposerDelegate func didFinishExportingVideo(url: URL?) { - didFinishExportingVideo(url: url, info: MediaInfo(source: .kanvas_camera), action: .previewConfirm, mediaChanged: true) + didFinishExportingVideo(url: url, info: MediaInfo(source: .kanvas_camera), archive: nil, action: .previewConfirm, mediaChanged: true) } func didFinishExportingImage(image: UIImage?) { - didFinishExportingImage(image: image, info: MediaInfo(source: .kanvas_camera), action: .previewConfirm, mediaChanged: true) + didFinishExportingImage(image: image, info: MediaInfo(source: .kanvas_camera), archive: nil, action: .previewConfirm, mediaChanged: true) } func didFinishExportingFrames(url: URL?) { @@ -872,10 +881,10 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer if let url = url { size = GIFDecoderFactory.main().size(of: url) } - didFinishExportingFrames(url: url, size: size, info: MediaInfo(source: .kanvas_camera), action: .previewConfirm, mediaChanged: true) + didFinishExportingFrames(url: url, size: size, info: MediaInfo(source: .kanvas_camera), archive: nil, action: .previewConfirm, mediaChanged: true) } - public func didFinishExportingVideo(url: URL?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + public func didFinishExportingVideo(url: URL?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { guard settings.features.multipleExports == false else { return } let asset: AVURLAsset? if let url = url { @@ -885,8 +894,23 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer asset = nil } + let fileName = url?.deletingPathExtension().lastPathComponent ?? UUID().uuidString + if let asset = asset, let info = info { - let media = KanvasMedia(asset: asset, original: url, info: info) + + let archiveURL: URL? 
+ if let saveDirectory = saveDirectory { + do { + archiveURL = try archive?.save(to: fileName, in: saveDirectory, ext: "") + } catch let error { + print("Failed to save archive on video export: \(error)") + archiveURL = nil + } + } else { + archiveURL = nil + } + + let media = KanvasMedia(asset: asset, original: url, info: info, archive: archiveURL) logMediaCreation(action: action, clipsCount: cameraInputController.segments().count, length: CMTimeGetSeconds(asset.duration)) performUIUpdate { [weak self] in if let self = self { @@ -898,16 +922,24 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer performUIUpdate { [weak self] in if let self = self { self.handleCloseSoon(action: action) - self.delegate?.didCreateMedia(self, media: [], exportAction: action) + self.delegate?.didCreateMedia(self, media: [.failure(CameraControllerError.exportFailure)], exportAction: action) } } } } - public func didFinishExportingImage(image: UIImage?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + public func didFinishExportingImage(image: UIImage?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { guard settings.features.multipleExports == false else { return } if let image = image, let info = info, let url = image.save(info: info) { - let media = KanvasMedia(image: image, url: url, original: url, info: info) + + let archiveURL: URL? + if let saveDirectory = saveDirectory { + archiveURL = try! archive?.save(to: UUID().uuidString, in: saveDirectory, ext: "") + } else { + archiveURL = nil + } + + let media = KanvasMedia(image: image, url: url, original: url, info: info, archive: archiveURL) logMediaCreation(action: action, clipsCount: 1, length: 0) performUIUpdate { [weak self] in if let self = self { @@ -920,23 +952,30 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer performUIUpdate { [weak self] in if let self = self { self.handleCloseSoon(action: action) - self.delegate?.didCreateMedia(self, media: [], exportAction: action) + self.delegate?.didCreateMedia(self, media: [.failure(CameraControllerError.exportFailure)], exportAction: action) } } } } - public func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + public func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { + guard settings.features.multipleExports == false else { return } guard let url = url, let info = info, let size = size, size != .zero else { performUIUpdate { self.handleCloseSoon(action: action) - self.delegate?.didCreateMedia(self, media: [], exportAction: action) + self.delegate?.didCreateMedia(self, media: [.failure(CameraControllerError.exportFailure)], exportAction: action) } return } + let archiveURL: URL? + if let saveDirectory = saveDirectory { + archiveURL = try! 
archive?.save(to: UUID().uuidString, in: saveDirectory, ext: "") + } else { + archiveURL = nil + } performUIUpdate { self.handleCloseSoon(action: action) - let media = KanvasMedia(unmodified: url, output: url, info: info, size: size, type: .frames) + let media = KanvasMedia(unmodified: url, output: url, info: info, size: size, archive: archiveURL, type: .frames) self.delegate?.didCreateMedia(self, media: [.success(media)], exportAction: action) } } @@ -949,17 +988,16 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer let archiver = MediaArchiver(saveDirectory: saveDirectory) - let exports: [EditorViewController.ExportResult?] = result.map { result in - switch result { - case .success(let export): - return export - case .failure(_): - return nil - } - } - queue.async { [weak self] in guard let self = self else { return } + let exports: [EditorViewController.ExportResult?] = result.map { result in + switch result { + case .success(let export): + return export + case .failure(_): + return nil + } + } let publishers = archiver.handle(exports: exports) self.exportCancellable = publishers.receive(on: DispatchQueue.main).sink { completion in @@ -991,14 +1029,14 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer else { analyticsProvider?.logPreviewDismissed() } - if settings.features.multipleExports && clipsController.hasClips { + if settings.features.multipleExports && !clips.isEmpty { showPreviewWithSegments([], selected: multiEditorViewController?.selected ?? 0) } else { performUIUpdate { [weak self] in self?.dismiss(animated: true) } } - delegate?.editorDismissed() + delegate?.editorDismissed(self) } public func tagButtonPressed() { @@ -1236,3 +1274,37 @@ public class CameraController: UIViewController, MediaClipsEditorDelegate, Camer mediaPlayerController?.onQuickPostOptionsSelected(selected: selected, hintText: hintText, view: view) } } + +//MARK: Archival + +extension CameraController { + public static func unarchive(_ url: URL) throws -> (CameraSegment, Data?) 
{ + let data = try Data(contentsOf: url) + let archive = try NSKeyedUnarchiver.unarchivedObject(ofClass: Archive.self, from: data) + let segment: CameraSegment + if let image = archive?.image { + let info: MediaInfo + if let imageData = image.jpegData(compressionQuality: 1.0), let mInfo = MediaInfo(fromImageData: imageData) { + info = mInfo + } else { + info = MediaInfo(source: .kanvas_camera) + } + segment = CameraSegment.image(image, nil, nil, info) + } else if let video = archive?.video { + segment = CameraSegment.video(video, MediaInfo(fromVideoURL: video)) + } else { + throw ArchiveErrors.unknownMedia + } + return (segment, archive?.data) + } + + public func show(media: [(CameraSegment, Data?)]) { + showPreview = true + self.segments = media.map({ return $0.0 }) + self.edits = media.map({ return $0.1 }) + + if view.superview != nil { + showPreviewWithSegments(segments, selected: segments.startIndex, edits: nil, animated: false) + } + } +} diff --git a/Classes/Camera/MediaArchiver.swift b/Classes/Camera/MediaArchiver.swift index ff64e4b26..77503f001 100644 --- a/Classes/Camera/MediaArchiver.swift +++ b/Classes/Camera/MediaArchiver.swift @@ -61,21 +61,50 @@ class MediaArchiver { } else { originalURL = nil } - return KanvasMedia(image: image, url: url, original: originalURL, info: export.info) + let archiveURL = self.archive(media: .image(original), archive: export.archive, to: url.deletingPathExtension().lastPathComponent) + return KanvasMedia(image: image, url: url, original: originalURL, info: export.info, archive: archiveURL) } else { return nil } case (.video(let url), .video(let original)): + let archiveURL = self.archive(media: .video(original), archive: export.archive, to: url.deletingPathExtension().lastPathComponent) os_log(.debug, log: log, "Original video URL: %@", original.absoluteString) let asset = AVURLAsset(url: url) - return KanvasMedia(asset: asset, original: original, info: export.info) + return KanvasMedia(asset: asset, original: original, info: export.info, archive: archiveURL) default: return nil } } + + private func archive(media: EditorViewController.Media, archive data: Data, to path: String) -> URL? { + + let archive: Archive + + switch media { + case .image(let image): + archive = Archive(image: image, data: data) + case .video(let url): + archive = Archive(video: url, data: data) + } + + let archiveURL: URL? 
+ if let saveDirectory = saveDirectory { + do { + let data = try NSKeyedArchiver.archivedData(withRootObject: archive, requiringSecureCoding: true) + archiveURL = try data.save(to: path, in: saveDirectory, ext: "") + } catch let error { + archiveURL = nil + print("Failed to archive \(error)") + } + } else { + archiveURL = nil + } + + return archiveURL + } } -private extension Data { +extension Data { func save(to filename: String, in directory: URL, ext fileExtension: String) throws -> URL { let fileURL = directory.appendingPathComponent(filename).appendingPathExtension(fileExtension) try write(to: fileURL, options: .atomic) diff --git a/Classes/Editor/EditorView.swift b/Classes/Editor/EditorView.swift index 0be35f77c..b8f7b4362 100644 --- a/Classes/Editor/EditorView.swift +++ b/Classes/Editor/EditorView.swift @@ -171,11 +171,7 @@ final class EditorView: UIView, MovableViewCanvasDelegate, MediaPlayerViewDelega ) }() - lazy var movableViewCanvas: MovableViewCanvas = { - let canvas = MovableViewCanvas() - canvas.delegate = self - return canvas - }() + var movableViewCanvas: MovableViewCanvas private lazy var movableViewCanvasConstraints = { return FullViewConstraints( @@ -219,7 +215,8 @@ final class EditorView: UIView, MovableViewCanvasDelegate, MediaPlayerViewDelega showBlogSwitcher: Bool, quickBlogSelectorCoordinator: KanvasQuickBlogSelectorCoordinating?, tagCollection: UIView?, - metalContext: MetalContext?) { + metalContext: MetalContext?, + movableViewCanvas: MovableViewCanvas?) { self.delegate = delegate self.mainActionMode = mainActionMode self.showSaveButton = showSaveButton @@ -232,7 +229,10 @@ final class EditorView: UIView, MovableViewCanvasDelegate, MediaPlayerViewDelega self.quickBlogSelectorCoordinator = quickBlogSelectorCoordinator self.tagCollection = tagCollection self.metalContext = metalContext + self.movableViewCanvas = movableViewCanvas ?? MovableViewCanvas() + super.init(frame: .zero) + self.movableViewCanvas.delegate = self setupViews() } @@ -277,7 +277,7 @@ final class EditorView: UIView, MovableViewCanvasDelegate, MediaPlayerViewDelega setupOverlay() setupOverlayLabel() } - + // MARK: - views private func setupPlayer() { @@ -299,7 +299,7 @@ final class EditorView: UIView, MovableViewCanvasDelegate, MediaPlayerViewDelega addSubview(movableViewCanvas) movableViewCanvasConstraints.activate() } - + /// Container that holds the back button and the bottom menu private func setupNavigationContainer() { navigationContainer.accessibilityIdentifier = "Navigation Container" diff --git a/Classes/Editor/EditorViewController.swift b/Classes/Editor/EditorViewController.swift index 6c9e9371a..0e80958a7 100644 --- a/Classes/Editor/EditorViewController.swift +++ b/Classes/Editor/EditorViewController.swift @@ -12,13 +12,13 @@ import UIKit public protocol EditorControllerDelegate: class { /// callback when finished exporting video clips. 
- func didFinishExportingVideo(url: URL?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) + func didFinishExportingVideo(url: URL?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) /// callback when finished exporting image - func didFinishExportingImage(image: UIImage?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) + func didFinishExportingImage(image: UIImage?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) /// callback when finished exporting frames - func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) + func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) /// callback when dismissing controller without exporting func dismissButtonPressed() @@ -76,9 +76,10 @@ public final class EditorViewController: UIViewController, MediaPlayerController } public struct ExportResult { - let original: Media? - let result: Media - let info: MediaInfo + let original: Media? + let result: Media + let info: MediaInfo + let archive: Data } var editorView: EditorView @@ -153,8 +154,12 @@ public final class EditorViewController: UIViewController, MediaPlayerController private let cameraMode: CameraMode? private var openedMenu: EditionOption? private var selectedCell: KanvasEditorMenuCollectionCell? + + var shouldExportSound: Bool = true private let metalContext = MetalContext.createContext() + let cache: NSCache + private var shouldExportMediaAsGIF: Bool { get { return collectionController.shouldExportMediaAsGIF @@ -182,11 +187,12 @@ public final class EditorViewController: UIViewController, MediaPlayerController private var editingNewText: Bool = true public weak var delegate: EditorControllerDelegate? - + private var exportCompletion: ((Result) -> Void)? private static func editor(delegate: EditorViewDelegate?, settings: CameraSettings, + canvas: MovableViewCanvas?, quickBlogSelectorCoordinator: KanvasQuickBlogSelectorCoordinating?, tagCollection: UIView?, metalContext: MetalContext?) 
-> EditorView { @@ -209,7 +215,8 @@ public final class EditorViewController: UIViewController, MediaPlayerController showBlogSwitcher: settings.showBlogSwitcherInEditor, quickBlogSelectorCoordinator: quickBlogSelectorCoordinator, tagCollection: tagCollection, - metalContext: metalContext) + metalContext: metalContext, + movableViewCanvas: canvas) return editorView } @@ -236,7 +243,8 @@ public final class EditorViewController: UIViewController, MediaPlayerController stickerProvider: stickerProvider, analyticsProvider: analyticsProvider, quickBlogSelectorCoordinator: nil, - tagCollection: nil) + tagCollection: nil, + cache: nil) } public static func createEditor(for videoURL: URL, settings: CameraSettings, stickerProvider: StickerProvider) -> EditorViewController { @@ -249,7 +257,8 @@ public final class EditorViewController: UIViewController, MediaPlayerController stickerProvider: stickerProvider, analyticsProvider: nil, quickBlogSelectorCoordinator: nil, - tagCollection: nil) + tagCollection: nil, + cache: nil) } public static func createEditor(forGIF url: URL, @@ -268,6 +277,13 @@ public final class EditorViewController: UIViewController, MediaPlayerController } } + private static func freshCache() -> NSCache { + let cache = NSCache() + cache.name = "Kanvas Editor Cache" + cache.totalCostLimit = 50_000_000 + return cache + } + convenience init(settings: CameraSettings, segments: [CameraSegment], stickerProvider: StickerProvider, @@ -281,7 +297,8 @@ public final class EditorViewController: UIViewController, MediaPlayerController stickerProvider: stickerProvider, analyticsProvider: analyticsProvider, quickBlogSelectorCoordinator: nil, - tagCollection: nil) + tagCollection: nil, + cache: nil) } /// The designated initializer for the editor controller @@ -302,7 +319,9 @@ public final class EditorViewController: UIViewController, MediaPlayerController stickerProvider: StickerProvider?, analyticsProvider: KanvasAnalyticsProvider?, quickBlogSelectorCoordinator: KanvasQuickBlogSelectorCoordinating?, - tagCollection: UIView?) { + canvas: MovableViewCanvas? = nil, + tagCollection: UIView?, + cache: NSCache?) { self.settings = settings self.originalSegments = segments self.assetsHandler = assetsHandler @@ -318,9 +337,11 @@ public final class EditorViewController: UIViewController, MediaPlayerController self.player = MediaPlayer(renderer: Renderer(settings: settings, metalContext: metalContext)) self.editorView = EditorViewController.editor(delegate: nil, settings: settings, + canvas: canvas, quickBlogSelectorCoordinator: quickBlogSelectorCoordinator, tagCollection: tagCollection, metalContext: metalContext) + self.cache = cache ?? EditorViewController.freshCache() super.init(nibName: .none, bundle: .none) self.editorView.delegate = self @@ -384,12 +405,25 @@ public final class EditorViewController: UIViewController, MediaPlayerController /// Sets up the color carousels of both drawing and text tools private func addCarouselDefaultColors(_ image: UIImage) { - let dominantColors = image.getDominantColors(count: 3) + + let cacheKey = "dominantColors" + + let dominantColors: [UIColor] + if let cached = cache.object(forKey: cacheKey as NSString) { + let colors = try! NSKeyedUnarchiver.unarchivedObject(ofClasses: [NSArray.self, UIColor.self], from: cached as Data) + dominantColors = colors as! 
[UIColor] + } else { + dominantColors = image.getDominantColors(count: 3) + } + drawingController.addColorsForCarousel(colors: dominantColors) - + if let mostDominantColor = dominantColors.first { textController.addColorsForCarousel(colors: [mostDominantColor, .white, .black]) } + + let archivedColors = try! NSKeyedArchiver.archivedData(withRootObject: dominantColors as NSArray, requiringSecureCoding: true) + cache.setObject(archivedColors as NSData, forKey: cacheKey as NSString) } // MARK: - Media Player @@ -498,14 +532,14 @@ public final class EditorViewController: UIViewController, MediaPlayerController } // More than one segment, or one video-only segment, enable it. - if segments.count > 1 || segments.first?.image == nil { + if segments.count > 1 || segments.first?.isVideo == true { return true } // A single segment that has both an image and a video (live photo), enabled it. if segments.count == 1, let firstSegment = segments.first, - firstSegment.image != nil, + firstSegment.isVideo == false, firstSegment.videoURL != nil { return true } @@ -656,15 +690,15 @@ public final class EditorViewController: UIViewController, MediaPlayerController player.stop() delegate?.dismissButtonPressed() } - - func getQuickPostButton() -> UIView { + + func getBlogSwitcher() -> UIView { guard let delegate = delegate else { return UIView() } - return delegate.getQuickPostButton() + return delegate.getBlogSwitcher() } - func getBlogSwitcher() -> UIView { + func getQuickPostButton() -> UIView { guard let delegate = delegate else { return UIView() } - return delegate.getBlogSwitcher() + return delegate.getQuickPostButton() } func restartPlayback() { @@ -685,27 +719,34 @@ public final class EditorViewController: UIViewController, MediaPlayerController private func startExporting(action: KanvasExportAction) { player.stop() showLoading() - if segments.count == 1, let firstSegment = segments.first, let image = firstSegment.image { + let archive: Data + do { + archive = try self.archive() + } catch { + handleExportError() + return + } + if segments.count == 1, let firstSegment = segments.first, case CameraSegment.image(let image, _, _, _) = firstSegment { // If the camera mode is .stopMotion, .normal or .stitch (.video) and the `exportStopMotionPhotoAsVideo` is true, // then single photos from that mode should still export as video. 
if let cameraMode = cameraMode, cameraMode.group == .video || settings.exportStopMotionPhotoAsVideo { assetsHandler.ensureAllImagesHaveVideo(segments: segments) { segments in guard let videoURL = segments.first?.videoURL else { return } DispatchQueue.main.async { - self.createFinalVideo(videoURL: videoURL, mediaInfo: firstSegment.mediaInfo, exportAction: action) + self.createFinalVideo(videoURL: videoURL, mediaInfo: firstSegment.mediaInfo, archive: archive, exportAction: action) } } } else { - createFinalImage(image: image, mediaInfo: firstSegment.mediaInfo, exportAction: action) + createFinalImage(image: image, mediaInfo: firstSegment.mediaInfo, archive: archive, exportAction: action) } } else if shouldExportMediaAsGIF { if segments.count == 1, let segment = segments.first, let url = segment.videoURL { - self.createFinalGIF(videoURL: url, framesPerSecond: KanvasTimes.gifPreferredFramesPerSecond, mediaInfo: segment.mediaInfo, exportAction: action) + self.createFinalGIF(videoURL: url, framesPerSecond: KanvasTimes.gifPreferredFramesPerSecond, mediaInfo: segment.mediaInfo, archive: archive, exportAction: action) } else if assetsHandler.containsOnlyImages(segments: segments) { - self.createFinalGIF(segments: segments, mediaInfo: segments.first?.mediaInfo ?? MediaInfo(source: .kanvas_camera), exportAction: action) + self.createFinalGIF(segments: segments, mediaInfo: segments.first?.mediaInfo ?? MediaInfo(source: .kanvas_camera), archive: archive, exportAction: action) } else { // Segments are not all frames, so we need to generate a full video first, and then convert that to a GIF. @@ -721,7 +762,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController } let fps = Int(CMTime(seconds: 1.0, preferredTimescale: KanvasTimes.stopMotionFrameTimescale).seconds / KanvasTimes.onlyImagesFrameTime.seconds) DispatchQueue.main.async { - self.createFinalGIF(videoURL: url, framesPerSecond: fps, mediaInfo: mediaInfo, exportAction: action) + self.createFinalGIF(videoURL: url, framesPerSecond: fps, mediaInfo: mediaInfo, archive: archive, exportAction: action) } } } @@ -734,7 +775,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController return } DispatchQueue.main.async { - self?.createFinalVideo(videoURL: url, mediaInfo: mediaInfo ?? MediaInfo(source: .media_library), exportAction: action) + self?.createFinalVideo(videoURL: url, mediaInfo: mediaInfo ?? MediaInfo(source: .media_library), archive: archive, exportAction: action) } } } @@ -745,7 +786,11 @@ public final class EditorViewController: UIViewController, MediaPlayerController startExporting(action: .post) } - private func createFinalGIF(segments: [CameraSegment], mediaInfo: MediaInfo, exportAction: KanvasExportAction) { + private func archive() throws -> Data { + return try NSKeyedArchiver.archivedData(withRootObject: editorView.movableViewCanvas, requiringSecureCoding: true) + } + + private func createFinalGIF(segments: [CameraSegment], mediaInfo: MediaInfo, archive: Data, exportAction: KanvasExportAction) { let exporter = exporterClass.init(settings: settings) exporter.filterType = filterType ?? .passthrough exporter.imageOverlays = imageOverlays() @@ -754,11 +799,17 @@ public final class EditorViewController: UIViewController, MediaPlayerController exporter.export(frames: frames) { orderedFrames in let playbackFrames = self.gifMakerHandler.framesForPlayback(orderedFrames) self.gifEncoderClass.init().encode(frames: playbackFrames, loopCount: 0) { gifURL in - var size: CGSize? 
= nil - if let gifURL = gifURL { - size = GIFDecoderFactory.main().size(of: gifURL) + guard let gifURL = gifURL else { + performUIUpdate { + self.hideLoading() + self.handleExportError() + } + return } - self.delegate?.didFinishExportingFrames(url: gifURL, size: size, info: mediaInfo, action: exportAction, mediaChanged: self.mediaChanged) + let size = GIFDecoderFactory.main().size(of: gifURL) + let result = ExportResult(original: nil, result: .video(gifURL), info: mediaInfo, archive: archive) + self.exportCompletion?(.success(result)) + self.delegate?.didFinishExportingFrames(url: gifURL, size: size, info: mediaInfo, archive: archive, action: exportAction, mediaChanged: self.mediaChanged) performUIUpdate { self.hideLoading() } @@ -766,7 +817,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController } } - private func createFinalGIF(videoURL: URL, framesPerSecond: Int, mediaInfo: MediaInfo, exportAction: KanvasExportAction) { + private func createFinalGIF(videoURL: URL, framesPerSecond: Int, mediaInfo: MediaInfo, archive: Data, exportAction: KanvasExportAction) { let exporter = exporterClass.init(settings: settings) exporter.filterType = filterType ?? .passthrough exporter.imageOverlays = imageOverlays() @@ -788,9 +839,9 @@ public final class EditorViewController: UIViewController, MediaPlayerController return } let size = GIFDecoderFactory.main().size(of: gifURL) - let result = ExportResult(original: nil, result: .video(gifURL), info: mediaInfo) + let result = ExportResult(original: nil, result: .video(gifURL), info: mediaInfo, archive: archive) self.exportCompletion?(.success(result)) - self.delegate?.didFinishExportingFrames(url: gifURL, size: size, info: mediaInfo, action: exportAction, mediaChanged: self.mediaChanged) + self.delegate?.didFinishExportingFrames(url: gifURL, size: size, info: mediaInfo, archive: archive, action: exportAction, mediaChanged: self.mediaChanged) performUIUpdate { self.hideLoading() } @@ -798,9 +849,8 @@ public final class EditorViewController: UIViewController, MediaPlayerController } } - private func createFinalVideo(videoURL: URL, mediaInfo: MediaInfo, exportAction: KanvasExportAction) { + private func createFinalVideo(videoURL: URL, mediaInfo: MediaInfo, archive: Data, exportAction: KanvasExportAction) { let exporter = exporterClass.init(settings: settings) - exporter.filterType = filterType ?? .passthrough exporter.imageOverlays = imageOverlays() exporter.export(video: videoURL, mediaInfo: mediaInfo) { (exportedVideoURL, error) in performUIUpdate { @@ -813,19 +863,21 @@ public final class EditorViewController: UIViewController, MediaPlayerController } return } - let result = ExportResult(original: .video(videoURL), result: .video(url), info: mediaInfo) + let result = ExportResult(original: .video(videoURL), result: .video(url), info: mediaInfo, archive: archive) self.exportCompletion?(.success(result)) - self.delegate?.didFinishExportingVideo(url: url, info: mediaInfo, action: exportAction, mediaChanged: self.mediaChanged) + self.delegate?.didFinishExportingVideo(url: url, info: mediaInfo, archive: archive, action: exportAction, mediaChanged: self.mediaChanged) self.hideLoading() } } } - private func createFinalImage(image: UIImage, mediaInfo: MediaInfo, exportAction: KanvasExportAction) { + private func createFinalImage(image: UIImage, mediaInfo: MediaInfo, archive: Data, exportAction: KanvasExportAction) { let exporter = exporterClass.init(settings: settings) exporter.filterType = filterType ?? 
.passthrough exporter.imageOverlays = imageOverlays() - exporter.export(image: image, time: player.lastStillFilterTime) { (exportedImage, error) in + exporter.export(image: image, time: player.lastStillFilterTime) { [weak self] (exportedImage, error) in + guard let self = self else { return } + let originalImage = image performUIUpdate { guard let unwrappedImage = exportedImage else { self.hideLoading() @@ -836,9 +888,9 @@ public final class EditorViewController: UIViewController, MediaPlayerController } return } - let result = ExportResult(original: .image(image), result: .image(unwrappedImage), info: mediaInfo) + let result = ExportResult(original: .image(originalImage), result: .image(unwrappedImage), info: mediaInfo, archive: archive) self.exportCompletion?(.success(result)) - self.delegate?.didFinishExportingImage(image: unwrappedImage, info: mediaInfo, action: exportAction, mediaChanged: self.mediaChanged) + self.delegate?.didFinishExportingImage(image: unwrappedImage, info: mediaInfo, archive: archive, action: exportAction, mediaChanged: self.mediaChanged) self.hideLoading() } } @@ -990,7 +1042,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController func getDefaultTimeIntervalForImageSegments() -> TimeInterval { return CameraSegment.defaultTimeInterval(segments: segments) } - + // MARK: - GifMakerHandlerDelegate func didConfirmGif() { @@ -1019,7 +1071,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController func unsetMediaPlayerFrame() { player.cancelPlayingSingleFrame() } - + // MARK: - EditorFilterControllerDelegate func didConfirmFilters() { @@ -1051,7 +1103,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController func didConfirmText(textView: StylableTextView, transformations: ViewTransformations, location: CGPoint, size: CGSize) { if !textView.text.isEmpty { - editorView.movableViewCanvas.addView(view: textView, transformations: transformations, location: location, size: size) + editorView.movableViewCanvas.addView(view: textView, transformations: transformations, location: location, size: size, animated: true) if let font = textView.options.font, let alignment = KanvasTextAlignment.from(alignment: textView.options.alignment) { analyticsProvider?.logEditorTextConfirm(isNew: editingNewText, font: font, alignment: alignment, highlighted: textView.options.highlightColor != nil) } @@ -1127,7 +1179,7 @@ public final class EditorViewController: UIViewController, MediaPlayerController func didSelectSticker(imageView: StylableImageView, size: CGSize) { analyticsProvider?.logEditorStickerAdd(stickerId: imageView.id) editorView.movableViewCanvas.addView(view: imageView, transformations: ViewTransformations(), - location: editorView.movableViewCanvas.bounds.center, size: size) + location: editorView.movableViewCanvas.bounds.center, size: size, animated: true) } func didSelectStickerType(_ stickerType: StickerType) { diff --git a/Classes/Editor/Media/Stickers/StylableImageView.swift b/Classes/Editor/Media/Stickers/StylableImageView.swift index e7997a1ba..410c607e0 100644 --- a/Classes/Editor/Media/Stickers/StylableImageView.swift +++ b/Classes/Editor/Media/Stickers/StylableImageView.swift @@ -8,7 +8,9 @@ import Foundation import UIKit /// Image view that increases its image quality when its contentScaleFactor is modified -final class StylableImageView: UIImageView, MovableViewInnerElement { +@objc final class StylableImageView: UIImageView, MovableViewInnerElement, NSSecureCoding { + + static var 
supportsSecureCoding: Bool { return true } let id: String @@ -17,6 +19,10 @@ final class StylableImageView: UIImageView, MovableViewInnerElement { setScaleFactor(newValue) } } + + var viewSize: CGSize = .zero + + var viewCenter: CGPoint = .zero // MARK: - Initializers @@ -25,10 +31,28 @@ final class StylableImageView: UIImageView, MovableViewInnerElement { super.init(image: image) } - required init?(coder: NSCoder) { - fatalError("init(coder:) has not been implemented") + required convenience init?(coder: NSCoder) { + let id = String(coder.decodeObject(of: NSString.self, forKey: CodingKeys.id.rawValue) ?? "") + let image = coder.decodeObject(of: UIImage.self, forKey: CodingKeys.image.rawValue) + self.init(id: id, image: image) + viewSize = coder.decodeCGSize(forKey: CodingKeys.size.rawValue) + viewCenter = coder.decodeCGPoint(forKey: CodingKeys.center.rawValue) } - + + private enum CodingKeys: String { + case id + case size + case center + case image + } + + override func encode(with coder: NSCoder) { + coder.encode(id, forKey: CodingKeys.id.rawValue) + coder.encode(viewSize, forKey: CodingKeys.size.rawValue) + coder.encode(viewCenter, forKey: CodingKeys.center.rawValue) + coder.encode(image, forKey: CodingKeys.image.rawValue) + } + // MARK: - Scale factor /// Sets a new scale factor to update the quality of the inner image. This value represents how content in the view is mapped diff --git a/Classes/Editor/MovableViews/MovableView.swift b/Classes/Editor/MovableViews/MovableView.swift index 60ae65f19..e1d917366 100644 --- a/Classes/Editor/MovableViews/MovableView.swift +++ b/Classes/Editor/MovableViews/MovableView.swift @@ -50,10 +50,12 @@ private struct Constants { } /// A wrapper for UIViews that can be rotated, moved and scaled -final class MovableView: UIView { +final class MovableView: UIView, NSSecureCoding { + + static var supportsSecureCoding: Bool { return true } weak var delegate: MovableViewDelegate? 
- private let innerView: MovableViewInnerElement + let innerView: MovableViewInnerElement /// Current rotation angle var rotation: CGFloat { @@ -75,6 +77,10 @@ final class MovableView: UIView { applyTransform() } } + + var originLocation: CGPoint = .zero + + var size: CGSize = .zero var transformations: ViewTransformations { return ViewTransformations(position: position, scale: scale, rotation: rotation) @@ -89,9 +95,56 @@ final class MovableView: UIView { setupInnerView() } + + private enum CodingKeys: String { + case position + case scale + case rotation + case innerView + case origin + } required init?(coder aDecoder: NSCoder) { - fatalError("init(coder:) has not been implemented") + position = aDecoder.decodeCGPoint(forKey: CodingKeys.position.rawValue) + scale = CGFloat(aDecoder.decodeFloat(forKey: CodingKeys.scale.rawValue)) + rotation = CGFloat(aDecoder.decodeFloat(forKey: CodingKeys.rotation.rawValue)) + let view = aDecoder.decodeObject(of: [StylableTextView.self, StylableImageView.self], forKey: CodingKeys.innerView.rawValue) + + switch view { + case let imageView as StylableImageView: + innerView = imageView + case let textView as StylableTextView: + innerView = textView + default: + innerView = StylableTextView() + } + originLocation = aDecoder.decodeCGPoint(forKey: CodingKeys.origin.rawValue) + + super.init(frame: .zero) + + setupInnerView() + } + + override func encode(with coder: NSCoder) { + super.encode(with: coder) + coder.encode(position, forKey: CodingKeys.position.rawValue) + coder.encode(Float(scale), forKey: CodingKeys.scale.rawValue) + coder.encode(Float(rotation), forKey: CodingKeys.rotation.rawValue) + coder.encode(innerView, forKey: CodingKeys.innerView.rawValue) + coder.encode(originLocation, forKey: CodingKeys.origin.rawValue) + } + + enum ViewType { + case text + case image + } + + var type: ViewType { + if innerView is StylableTextView { + return .text + } else { + return .image + } } // MARK: - Layout @@ -170,6 +223,7 @@ final class MovableView: UIView { // Called when the view is moved func onMove() { + innerView.viewCenter = position if let _ = innerView as? StylableTextView { delegate?.didMoveTextView() } diff --git a/Classes/Editor/MovableViews/MovableViewCanvas.swift b/Classes/Editor/MovableViews/MovableViewCanvas.swift index e60ff3366..b700b37aa 100644 --- a/Classes/Editor/MovableViews/MovableViewCanvas.swift +++ b/Classes/Editor/MovableViews/MovableViewCanvas.swift @@ -47,8 +47,10 @@ private struct Constants { } /// View that contains the collection of movable views -final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, MovableViewDelegate { - +final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, MovableViewDelegate, NSSecureCoding { + + static var supportsSecureCoding: Bool { return true } + weak var delegate: MovableViewCanvasDelegate? // View that has been tapped @@ -68,7 +70,11 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M private var originTransformations: ViewTransformations var isEmpty: Bool { - return subviews.compactMap{ $0 as? MovableView }.count == 0 + return movableViews.isEmpty + } + + var movableViews: [MovableView] { + return subviews.compactMap { $0 as? 
MovableView } } init() { @@ -77,12 +83,38 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M originTransformations = ViewTransformations() super.init(frame: .zero) setUpViews() + } + + private enum CodingKeys: String, CodingKey { + case originTransformations + case textViews + case imageViews + case movableViews } - - required init?(coder aDecoder: NSCoder) { - fatalError("init(coder:) has not been implemented") + + required init?(coder: NSCoder) { + + overlay = UIView() + trashView = TrashView() + + originTransformations = ViewTransformations() + + super.init(frame: .zero) + + let movableViews = coder.decodeObject(of: [NSArray.self, MovableView.self], forKey: CodingKeys.movableViews.rawValue) as? [MovableView] + movableViews?.forEach({ view in + addView(view: view.innerView, transformations: view.transformations, location: view.innerView.viewCenter, origin: view.originLocation, size: view.innerView.viewSize, animated: false) + }) + setUpViews() } - + + override func encode(with coder: NSCoder) { + coder.encode(originTransformations, forKey: CodingKeys.originTransformations.rawValue) + + let movableViews = subviews.compactMap({ $0 as? MovableView }) + coder.encode(movableViews, forKey: CodingKeys.movableViews.rawValue) + } + // MARK: - Layout private func setUpViews() { @@ -93,7 +125,6 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M /// Sets up the trash bin used during deletion private func setUpTrashView() { trashView.accessibilityIdentifier = "Editor Movable View Canvas Trash View" - trashView.layer.zPosition = 1 trashView.translatesAutoresizingMaskIntoConstraints = false addSubview(trashView) @@ -121,6 +152,13 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M overlay.alpha = 0 } + + override func layoutSubviews() { + super.layoutSubviews() + subviews.compactMap({ return $0 as? MovableView }).forEach({ view in + view.moveToDefinedPosition() + }) + } // MARK: - Public interface @@ -130,9 +168,12 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M /// - transformations: transformations for the view /// - location: location of the view before transformations /// - size: size of the view - func addView(view: MovableViewInnerElement, transformations: ViewTransformations, location: CGPoint, size: CGSize) { + func addView(view: MovableViewInnerElement, transformations: ViewTransformations, location: CGPoint, origin: CGPoint? = nil, size: CGSize, animated: Bool) { let movableView = MovableView(view: view, transformations: transformations) + movableView.originLocation = origin ?? 
location movableView.delegate = self + view.viewSize = size + view.viewCenter = location movableView.isUserInteractionEnabled = true movableView.isExclusiveTouch = true movableView.isMultipleTouchEnabled = true @@ -142,8 +183,8 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M NSLayoutConstraint.activate([ movableView.heightAnchor.constraint(equalToConstant: size.height), movableView.widthAnchor.constraint(equalToConstant: size.width), - movableView.topAnchor.constraint(equalTo: topAnchor, constant: location.y - (size.height / 2)), - movableView.leadingAnchor.constraint(equalTo: leadingAnchor, constant: location.x - (size.width / 2)) + movableView.centerXAnchor.constraint(equalTo: leadingAnchor, constant: movableView.originLocation.x), + movableView.centerYAnchor.constraint(equalTo: topAnchor, constant: movableView.originLocation.y) ]) let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(movableViewTapped(recognizer:))) @@ -163,10 +204,17 @@ final class MovableViewCanvas: IgnoreTouchesView, UIGestureRecognizerDelegate, M movableView.addGestureRecognizer(pinchRecognizer) movableView.addGestureRecognizer(panRecognizer) movableView.addGestureRecognizer(longPressRecognizer) - - UIView.animate(withDuration: Constants.animationDuration) { + + let move: () -> Void = { movableView.moveToDefinedPosition() } + if animated { + UIView.animate(withDuration: Constants.animationDuration) { + move() + } + } else { + move() + } } /// Removes the tapped view from the canvas diff --git a/Classes/Editor/MovableViews/MovableViewInnerElement.swift b/Classes/Editor/MovableViews/MovableViewInnerElement.swift index c129293fa..9096105f0 100644 --- a/Classes/Editor/MovableViews/MovableViewInnerElement.swift +++ b/Classes/Editor/MovableViews/MovableViewInnerElement.swift @@ -7,11 +7,17 @@ import Foundation /// Protocol for the view inside MovableView -protocol MovableViewInnerElement: UIView { +protocol MovableViewInnerElement: UIView, NSSecureCoding { /// Checks whether the hit is done inside the shape of the view /// /// - Parameter point: location where the view was touched /// - Returns: true if the touch was inside, false if not func hitInsideShape(point: CGPoint) -> Bool + + var viewSize: CGSize { get set } + + var viewCenter: CGPoint { get set } + + } diff --git a/Classes/Editor/MovableViews/ViewTransformations.swift b/Classes/Editor/MovableViews/ViewTransformations.swift index 6e3150e32..3cd5f83f0 100644 --- a/Classes/Editor/MovableViews/ViewTransformations.swift +++ b/Classes/Editor/MovableViews/ViewTransformations.swift @@ -7,7 +7,9 @@ import Foundation import UIKit -final class ViewTransformations { +final class ViewTransformations: NSObject, NSSecureCoding { + + static var supportsSecureCoding: Bool { return true } static let defaultPosition: CGPoint = .zero static let defaultScale: CGFloat = 1.0 @@ -25,4 +27,22 @@ final class ViewTransformations { self.scale = scale self.rotation = rotation } + + private enum CodingKeys: String { + case position + case scale + case rotation + } + + init?(coder: NSCoder) { + position = coder.decodeCGPoint(forKey: CodingKeys.position.rawValue) + scale = CGFloat(coder.decodeFloat(forKey: CodingKeys.scale.rawValue)) + rotation = CGFloat(coder.decodeFloat(forKey: CodingKeys.rotation.rawValue)) + } + + func encode(with coder: NSCoder) { + coder.encode(position, forKey: CodingKeys.position.rawValue) + coder.encode(Float(scale), forKey: CodingKeys.scale.rawValue) + coder.encode(Float(rotation), forKey: 
CodingKeys.rotation.rawValue) + } } diff --git a/Classes/Editor/MultiEditor/MultiEditorViewController.swift b/Classes/Editor/MultiEditor/MultiEditorViewController.swift index 5784b985e..6542c5665 100644 --- a/Classes/Editor/MultiEditor/MultiEditorViewController.swift +++ b/Classes/Editor/MultiEditor/MultiEditorViewController.swift @@ -9,7 +9,7 @@ import Foundation protocol MultiEditorComposerDelegate: EditorControllerDelegate { func didFinishExporting(media: [Result]) func addButtonWasPressed() - func editor(segment: CameraSegment) -> EditorViewController + func editor(segment: CameraSegment, canvas: MovableViewCanvas?, cache: NSCache?) -> EditorViewController func dismissButtonPressed() } @@ -30,6 +30,7 @@ class MultiEditorViewController: UIViewController { struct Frame { let segment: CameraSegment + let edit: Edit? } private var frames: [Frame] @@ -46,6 +47,13 @@ class MultiEditorViewController: UIViewController { guard newValue != selected && migratedIndex != newValue else { return } + if let old = selected { + do { + try archive(index: old) + } catch let error { + print("Failed to archive current edits \(error)") + } + } if let new = newValue { // If the new index is the same as the old just keep the current editor loadEditor(for: new) } else { @@ -55,7 +63,8 @@ class MultiEditorViewController: UIViewController { } func addSegment(_ segment: CameraSegment) { - frames.append(Frame(segment: segment)) + + frames.append(Frame(segment: segment, edit: nil)) let clip = MediaClip(representativeFrame: segment.lastFrame, overlayText: nil, @@ -68,33 +77,34 @@ class MultiEditorViewController: UIViewController { private let settings: CameraSettings + struct Edit { + let data: Data? + let cache: NSCache? + } private var exportingEditors: [EditorViewController]? private weak var currentEditor: EditorViewController? init(settings: CameraSettings, - segments: [CameraSegment], + frames: [Frame], delegate: MultiEditorComposerDelegate, selected: Array.Index?) { self.settings = settings self.delegate = delegate - - frames = segments.map({ segment in - return Frame(segment: segment) - }) + self.frames = frames self.exportHandler = MultiEditorExportHandler({ [weak delegate] result in delegate?.didFinishExporting(media: result) }) self.selected = selected super.init(nibName: nil, bundle: nil) - let clips = segments.map { segment in + let clips = frames.map { frame in return MediaClip(representativeFrame: - segment.lastFrame, + frame.segment.lastFrame, overlayText: nil, - lastFrame: segment.lastFrame) + lastFrame: frame.segment.lastFrame) } clipsController.replace(clips: clips) } @@ -121,7 +131,9 @@ class MultiEditorViewController: UIViewController { } func loadEditor(for index: Int) { - if let editor = delegate?.editor(segment: frames[index].segment) { + let canvas = edits(for: index) + let frame = frames[index] + if let editor = delegate?.editor(segment: frame.segment, canvas: canvas, cache: frame.edit?.cache) { currentEditor?.stopPlayback() currentEditor?.unloadFromParentViewController() let additionalPadding: CGFloat = 10 // Extra padding for devices that don't have safe areas (which provide some padding by default). 
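
The multi-editor round-trips per-frame edits through Foundation's keyed archiving: each frame's MovableViewCanvas is flattened to Data with NSKeyedArchiver and later restored with NSKeyedUnarchiver (see archive(index:) and edits(for:) further down in this diff). A minimal sketch of that round-trip, assuming the secure-coding support added above and code living inside the Kanvas module; roundTrip(canvas:) is a hypothetical helper and `canvas` stands in for editorView.movableViewCanvas:

import UIKit

// Hypothetical helper mirroring the archive/unarchive calls that
// MultiEditorViewController performs when switching frames.
func roundTrip(canvas: MovableViewCanvas) throws -> MovableViewCanvas? {
    // Flatten the canvas and its MovableViews to Data. Secure coding is
    // required, which is why MovableViewCanvas, MovableView, the inner
    // views and ViewTransformations all adopt NSSecureCoding in this diff.
    let data = try NSKeyedArchiver.archivedData(withRootObject: canvas,
                                                requiringSecureCoding: true)
    // Restore a fresh canvas; its init?(coder:) re-adds each decoded
    // MovableView at its stored center and size.
    return try NSKeyedUnarchiver.unarchivedObject(ofClass: MovableViewCanvas.self,
                                                  from: data)
}
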
@@ -243,6 +255,13 @@ extension MultiEditorViewController: MediaClipsEditorDelegate { } func mediaClipWasMoved(from originIndex: Int, to destinationIndex: Int) { + if let selected = selected { + do { + try archive(index: selected) + } catch let error { + print("Failed to archive current edits: \(error)") + } + } frames.move(from: originIndex, to: destinationIndex) let newIndex: Int @@ -263,7 +282,7 @@ extension MultiEditorViewController: MediaClipsEditorDelegate { } @objc func nextButtonWasPressed() { - } + } } extension MultiEditorViewController: EditorControllerDelegate { @@ -276,16 +295,13 @@ extension MultiEditorViewController: EditorControllerDelegate { return UIView() } - func didFinishExportingVideo(url: URL?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { - // No-op for the moment. API is coming in future commit. + func didFinishExportingVideo(url: URL?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { } - func didFinishExportingImage(image: UIImage?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { - // No-op for the moment. API is coming in future commit. + func didFinishExportingImage(image: UIImage?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { } - func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { - // No-op for the moment. API is coming in future commit. + func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { } func dismissButtonPressed() { @@ -293,27 +309,23 @@ extension MultiEditorViewController: EditorControllerDelegate { } func didDismissColorSelectorTooltip() { - delegate?.didDismissColorSelectorTooltip() + } func editorShouldShowColorSelectorTooltip() -> Bool { - return delegate?.editorShouldShowColorSelectorTooltip() == true + return true } func didEndStrokeSelectorAnimation() { - delegate?.didEndStrokeSelectorAnimation() + } func editorShouldShowStrokeSelectorAnimation() -> Bool { - return delegate?.editorShouldShowStrokeSelectorAnimation() == true + return true } func tagButtonPressed() { - delegate?.tagButtonPressed() - } - - struct EditOptions { - let soundEnabled: Bool + } func showLoading() { @@ -333,21 +345,80 @@ extension MultiEditorViewController: EditorControllerDelegate { showLoading() + if let selected = selected { + do { + try archive(index: selected) + } catch let error { + print("Failed to archive current edits on export \(error)") + } + } + exportHandler.startWaiting(for: frames.count) guard let delegate = delegate else { return true } frames.enumerated().forEach({ (idx, frame) in autoreleasepool { - let editor = delegate.editor(segment: frame.segment) - DispatchQueue.main.async { - editor.export { [weak self, editor] result in - let _ = editor // strong reference until the export completes - self?.exportHandler.handleExport(result, for: idx) + let canvas: MovableViewCanvas? 
+ if let edit = frame.edit?.data { + do { + canvas = try NSKeyedUnarchiver.unarchivedObject(ofClass: MovableViewCanvas.self, from: edit) + } catch let error { + print("Failed to unarchive edits on export for \(idx): \(error)") + assertionFailure("Failed to unarchive edits on export for \(idx): \(error)") + canvas = nil } + } else { + canvas = nil + } + let editor = delegate.editor(segment: frame.segment, canvas: canvas, cache: frame.edit?.cache) + editor.export { [weak self, editor] result in + let _ = editor // strong reference until the export completes + self?.exportHandler.handleExport(result, for: idx) } } }) return false } + + func addButtonPressed() { + dismiss(animated: true, completion: nil) + } +} + +//MARK: Edit + Archive + +extension MultiEditorViewController { + func archive(index: Int) throws { + guard let currentEditor = currentEditor else { + return + } + let currentCanvas = try NSKeyedArchiver.archivedData(withRootObject: currentEditor.editorView.movableViewCanvas, requiringSecureCoding: true) + if frames.indices ~= index { + let frame = frames[index] + frames[index] = Frame(segment: frame.segment, edit: Edit(data: currentCanvas, cache: currentEditor.cache)) + } else { + print("Invalid frame index") + } + } + + func edits(for index: Int) -> MovableViewCanvas? { + if frames.indices ~= index, let edit = frames[index].edit { + let canvas: MovableViewCanvas? + if let edit = edit.data { + do { + canvas = try NSKeyedUnarchiver.unarchivedObject(ofClass: MovableViewCanvas.self, from: edit) + } catch let error { + print("Failed to unarchive edits for \(index): \(error)") + assertionFailure("Failed to unarchive edits for \(index): \(error)") + canvas = nil + } + } else { + canvas = nil + } + return canvas + } else { + return nil + } + } } diff --git a/Classes/Editor/Text/MainTextView.swift b/Classes/Editor/Text/MainTextView.swift index de4f7de9c..0d6564014 100644 --- a/Classes/Editor/Text/MainTextView.swift +++ b/Classes/Editor/Text/MainTextView.swift @@ -43,9 +43,9 @@ final class MainTextView: StylableTextView { } required init?(coder aDecoder: NSCoder) { - super.init(coder: aDecoder) + fatalError("init(from:) has not been implemented") } - + override func canPerformAction(_ action: Selector, withSender sender: Any?) 
-> Bool { return false } diff --git a/Classes/Editor/Text/StylableTextView.swift b/Classes/Editor/Text/StylableTextView.swift index 57ec5f081..5c0d0415f 100644 --- a/Classes/Editor/Text/StylableTextView.swift +++ b/Classes/Editor/Text/StylableTextView.swift @@ -13,7 +13,9 @@ private struct Constants { } /// TextView that can be customized with TextOptions -class StylableTextView: UITextView, UITextViewDelegate, MovableViewInnerElement { +@objc class StylableTextView: UITextView, UITextViewDelegate, MovableViewInnerElement, NSSecureCoding { + + static var supportsSecureCoding: Bool { return true } // Color rectangles behind the text private var highlightViews: [UIView] @@ -41,7 +43,11 @@ class StylableTextView: UITextView, UITextViewDelegate, MovableViewInnerElement setScaleFactor(newValue) } } - + + var viewSize: CGSize = .zero + + var viewCenter: CGPoint = .zero + // MARK: - Initializers init() { @@ -65,11 +71,62 @@ class StylableTextView: UITextView, UITextViewDelegate, MovableViewInnerElement backgroundColor = .clear } - required init?(coder aDecoder: NSCoder) { + required init?(coder: NSCoder) { highlightViews = [] - super.init(coder: aDecoder) + + let size = coder.decodeCGSize(forKey: CodingKeys.size.rawValue) + + super.init(frame: CGRect(origin: .zero, size: size), textContainer: nil) delegate = self + backgroundColor = .clear + + textAlignment = NSTextAlignment(rawValue: coder.decodeInteger(forKey: CodingKeys.textAlignment.rawValue)) ?? .left + contentScaleFactor = CGFloat(coder.decodeFloat(forKey: CodingKeys.contentScaleFactor.rawValue)) + text = String(coder.decodeObject(of: NSString.self, forKey: CodingKeys.text.rawValue) ?? "") + + viewSize = coder.decodeCGSize(forKey: CodingKeys.size.rawValue) + viewCenter = coder.decodeCGPoint(forKey: CodingKeys.center.rawValue) + textColor = coder.decodeObject(of: UIColor.self, forKey: CodingKeys.textColor.rawValue) + highlightColor = coder.decodeObject(of: UIColor.self, forKey: CodingKeys.highlightColor.rawValue) + + let fontName = String(coder.decodeObject(of: NSString.self, forKey: FontKeys.name.rawValue) ?? 
"") + let fontSize = CGFloat(coder.decodeFloat(forKey: FontKeys.fontSize.rawValue)) + font = UIFont(name: fontName, size: fontSize) } + + private enum CodingKeys: String { + case textAlignment + case contentScaleFactor + case font + case text + case size + case center + case textColor + case highlightColor + } + + private enum FontKeys: String { + case name + case fontSize + } + + override func encode(with coder: NSCoder) { + + coder.encode(textAlignment.rawValue, forKey: CodingKeys.textAlignment.rawValue) + coder.encode(Float(contentScaleFactor), forKey: CodingKeys.contentScaleFactor.rawValue) + + coder.encode(text, forKey: CodingKeys.text.rawValue) + coder.encode(viewSize, forKey: CodingKeys.size.rawValue) + coder.encode(viewCenter, forKey: CodingKeys.center.rawValue) + coder.encode(textColor, forKey: CodingKeys.textColor.rawValue) + coder.encode(highlightColor, forKey: CodingKeys.highlightColor.rawValue) + + if let font = font { + coder.encode(font.fontName, forKey: FontKeys.name.rawValue) + coder.encode(Float(font.pointSize), forKey: FontKeys.fontSize.rawValue) + } + } + override func layoutSubviews() { super.layoutSubviews() diff --git a/Classes/Preview/CameraPreviewViewController.swift b/Classes/Preview/CameraPreviewViewController.swift index 909005190..d24dbc489 100644 --- a/Classes/Preview/CameraPreviewViewController.swift +++ b/Classes/Preview/CameraPreviewViewController.swift @@ -251,30 +251,32 @@ extension CameraPreviewViewController: CameraPreviewViewDelegate { func confirmButtonPressed() { stopPlayback() showLoading() - if segments.count == 1, let firstSegment = segments.first, let image = firstSegment.image { - // If the camera mode is .stopMotion, .normal or .stitch (.video) and the `exportStopMotionPhotoAsVideo` is true, - // then single photos from that mode should still export as video. - if let cameraMode = cameraMode, cameraMode.group == .video || settings.exportStopMotionPhotoAsVideo, let videoURL = firstSegment.videoURL { - performUIUpdate { - self.delegate?.didFinishExportingVideo(url: videoURL) - self.hideLoading() - } - } - else { - performUIUpdate { - self.delegate?.didFinishExportingImage(image: image) - self.hideLoading() - } - } - } - else if settings.features.gifs, - let group = cameraMode?.group, group == .gif, segments.count == 1, let segment = segments.first, let url = segment.videoURL { - // If one GIF/Loop video was captured, export it as a GIF - GIFEncoderImageIO().encode(video: url, loopCount: 0, framesPerSecond: KanvasTimes.gifPreferredFramesPerSecond) { gifURL in - performUIUpdate { - self.delegate?.didFinishExportingFrames(url: gifURL) - self.hideLoading() - } + if segments.count == 1, let firstSegment = segments.first { + switch firstSegment { + case .image(let image, let videoURL, _, _): + // If the camera mode is .stopMotion, .normal or .stitch (.video) and the `exportStopMotionPhotoAsVideo` is true, + // then single photos from that mode should still export as video. + if let cameraMode = cameraMode, cameraMode.group == .video || settings.exportStopMotionPhotoAsVideo { + performUIUpdate { + self.delegate?.didFinishExportingVideo(url: videoURL) + self.hideLoading() + } + } else { + performUIUpdate { + self.delegate?.didFinishExportingImage(image: image) + self.hideLoading() + } + } + case .video(let videoURL, _): + // If the camera mode is .stopMotion, .normal or .stitch (.video) and the `exportStopMotionPhotoAsVideo` is true, + // then single photos from that mode should still export as video. 
+ if settings.features.gifs, + let group = cameraMode?.group, group == .gif { + performUIUpdate { + self.delegate?.didFinishExportingVideo(url: videoURL) + self.hideLoading() + } + } } } else { diff --git a/Classes/Recording/CameraSegmentHandler.swift b/Classes/Recording/CameraSegmentHandler.swift index 63045e08c..8ba20f525 100644 --- a/Classes/Recording/CameraSegmentHandler.swift +++ b/Classes/Recording/CameraSegmentHandler.swift @@ -4,8 +4,10 @@ // file, You can obtain one at https://mozilla.org/MPL/2.0/. // +import UIKit import AVFoundation import Foundation +import CoreServices /// A container for segments public enum CameraSegment { @@ -20,6 +22,15 @@ public enum CameraSegment { } } + var isVideo: Bool { + switch self { + case .video: + return true + case .image: + return false + } + } + var videoURL: URL? { switch self { case .image(_, let url, _, _): return url @@ -45,9 +56,9 @@ public enum CameraSegment { static func defaultTimeInterval(segments: [CameraSegment]) -> TimeInterval { for media in segments { switch media { - case .image(_, _, _, _): + case .image: break - case .video(_, _): + case .video: return KanvasTimes.stopMotionFrameTimeInterval } } @@ -71,10 +82,10 @@ public enum CameraSegment { } func mediaFrame(defaultTimeInterval: TimeInterval) -> MediaFrame? { - if let image = self.image { - return (image: image, interval: self.timeInterval ?? defaultTimeInterval) - } - else { + switch self { + case .image(let image, _, let timeInterval, _): + return (image: image, interval: timeInterval ?? defaultTimeInterval) + case .video: return nil } } @@ -108,12 +119,13 @@ extension AssetsHandlerType { /// - segments: the CameraSegments /// - Returns: true if all images, false otherwise func containsOnlyImages(segments: [CameraSegment]) -> Bool { - for segment in segments { - if segment.image == nil { + return segments.contains(where: { segment in + if case .video = segment { + return true + } else { return false } - } - return true + }) == false } } @@ -312,13 +324,13 @@ final class CameraSegmentHandler: SegmentsHandlerType { var totalDuration: CMTime = CMTime.zero let allImages = containsOnlyImages(segments: segments) for segment in segments { - if let segmentURL = segment.videoURL { - let asset = AVURLAsset(url: segmentURL) + switch segment { + case .video(let url, _): + let asset = AVURLAsset(url: url) totalDuration = CMTimeAdd(totalDuration, asset.duration) - } - else if segment.image != nil { + case .image(_, _, let timeInterval, _): let duration: CMTime = { - if let timeInterval = segment.timeInterval { + if let timeInterval = timeInterval { return CMTime(seconds: timeInterval, preferredTimescale: KanvasTimes.stopMotionFrameTimescale) } else if allImages { @@ -431,23 +443,24 @@ final class CameraSegmentHandler: SegmentsHandlerType { for segment in segments { if segment.videoURL != nil { newSegments.append(segment) - continue } - guard let segmentImage = segment.image else { - assertionFailure("No video and no image?") - continue - } - dispatchGroup.enter() - self.videoQueue.async { - self.createVideoFromImage(image: segmentImage, duration: segment.timeInterval) { url in - guard let url = url else { - dispatchGroup.leave() - return - } - DispatchQueue.main.async { - newSegments.append(.image(segmentImage, url, segment.timeInterval, segment.mediaInfo)) - dispatchGroup.leave() + switch segment { + case .video: + assertionFailure("Video without a video URL") + case .image(let imageURL, _, _, _): + dispatchGroup.enter() + + self.videoQueue.async { + self.createVideoFromImage(image: 
imageURL, duration: segment.timeInterval) { url in + guard let url = url else { + dispatchGroup.leave() + return + } + DispatchQueue.main.async { + newSegments.append(.image(imageURL, url, segment.timeInterval, segment.mediaInfo)) + dispatchGroup.leave() + } } } } @@ -459,7 +472,7 @@ final class CameraSegmentHandler: SegmentsHandlerType { private func allImagesHaveVideo(segments: [CameraSegment]) -> Bool { for segment in segments { - if segment.image != nil && segment.videoURL == nil { + if case .image = segment, segment.videoURL == nil { return false } } @@ -540,7 +553,9 @@ final class CameraSegmentHandler: SegmentsHandlerType { assetExport.shouldOptimizeForNetworkUse = true assetExport.exportAsynchronously() { - completion(assetExport.status == .completed ? finalURL : nil, mediaInfo) + DispatchQueue.main.async { + completion(assetExport.status == .completed ? finalURL : nil, mediaInfo) + } } } diff --git a/Classes/Rendering/MediaPlayer.swift b/Classes/Rendering/MediaPlayer.swift index dfe3e31b2..cf30f8857 100644 --- a/Classes/Rendering/MediaPlayer.swift +++ b/Classes/Rendering/MediaPlayer.swift @@ -52,32 +52,35 @@ enum MediaPlayerPlaybackMode { final class MediaPlayerView: UIView, GLPixelBufferViewDelegate { weak var pixelBufferView: PixelBufferView? - - var mediaTransform: GLKMatrix4? { - didSet { - pixelBufferView?.mediaTransform = mediaTransform - } - } - - var isPortrait: Bool = true { - didSet { - pixelBufferView?.isPortrait = isPortrait - } - } weak var delegate: MediaPlayerViewDelegate? - init(metalContext: MetalContext?) { + var mediaTransform: GLKMatrix4? { + didSet { + if let metalPixelBufferView = pixelBufferView as? MetalPixelBufferView { + metalPixelBufferView.mediaTransform = mediaTransform + } + } + } + + var isPortrait: Bool = true { + didSet { + if let metalPixelBufferView = pixelBufferView as? MetalPixelBufferView { + metalPixelBufferView.isPortrait = isPortrait + } + } + } + + init(metalContext: MetalContext?=nil) { super.init(frame: .zero) let pixelBufferView: PixelBufferView & UIView + if let metalContext = metalContext { pixelBufferView = MetalPixelBufferView(context: metalContext) + } else { + pixelBufferView = GLPixelBufferView(delegate: self, mediaContentMode: .scaleAspectFill) } - else { - pixelBufferView = GLPixelBufferView(delegate: self, mediaContentMode: .scaleAspectFit) - } - pixelBufferView.add(into: self) self.pixelBufferView = pixelBufferView } @@ -199,17 +202,6 @@ final class MediaPlayer { } } - func getFrame(at index: Int) -> UIImage? { - guard index >= 0 && index < playableMedia.count else { - return nil - } - switch playableMedia[index] { - case .image(let image, _, _): - return image - case .video(_, _, _): - return nil - } - } /// Default initializer /// - Parameter renderer: Rendering instance for this player to use. 
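One note on the MediaPlayerView changes just above: mediaTransform and isPortrait are now forwarded only when the backing view is the Metal implementation, and a nil metalContext falls back to the GL path. A small sketch of that selection-and-forwarding pattern, with hypothetical stub types standing in for MetalPixelBufferView and GLPixelBufferView:

import UIKit

// Hypothetical stand-ins for the two rendering backends behind PixelBufferView.
protocol PixelBufferViewStub: AnyObject {}

final class MetalViewStub: UIView, PixelBufferViewStub {
    // Only the Metal-backed view exposes orientation in this sketch,
    // mirroring the `as? MetalPixelBufferView` checks in the diff.
    var isPortrait = true
}

final class GLViewStub: UIView, PixelBufferViewStub {}

final class PlayerViewSketch: UIView {
    private let pixelBufferView: UIView & PixelBufferViewStub

    var isPortrait = true {
        didSet {
            // Forward only when the backing view is Metal-based.
            (pixelBufferView as? MetalViewStub)?.isPortrait = isPortrait
        }
    }

    // A missing Metal context selects the GL fallback, like the new
    // `metalContext: MetalContext? = nil` default parameter allows.
    init(useMetal: Bool) {
        if useMetal {
            pixelBufferView = MetalViewStub()
        } else {
            pixelBufferView = GLViewStub()
        }
        super.init(frame: .zero)
        addSubview(pixelBufferView)
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }
}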
diff --git a/KanvasExample/KanvasExampleTests/Camera/CameraControllerTests.swift b/KanvasExample/KanvasExampleTests/Camera/CameraControllerTests.swift index 80fa97953..660860acb 100644 --- a/KanvasExample/KanvasExampleTests/Camera/CameraControllerTests.swift +++ b/KanvasExample/KanvasExampleTests/Camera/CameraControllerTests.swift @@ -329,6 +329,10 @@ final class CameraControllerDelegateStub: CameraControllerDelegate { func cameraShouldShowWelcomeTooltip() -> Bool { return false } + + func editorDismissed(_ cameraController: CameraController) { + + } func didDismissColorSelectorTooltip() { diff --git a/KanvasExample/KanvasExampleTests/Editor/EditorControllerTests.swift b/KanvasExample/KanvasExampleTests/Editor/EditorControllerTests.swift index f950a5a63..410a6603b 100644 --- a/KanvasExample/KanvasExampleTests/Editor/EditorControllerTests.swift +++ b/KanvasExample/KanvasExampleTests/Editor/EditorControllerTests.swift @@ -339,19 +339,19 @@ final class EditorControllerDelegateStub: EditorControllerDelegate { var imageExportCompletion: (() -> Void)? var framesExportCompletion: (() -> Void)? - func didFinishExportingVideo(url: URL?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + func didFinishExportingVideo(url: URL?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { XCTAssertNotNil(url) videoExportCalled = true videoExportCompletion?() } - func didFinishExportingImage(image: UIImage?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + func didFinishExportingImage(image: UIImage?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { XCTAssertNotNil(image) imageExportCalled = true imageExportCompletion?() } - func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, action: KanvasExportAction, mediaChanged: Bool) { + func didFinishExportingFrames(url: URL?, size: CGSize?, info: MediaInfo?, archive: Data?, action: KanvasExportAction, mediaChanged: Bool) { XCTAssertNotNil(url) framesExportCalled = true framesExportCompletion?() diff --git a/KanvasExample/KanvasExampleTests/Editor/EditorViewTests.swift b/KanvasExample/KanvasExampleTests/Editor/EditorViewTests.swift index d2fce4d00..d6ef451c6 100644 --- a/KanvasExample/KanvasExampleTests/Editor/EditorViewTests.swift +++ b/KanvasExample/KanvasExampleTests/Editor/EditorViewTests.swift @@ -32,7 +32,8 @@ final class EditorViewTests: FBSnapshotTestCase { showBlogSwitcher: false, quickBlogSelectorCoordinator: nil, tagCollection: nil, - metalContext: nil) + metalContext: nil, + movableViewCanvas: nil) view.frame = CGRect(x: 0, y: 0, width: 320, height: 480) return view } diff --git a/KanvasExample/KanvasExampleTests/Editor/MovableViews/MovableViewCanvasTests.swift b/KanvasExample/KanvasExampleTests/Editor/MovableViews/MovableViewCanvasTests.swift index 988b0190e..994ce283c 100644 --- a/KanvasExample/KanvasExampleTests/Editor/MovableViews/MovableViewCanvasTests.swift +++ b/KanvasExample/KanvasExampleTests/Editor/MovableViews/MovableViewCanvasTests.swift @@ -32,7 +32,7 @@ final class MovableViewCanvasTests: FBSnapshotTestCase { textView.options = TextOptions(text: "Example", font: .fairwater(fontSize: 48)) let location = view.center let transformations = ViewTransformations() - view.addView(view: textView, transformations: transformations, location: location, size: view.frame.size) + view.addView(view: textView, transformations: transformations, location: location, size: view.frame.size, animated: true) FBSnapshotVerifyView(view) } diff 
--git a/KanvasExample/KanvasExampleTests/Utility/MediaMetadataTests.swift b/KanvasExample/KanvasExampleTests/Utility/MediaMetadataTests.swift index cfaae9385..17345917c 100644 --- a/KanvasExample/KanvasExampleTests/Utility/MediaMetadataTests.swift +++ b/KanvasExample/KanvasExampleTests/Utility/MediaMetadataTests.swift @@ -35,10 +35,13 @@ class MediaMetadataTests: XCTestCase { let segmentsHandler = CameraSegmentHandler() let recorder = CameraRecorderStub(size: CGSize(width: 300, height: 300), photoOutput: nil, videoOutput: nil, audioOutput: nil, recordingDelegate: nil, segmentsHandler: segmentsHandler, settings: settings) recorder.takePhoto(on: .photo) { image in - guard let url = CameraController.save(image: image, info: .init(source: .kanvas_camera)) else { + let info = MediaInfo(source: .kanvas_camera) + guard let data = image?.jpegData(compressionQuality: 1), + let url = try? CameraController.save(data: data, to: "kanvas-image", ext: "jpg") else { XCTFail() return } + info.write(toImage: url) let mediaInfo = MediaInfo(fromImage: url) XCTAssertEqual(mediaInfo?.source, .kanvas_camera) expectation.fulfill()
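The MediaMetadataTests change above now persists the raw JPEG data and stamps the media info onto the saved file before reading it back. A rough, self-contained sketch of that save-then-inspect flow; saveToTemporaryFile is a hypothetical stand-in for CameraController.save(data:to:ext:), and the MediaInfo.write(toImage:) step is omitted because it depends on Kanvas internals:

import UIKit

// Hypothetical helper standing in for CameraController.save(data:to:ext:):
// writes the encoded media to a uniquely named file in the temporary directory.
func saveToTemporaryFile(data: Data, name: String, ext: String) throws -> URL {
    let url = FileManager.default.temporaryDirectory
        .appendingPathComponent("\(name)-\(UUID().uuidString)")
        .appendingPathExtension(ext)
    try data.write(to: url, options: .atomic)
    return url
}

// Usage: render a small solid-color image, encode it as JPEG, save it,
// and confirm the file decodes back into an image.
let renderer = UIGraphicsImageRenderer(size: CGSize(width: 8, height: 8))
let image = renderer.image { context in
    UIColor.red.setFill()
    context.fill(CGRect(x: 0, y: 0, width: 8, height: 8))
}

if let jpeg = image.jpegData(compressionQuality: 1),
   let url = try? saveToTemporaryFile(data: jpeg, name: "kanvas-image", ext: "jpg") {
    assert(UIImage(contentsOfFile: url.path) != nil)
}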