marinofaggiana 5 жил өмнө
parent
commit
ce0384066f

+ 4 - 0
Nextcloud.xcodeproj/project.pbxproj

@@ -84,6 +84,7 @@
 		F7063DEF2199E568003F38DA /* CocoaLumberjack.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F7063DEE2199E568003F38DA /* CocoaLumberjack.framework */; };
 		F7063DF12199E56F003F38DA /* CocoaLumberjackSwift.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F7063DF02199E56E003F38DA /* CocoaLumberjackSwift.framework */; };
 		F707C26521A2DC5200F6181E /* NCStoreReview.swift in Sources */ = {isa = PBXBuildFile; fileRef = F707C26421A2DC5200F6181E /* NCStoreReview.swift */; };
+		F70968A424212C4E00ED60E5 /* NCLivePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = F70968A324212C4E00ED60E5 /* NCLivePhoto.swift */; };
 		F70BFC7420E0FA7D00C67599 /* NCUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = F70BFC7320E0FA7C00C67599 /* NCUtility.swift */; };
 		F70BFC7520E0FA7D00C67599 /* NCUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = F70BFC7320E0FA7C00C67599 /* NCUtility.swift */; };
 		F70CAE3A1F8CF31A008125FD /* NCEndToEndEncryption.m in Sources */ = {isa = PBXBuildFile; fileRef = F70CAE391F8CF31A008125FD /* NCEndToEndEncryption.m */; };
@@ -691,6 +692,7 @@
 		F7063DEE2199E568003F38DA /* CocoaLumberjack.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CocoaLumberjack.framework; path = Carthage/Build/iOS/CocoaLumberjack.framework; sourceTree = "<group>"; };
 		F7063DF02199E56E003F38DA /* CocoaLumberjackSwift.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CocoaLumberjackSwift.framework; path = Carthage/Build/iOS/CocoaLumberjackSwift.framework; sourceTree = "<group>"; };
 		F707C26421A2DC5200F6181E /* NCStoreReview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NCStoreReview.swift; sourceTree = "<group>"; };
+		F70968A324212C4E00ED60E5 /* NCLivePhoto.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NCLivePhoto.swift; sourceTree = "<group>"; };
 		F70A07C8205285FB00DC1231 /* pt-PT */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "pt-PT"; path = "pt-PT.lproj/Localizable.strings"; sourceTree = "<group>"; };
 		F70BFC7320E0FA7C00C67599 /* NCUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NCUtility.swift; sourceTree = "<group>"; };
 		F70CAE381F8CF31A008125FD /* NCEndToEndEncryption.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = NCEndToEndEncryption.h; sourceTree = "<group>"; };
@@ -2280,6 +2282,7 @@
 				F76C3B881C638A4C00DC4301 /* CCError.m */,
 				F7A54C341C6267B500E2C8BF /* CCExifGeo.h */,
 				F7A54C351C6267B500E2C8BF /* CCExifGeo.m */,
+				F70968A324212C4E00ED60E5 /* NCLivePhoto.swift */,
 				F76C3B831C6388BC00DC4301 /* CCGraphics.h */,
 				F76C3B841C6388BC00DC4301 /* CCGraphics.m */,
 				F7514EDA1C7B1336008F3338 /* CCHud.h */,
@@ -3419,6 +3422,7 @@
 				F73B4EFF1F470D9100BBEE4B /* LangRussianModel.cpp in Sources */,
 				F7BAADCB1ED5A87C00B7EAD4 /* NCManageDatabase.swift in Sources */,
 				F79018B9240962C7007C9B6D /* NCViewerImageContentTransformers.swift in Sources */,
+				F70968A424212C4E00ED60E5 /* NCLivePhoto.swift in Sources */,
 				F7A321551E9E2A070069AD1B /* CCFavorites.m in Sources */,
 				F704FA5C232A343F00BBA952 /* IMImagemeterViewer.swift in Sources */,
 				F73B4F031F470D9100BBEE4B /* LangVietnameseModel.cpp in Sources */,

+ 23 - 8
iOSClient/Main/Menu/NCDetailNavigationController+Menu.swift

@@ -78,15 +78,21 @@ extension NCDetailNavigationController {
             )
         }
         
-        actions.append(
-            NCMenuAction(title: NSLocalizedString("_close_", comment: ""),
-                icon: CCGraphics.changeThemingColorImage(UIImage(named: "exit"), width: 50, height: 50, color: NCBrandColor.sharedInstance.icon),
-                action: { menuAction in
-                    self.appDelegate.activeDetail.viewUnload()
+        if appDelegate.activeDetail.viewerImageViewController != nil {
+            if let metadataMov = appDelegate.activeDetail.hasMOV(metadata: metadata) {
+                if CCUtility.fileProviderStorageSize(metadata.ocId, fileNameView: metadata.fileNameView) > 0 && CCUtility.fileProviderStorageSize(metadataMov.ocId, fileNameView: metadataMov.fileNameView) > 0 {
+                    actions.append(
+                        NCMenuAction(title: NSLocalizedString("_livephoto_save_", comment: ""),
+                            icon: CCGraphics.changeThemingColorImage(UIImage(named: "livePhoto"), width: 50, height: 50, color: NCBrandColor.sharedInstance.icon),
+                            action: { menuAction in
+                                self.appDelegate.activeDetail.saveLivePhoto(metadata: metadata, metadataMov: metadataMov)
+                            }
+                        )
+                    )
                 }
-            )
-        )
-        
+            }
+        }
+                
         if CCUtility.isDocumentModifiableExtension(fileNameExtension) && (directEditingCreators == nil || !appDelegate.reachability.isReachable()) {
             actions.append(
                 NCMenuAction(title: NSLocalizedString("_internal_modify_", comment: ""),
@@ -105,6 +111,15 @@ extension NCDetailNavigationController {
             )
         }
         
+        actions.append(
+            NCMenuAction(title: NSLocalizedString("_close_", comment: ""),
+                icon: CCGraphics.changeThemingColorImage(UIImage(named: "exit"), width: 50, height: 50, color: NCBrandColor.sharedInstance.icon),
+                action: { menuAction in
+                    self.appDelegate.activeDetail.viewUnload()
+                }
+            )
+        )
+        
         return actions
     }
 

+ 21 - 0
iOSClient/Main/NCDetailViewController.swift

@@ -700,6 +700,27 @@ extension NCDetailViewController: NCViewerImageViewControllerDelegate, NCViewerI
         appDelegate.startLoadAutoDownloadUpload()
     }
     
+    func saveLivePhoto(metadata: tableMetadata, metadataMov: tableMetadata) {
+        
+        let fileNameImage = URL(fileURLWithPath: CCUtility.getDirectoryProviderStorageOcId(metadata.ocId, fileNameView: metadata.fileNameView)!)
+        let fileNameMov = URL(fileURLWithPath: CCUtility.getDirectoryProviderStorageOcId(metadataMov.ocId, fileNameView: metadataMov.fileNameView)!)
+        
+        NCLivePhoto.generate(from: fileNameImage, videoURL: fileNameMov, progress: { progress in
+            self.progress(Float(progress))
+        }, completion: { livePhoto, resources in
+            self.progress(0)
+            if resources != nil {
+                NCLivePhoto.saveToLibrary(resources!) { (result) in
+                    if !result {
+                        NCContentPresenter.shared.messageNotification("_error_", description: "_livephoto_save_error_", delay: TimeInterval(k_dismissAfterSecond), type: NCContentPresenter.messageType.error, errorCode: Int(k_CCErrorInternalError))
+                    }
+                }
+            } else {
+                NCContentPresenter.shared.messageNotification("_error_", description: "_livephoto_save_error_", delay: TimeInterval(k_dismissAfterSecond), type: NCContentPresenter.messageType.error, errorCode: Int(k_CCErrorInternalError))
+            }
+        })
+    }
+    
     func statusViewImage(metadata: tableMetadata, viewerImageViewController: NCViewerImageViewController) {
         
         var colorStatus: UIColor = UIColor.white.withAlphaComponent(0.8)

+ 473 - 0
iOSClient/Utility/NCLivePhoto.swift

@@ -0,0 +1,473 @@
+//
+//  LivePhoto.swift
+//  NCLivePhoto
+//
+//  Created by Alexander Pagliaro on 7/25/18.
+//  Copyright © 2018 Limit Point LLC. All rights reserved.
+//
+
+import UIKit
+import AVFoundation
+import MobileCoreServices
+import Photos
+
+class NCLivePhoto {
+    
+    // MARK: PUBLIC
+    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)
+    /// Returns the paired image and video for the given PHLivePhoto
+    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
+        queue.async {
+            shared.extractResources(from: livePhoto, completion: completion)
+        }
+    }
+    /// Generates a PHLivePhoto from an image and video.  Also returns the paired image and video.
+    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
+        queue.async {
+            shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
+        }
+    }
+    /// Save a Live Photo to the Photo Library by passing the paired image and video.
+    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
+        PHPhotoLibrary.shared().performChanges({
+            let creationRequest = PHAssetCreationRequest.forAsset()
+            let options = PHAssetResourceCreationOptions()
+            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
+            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
+        }, completionHandler: { (success, error) in
+            if error != nil {
+                print(error as Any)
+            }
+            completion(success)
+        })
+    }
+    
+    // MARK: PRIVATE
+    private static let shared = NCLivePhoto()
+    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)
+    lazy private var cacheDirectory: URL? = {
+        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
+            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
+            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
+                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
+            }
+            return fullDirectory
+        }
+        return nil
+    }()
+    
+    deinit {
+        clearCache()
+    }
+    
+    private func generateKeyPhoto(from videoURL: URL) -> URL? {
+        var percent:Float = 0.5
+        let videoAsset = AVURLAsset(url: videoURL)
+        if let stillImageTime = videoAsset.stillImageTime() {
+            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
+        }
+        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
+        guard let jpegData = imageFrame.jpegData(compressionQuality: 1) else { return nil }
+        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
+        do {
+            try jpegData.write(to: url)
+            return url
+        } catch { return nil }
+    }
+    private func clearCache() {
+        if let cacheDirectory = cacheDirectory {
+            try? FileManager.default.removeItem(at: cacheDirectory)
+        }
+    }
+    
+    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
+        guard let cacheDirectory = cacheDirectory else {
+            DispatchQueue.main.async {
+                completion(nil, nil)
+            }
+            return
+        }
+        let assetIdentifier = UUID().uuidString
+        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
+        guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
+            DispatchQueue.main.async {
+                completion(nil, nil)
+            }
+            return
+        }
+        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { (_videoURL) in
+            if let pairedVideoURL = _videoURL {
+                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable : Any]) -> Void in
+                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
+                        return
+                    }
+                    DispatchQueue.main.async {
+                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
+                    }
+                })
+            } else {
+                DispatchQueue.main.async {
+                    completion(nil, nil)
+                }
+            }
+        }
+    }
+    
+    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
+        let assetResources = PHAssetResource.assetResources(for: livePhoto)
+        let group = DispatchGroup()
+        var keyPhotoURL: URL?
+        var videoURL: URL?
+        for resource in assetResources {
+            let buffer = NSMutableData()
+            let options = PHAssetResourceRequestOptions()
+            options.isNetworkAccessAllowed = true
+            group.enter()
+            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { (data) in
+                buffer.append(data)
+            }) { (error) in
+                if error == nil {
+                    if resource.type == .pairedVideo {
+                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
+                    } else {
+                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
+                    }
+                } else {
+                    print(error as Any)
+                }
+                group.leave()
+            }
+        }
+        group.notify(queue: DispatchQueue.main) {
+            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
+                completion(nil)
+                return
+            }
+            completion((pairedPhotoURL, pairedVideoURL))
+        }
+    }
+    
+    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
+        if let cacheDirectory = cacheDirectory {
+            extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
+        }
+    }
+    
+    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
+        let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString,kUTTagClassFilenameExtension)?.takeRetainedValue()
+        
+        guard let ext = fileExtension else {
+            return nil
+        }
+        
+        var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
+        fileUrl = fileUrl.appendingPathExtension(ext as String)
+        
+        do {
+            try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
+        } catch {
+            print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
+            return nil
+        }
+        
+        return fileUrl
+    }
+    
+    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
+        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
+            let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
+            var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else { return nil }
+        let assetIdentifierKey = "17"
+        let assetIdentifierInfo = [assetIdentifierKey : assetIdentifier]
+        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
+        CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
+        CGImageDestinationFinalize(imageDestination)
+        return destinationURL
+    }
+    
+    var audioReader: AVAssetReader?
+    var videoReader: AVAssetReader?
+    var assetWriter: AVAssetWriter?
+    
+    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
+        
+        var audioWriterInput: AVAssetWriterInput?
+        var audioReaderOutput: AVAssetReaderOutput?
+        let videoAsset = AVURLAsset(url: videoURL)
+        let frameCount = videoAsset.countFrames(exact: false)
+        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
+            completion(nil)
+            return
+        }
+        do {
+            // Create the Asset Writer
+            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
+            // Create Video Reader Output
+            videoReader = try AVAssetReader(asset: videoAsset)
+            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
+            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
+            videoReader?.add(videoReaderOutput)
+            // Create Video Writer Input
+            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : videoTrack.naturalSize.width, AVVideoHeightKey : videoTrack.naturalSize.height])
+            videoWriterInput.transform = videoTrack.preferredTransform
+            videoWriterInput.expectsMediaDataInRealTime = true
+            assetWriter?.add(videoWriterInput)
+            // Create Audio Reader Output & Writer Input
+            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
+                do {
+                    let _audioReader = try AVAssetReader(asset: videoAsset)
+                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
+                    _audioReader.add(_audioReaderOutput)
+                    audioReader = _audioReader
+                    audioReaderOutput = _audioReaderOutput
+                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
+                    _audioWriterInput.expectsMediaDataInRealTime = false
+                    assetWriter?.add(_audioWriterInput)
+                    audioWriterInput = _audioWriterInput
+                } catch {
+                    print(error)
+                }
+            }
+            // Create necessary identifier metadata and still image time metadata
+            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
+            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
+            assetWriter?.metadata = [assetIdentifierMetadata]
+            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
+            // Start the Asset Writer
+            assetWriter?.startWriting()
+            assetWriter?.startSession(atSourceTime: CMTime.zero)
+            // Add still image metadata
+            let _stillImagePercent: Float = 0.5
+            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()],timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
+            // For end of writing / progress
+            var writingVideoFinished = false
+            var writingAudioFinished = false
+            var currentFrameCount = 0
+            func didCompleteWriting() {
+                guard writingAudioFinished && writingVideoFinished else { return }
+                assetWriter?.finishWriting {
+                    if self.assetWriter?.status == .completed {
+                        completion(destinationURL)
+                    } else {
+                        completion(nil)
+                    }
+                }
+            }
+            // Start writing video
+            if videoReader?.startReading() ?? false {
+                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
+                    while videoWriterInput.isReadyForMoreMediaData {
+                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()  {
+                            currentFrameCount += 1
+                            let percent:CGFloat = CGFloat(currentFrameCount)/CGFloat(frameCount)
+                            progress(percent)
+                            if !videoWriterInput.append(sampleBuffer) {
+                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
+                                self.videoReader?.cancelReading()
+                            }
+                        } else {
+                            videoWriterInput.markAsFinished()
+                            writingVideoFinished = true
+                            didCompleteWriting()
+                        }
+                    }
+                }
+            } else {
+                writingVideoFinished = true
+                didCompleteWriting()
+            }
+            // Start writing audio
+            if audioReader?.startReading() ?? false {
+                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
+                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
+                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
+                            audioWriterInput?.markAsFinished()
+                            writingAudioFinished = true
+                            didCompleteWriting()
+                            return
+                        }
+                        audioWriterInput?.append(sampleBuffer)
+                    }
+                }
+            } else {
+                writingAudioFinished = true
+                didCompleteWriting()
+            }
+        } catch {
+            print(error)
+            completion(nil)
+        }
+    }
+    
+    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
+        let item = AVMutableMetadataItem()
+        let keyContentIdentifier =  "com.apple.quicktime.content.identifier"
+        let keySpaceQuickTimeMetadata = "mdta"
+        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
+        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
+        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
+        item.dataType = "com.apple.metadata.datatype.UTF-8"
+        return item
+    }
+    
+    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
+        let keyStillImageTime = "com.apple.quicktime.still-image-time"
+        let keySpaceQuickTimeMetadata = "mdta"
+        let spec : NSDictionary = [
+            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
+            "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
+            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
+            "com.apple.metadata.datatype.int8"            ]
+        var desc : CMFormatDescription? = nil
+        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
+        let input = AVAssetWriterInput(mediaType: .metadata,
+                                       outputSettings: nil, sourceFormatHint: desc)
+        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
+    }
+    
+    private func metadataItemForStillImageTime() -> AVMetadataItem {
+        let item = AVMutableMetadataItem()
+        let keyStillImageTime = "com.apple.quicktime.still-image-time"
+        let keySpaceQuickTimeMetadata = "mdta"
+        item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
+        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
+        item.value = 0 as (NSCopying & NSObjectProtocol)?
+        item.dataType = "com.apple.metadata.datatype.int8"
+        return item
+    }
+    
+}
+
+fileprivate extension AVAsset {
+    func countFrames(exact:Bool) -> Int {
+        
+        var frameCount = 0
+        
+        if let videoReader = try? AVAssetReader(asset: self)  {
+            
+            if let videoTrack = self.tracks(withMediaType: .video).first {
+                
+                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
+                
+                
+                if exact {
+                    
+                    frameCount = 0
+                    
+                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
+                    videoReader.add(videoReaderOutput)
+                    
+                    videoReader.startReading()
+                    
+                    // count frames
+                    while true {
+                        let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
+                        if sampleBuffer == nil {
+                            break
+                        }
+                        frameCount += 1
+                    }
+                    
+                    videoReader.cancelReading()
+                }
+                
+                
+            }
+        }
+        
+        return frameCount
+    }
+    
+    func stillImageTime() -> CMTime?  {
+        
+        var stillTime:CMTime? = nil
+        
+        if let videoReader = try? AVAssetReader(asset: self)  {
+            
+            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
+                
+                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
+                
+                videoReader.add(videoReaderOutput)
+                
+                videoReader.startReading()
+                
+                let keyStillImageTime = "com.apple.quicktime.still-image-time"
+                let keySpaceQuickTimeMetadata = "mdta"
+                
+                var found = false
+                
+                while found == false {
+                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
+                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
+                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
+                            for item in group?.items ?? [] {
+                                if item.key as? String == keyStillImageTime && item.keySpace!.rawValue == keySpaceQuickTimeMetadata {
+                                    stillTime = group?.timeRange.start
+                                    //print("stillImageTime = \(CMTimeGetSeconds(stillTime!))")
+                                    found = true
+                                    break
+                                }
+                            }
+                        }
+                    }
+                    else {
+                        break;
+                    }
+                }
+                
+                videoReader.cancelReading()
+                
+            }
+        }
+        
+        return stillTime
+    }
+    
+    func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
+        
+        var time = self.duration
+        
+        var frameCount = inFrameCount
+        
+        if frameCount == 0 {
+            frameCount = self.countFrames(exact: true)
+        }
+        
+        let frameDuration = Int64(Float(time.value) / Float(frameCount))
+        
+        time.value = Int64(Float(time.value) * percent)
+        
+        //print("stillImageTime = \(CMTimeGetSeconds(time))")
+        
+        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
+    }
+    
+    func getAssetFrame(percent:Float) -> UIImage?
+    {
+        
+        let imageGenerator = AVAssetImageGenerator(asset: self)
+        imageGenerator.appliesPreferredTrackTransform = true
+        
+        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
+        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
+        
+        var time = self.duration
+        
+        time.value = Int64(Float(time.value) * percent)
+        
+        do {
+            var actualTime = CMTime.zero
+            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
+            
+            let img = UIImage(cgImage: imageRef)
+            
+            return img
+        }
+        catch let error as NSError
+        {
+            print("Image generation failed with error \(error)")
+            return nil
+        }
+    }
+}