//
//  LivePhoto.swift
//  NCLivePhoto
//
//  Created by Alexander Pagliaro on 7/25/18.
//  Copyright © 2018 Limit Point LLC. All rights reserved.
//

import UIKit
import AVFoundation
import MobileCoreServices
import Photos

class NCLivePhoto {

    // MARK: PUBLIC

    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)

    /// Returns the paired image and video for the given PHLivePhoto.
    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        queue.async {
            shared.extractResources(from: livePhoto, completion: completion)
        }
    }
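    // Usage sketch (assumes you already have a `PHLivePhoto`, e.g. from a picker
    // or a PHAsset request; illustrative only):
    //
    //     NCLivePhoto.extractResources(from: livePhoto) { resources in
    //         guard let resources = resources else { return }
    //         print("image:", resources.pairedImage, "video:", resources.pairedVideo)
    //     }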
    /// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        queue.async {
            shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
        }
    }
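    // Usage sketch (`movURL` is a hypothetical local .mov; passing nil for the key
    // photo makes the library extract a frame from the video; illustrative only):
    //
    //     NCLivePhoto.generate(from: nil, videoURL: movURL, progress: { percent in
    //         print("progress: \(percent)")
    //     }) { livePhoto, resources in
    //         // livePhoto can be displayed in a PHLivePhotoView;
    //         // resources can be passed to saveToLibrary(_:completion:).
    //     }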
    /// Save a Live Photo to the Photo Library by passing the paired image and video.
    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            let options = PHAssetResourceCreationOptions()
            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
        }, completionHandler: { success, error in
            if let error = error {
                print(error)
            }
            completion(success)
        })
    }
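    // Note: saving requires photo library authorization. A minimal call site,
    // assuming authorization has already been granted (illustrative only):
    //
    //     NCLivePhoto.saveToLibrary((pairedImage: imageURL, pairedVideo: videoURL)) { success in
    //         print(success ? "saved" : "save failed")
    //     }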
    // MARK: PRIVATE

    private static let shared = NCLivePhoto()
    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)

    lazy private var cacheDirectory: URL? = {
        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
            }
            return fullDirectory
        }
        return nil
    }()
    deinit {
        clearCache()
    }
    private func generateKeyPhoto(from videoURL: URL) -> URL? {
        var percent: Float = 0.5
        let videoAsset = AVURLAsset(url: videoURL)
        if let stillImageTime = videoAsset.stillImageTime() {
            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
        }
        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
        guard let jpegData = imageFrame.jpegData(compressionQuality: 1) else { return nil }
        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
        do {
            try jpegData.write(to: url)
            return url
        } catch {
            return nil
        }
    }
    private func clearCache() {
        if let cacheDirectory = cacheDirectory {
            try? FileManager.default.removeItem(at: cacheDirectory)
        }
    }

    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        let assetIdentifier = UUID().uuidString
        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
        guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
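        // Both resources are stamped with the same identifier: the photo in its
        // Apple maker-note EXIF dictionary, the video as top-level QuickTime
        // metadata. Photos matches the pair through this shared identifier.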
        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { _videoURL in
            if let pairedVideoURL = _videoURL {
                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable: Any]) -> Void in
                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                        return
                    }
                    DispatchQueue.main.async {
                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
                    }
                })
            } else {
                DispatchQueue.main.async {
                    completion(nil, nil)
                }
            }
        }
    }
    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
        let assetResources = PHAssetResource.assetResources(for: livePhoto)
        let group = DispatchGroup()
        var keyPhotoURL: URL?
        var videoURL: URL?
        for resource in assetResources {
            let buffer = NSMutableData()
            let options = PHAssetResourceRequestOptions()
            options.isNetworkAccessAllowed = true
            group.enter()
            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { data in
                buffer.append(data)
            }) { error in
                if error == nil {
                    if resource.type == .pairedVideo {
                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    } else {
                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    }
                } else {
                    print(error as Any)
                }
                group.leave()
            }
        }
        group.notify(queue: DispatchQueue.main) {
            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
                return completion(nil)
            }
            completion((pairedPhotoURL, pairedVideoURL))
        }
    }
    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        if let cacheDirectory = cacheDirectory {
            extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
        } else {
            DispatchQueue.main.async {
                completion(nil)
            }
        }
    }
    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
        let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString, kUTTagClassFilenameExtension)?.takeRetainedValue()
        guard let ext = fileExtension else {
            return nil
        }
        var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
        fileUrl = fileUrl.appendingPathExtension(ext as String)
        do {
            try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
        } catch {
            print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
            return nil
        }
        return fileUrl
    }
    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else { return nil }
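        // Key "17" in the Apple maker-note dictionary is where Photos stores the
        // content identifier that pairs a still image with its Live Photo video.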
        let assetIdentifierKey = "17"
        let assetIdentifierInfo = [assetIdentifierKey: assetIdentifier]
        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
        CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
        CGImageDestinationFinalize(imageDestination)
        return destinationURL
    }
    var audioReader: AVAssetReader?
    var videoReader: AVAssetReader?
    var assetWriter: AVAssetWriter?
    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        let frameCount = videoAsset.countFrames(exact: false)
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            return completion(nil)
        }
        do {
            // Create the Asset Writer
            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            // Create Video Reader Output
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            // Create Video Writer Input
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: videoTrack.naturalSize.width, AVVideoHeightKey: videoTrack.naturalSize.height])
            videoWriterInput.transform = videoTrack.preferredTransform
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            // Create Audio Reader Output & Writer Input
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let _audioReader = try AVAssetReader(asset: videoAsset)
                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    _audioReader.add(_audioReaderOutput)
                    audioReader = _audioReader
                    audioReaderOutput = _audioReaderOutput
                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    _audioWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(_audioWriterInput)
                    audioWriterInput = _audioWriterInput
                } catch {
                    print(error)
                }
            }
            // Create necessary identifier metadata and still image time metadata
            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            // Start the Asset Writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: CMTime.zero)
            // Add still image metadata
            let _stillImagePercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()], timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
            // For end of writing / progress
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else { return }
                assetWriter?.finishWriting {
                    if self.assetWriter?.status == .completed {
                        completion(destinationURL)
                    } else {
                        completion(nil)
                    }
                }
            }
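            // Video and audio are drained on separate queues; whichever finishes
            // last triggers didCompleteWriting(), which finalizes the output file.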
            // Start writing video
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                            currentFrameCount += 1
                            let percent: CGFloat = CGFloat(currentFrameCount) / CGFloat(frameCount)
                            progress(percent)
                            if !videoWriterInput.append(sampleBuffer) {
                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
                                self.videoReader?.cancelReading()
                            }
                        } else {
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            // Start writing audio
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            print(error)
            completion(nil)
        }
    }
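    // The content identifier travels as a top-level "mdta" metadata item on the
    // output file; Photos matches it against the identifier embedded in the image.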
    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyContentIdentifier = "com.apple.quicktime.content.identifier"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }
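    // "still-image-time" is a timed metadata entry marking the frame Photos shows
    // as the still; the adaptor below writes it alongside the video samples.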
    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
                "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
                "com.apple.metadata.datatype.int8"
        ]
        var desc: CMFormatDescription? = nil
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
        let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }
    private func metadataItemForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = 0 as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}
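// End-to-end sketch (`movURL` is a hypothetical local .mov; illustrative only):
//
//     NCLivePhoto.generate(from: nil, videoURL: movURL, progress: { _ in }) { livePhoto, resources in
//         guard let resources = resources else { return }
//         NCLivePhoto.saveToLibrary(resources) { success in
//             print(success ? "Live Photo saved" : "save failed")
//         }
//     }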
fileprivate extension AVAsset {
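    // Frame counting: the inexact path estimates duration × nominalFrameRate; the
    // exact path decodes every sample buffer, which is accurate but much slower.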
    func countFrames(exact: Bool) -> Int {
        var frameCount = 0
        if let videoReader = try? AVAssetReader(asset: self) {
            if let videoTrack = self.tracks(withMediaType: .video).first {
                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
                if exact {
                    frameCount = 0
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
                    videoReader.add(videoReaderOutput)
                    videoReader.startReading()
                    // count frames
                    while videoReaderOutput.copyNextSampleBuffer() != nil {
                        frameCount += 1
                    }
                    videoReader.cancelReading()
                }
            }
        }
        return frameCount
    }
    func stillImageTime() -> CMTime? {
        var stillTime: CMTime? = nil
        if let videoReader = try? AVAssetReader(asset: self) {
            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
                videoReader.add(videoReaderOutput)
                videoReader.startReading()
                let keyStillImageTime = "com.apple.quicktime.still-image-time"
                let keySpaceQuickTimeMetadata = "mdta"
                var found = false
                while !found {
                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                            for item in group?.items ?? [] {
                                if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                                    stillTime = group?.timeRange.start
                                    found = true
                                    break
                                }
                            }
                        }
                    } else {
                        break
                    }
                }
                videoReader.cancelReading()
            }
        }
        return stillTime
    }
    func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) -> CMTimeRange {
        var time = self.duration
        var frameCount = inFrameCount
        if frameCount == 0 {
            frameCount = self.countFrames(exact: true)
        }
        let frameDuration = Int64(Float(time.value) / Float(frameCount))
        time.value = Int64(Float(time.value) * percent)
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }
    func getAssetFrame(percent: Float) -> UIImage? {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
        var time = self.duration
        time.value = Int64(Float(time.value) * percent)
        do {
            var actualTime = CMTime.zero
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: &actualTime)
            return UIImage(cgImage: imageRef)
        } catch let error as NSError {
            print("Image generation failed with error \(error)")
            return nil
        }
    }
}