//
//  LivePhoto.swift
//  NCLivePhoto
//
//  Created by Alexander Pagliaro on 7/25/18.
//  Copyright © 2018 Limit Point LLC. All rights reserved.
//

import UIKit
import AVFoundation
import MobileCoreServices
import Photos
import NextcloudKit
import UniformTypeIdentifiers

class NCLivePhoto {
    var livePhotoFile = ""
    var livePhotoFile2 = ""

    // MARK: PUBLIC

    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)

    /// Returns the paired image and video for the given PHLivePhoto.
    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        queue.async {
            shared.extractResources(from: livePhoto, completion: completion)
        }
    }

    /// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        queue.async {
            shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
        }
    }

    /// Saves a Live Photo to the Photo Library by passing the paired image and video.
    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            let options = PHAssetResourceCreationOptions()
            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
        }, completionHandler: { success, error in
            if error != nil {
                print(error as Any)
            }
            completion(success)
        })
    }
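
    // Example usage (a sketch; `imageURL` and `videoURL` stand for media files
    // you already have on disk and are not defined in this file):
    //
    //     NCLivePhoto.generate(from: imageURL, videoURL: videoURL, progress: { percent in
    //         print("progress: \(percent)")
    //     }) { _, resources in
    //         guard let resources else { return }
    //         NCLivePhoto.saveToLibrary(resources) { success in
    //             print("saved to library: \(success)")
    //         }
    //     }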

    // MARK: PRIVATE

    private static let shared = NCLivePhoto()
    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)

    lazy private var cacheDirectory: URL? = {
        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
            // fileExists(atPath:) expects a file-system path, not a file:// URL string.
            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
            }
            return fullDirectory
        }
        return nil
    }()

    deinit {
        clearCache()
    }

    private func generateKeyPhoto(from videoURL: URL) -> URL? {
        var percent: Float = 0.5
        let videoAsset = AVURLAsset(url: videoURL)
        if let stillImageTime = videoAsset.stillImageTime() {
            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
        }
        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
        guard let jpegData = imageFrame.jpegData(compressionQuality: 1) else { return nil }
        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
        do {
            try jpegData.write(to: url)
            return url
        } catch {
            return nil
        }
    }

    private func clearCache() {
        if let cacheDirectory = cacheDirectory {
            try? FileManager.default.removeItem(at: cacheDirectory)
        }
    }

    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        let assetIdentifier = UUID().uuidString
        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
        guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { _videoURL in
            if let pairedVideoURL = _videoURL {
                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable: Any]) -> Void in
                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                        return
                    }
                    DispatchQueue.main.async {
                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
                    }
                })
            } else {
                DispatchQueue.main.async {
                    completion(nil, nil)
                }
            }
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
        let assetResources = PHAssetResource.assetResources(for: livePhoto)
        let group = DispatchGroup()
        var keyPhotoURL: URL?
        var videoURL: URL?
        for resource in assetResources {
            let buffer = NSMutableData()
            let options = PHAssetResourceRequestOptions()
            options.isNetworkAccessAllowed = true
            group.enter()
            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { data in
                buffer.append(data)
            }) { error in
                if error == nil {
                    if resource.type == .pairedVideo {
                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    } else {
                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    }
                } else {
                    print(error as Any)
                }
                group.leave()
            }
        }
        group.notify(queue: DispatchQueue.main) {
            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
                return completion(nil)
            }
            completion((pairedPhotoURL, pairedVideoURL))
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        if let cacheDirectory = cacheDirectory {
            extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
        } else {
            // Always report back, even when the cache directory is unavailable.
            completion(nil)
        }
    }

    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
        // resource.uniformTypeIdentifier is a UTI string (e.g. "public.jpeg");
        // resolve it to a type and take its preferred filename extension.
        guard let ext = UTType(resource.uniformTypeIdentifier)?.preferredFilenameExtension else { return nil }
        var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
        fileUrl = fileUrl.appendingPathExtension(ext)
        do {
            try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
        } catch {
            print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
            return nil
        }
        return fileUrl
    }

    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, UTType.jpeg.identifier as CFString, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else { return nil }
        let assetIdentifierKey = "17"
        let assetIdentifierInfo = [assetIdentifierKey: assetIdentifier]
        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
        CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
        CGImageDestinationFinalize(imageDestination)
        return destinationURL
    }
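
    // "17" is the key Apple uses inside the (undocumented) MakerApple EXIF
    // dictionary to carry a Live Photo's content identifier. Pairing works
    // because the same identifier is written into the video's
    // "com.apple.quicktime.content.identifier" metadata (see
    // addAssetID(_:toVideo:saveTo:progress:completion:) below); Photos matches
    // the two resources by this shared value.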

    var audioReader: AVAssetReader?
    var videoReader: AVAssetReader?
    var assetWriter: AVAssetWriter?

    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        let frameCount = videoAsset.countFrames(exact: false)
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            return completion(nil)
        }
        do {
            // Create the Asset Writer
            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            // Create Video Reader Output
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            // Create Video Writer Input
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: videoTrack.naturalSize.width, AVVideoHeightKey: videoTrack.naturalSize.height])
            videoWriterInput.transform = videoTrack.preferredTransform
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            // Create Audio Reader Output & Writer Input
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let _audioReader = try AVAssetReader(asset: videoAsset)
                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    _audioReader.add(_audioReaderOutput)
                    audioReader = _audioReader
                    audioReaderOutput = _audioReaderOutput
                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    _audioWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(_audioWriterInput)
                    audioWriterInput = _audioWriterInput
                } catch {
                    print(error)
                }
            }
            // Create necessary identifier metadata and still image time metadata
            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            // Start the Asset Writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: CMTime.zero)
            // Add still image metadata
            let _stillImagePercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()], timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
            // For end of writing / progress
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else { return }
                assetWriter?.finishWriting {
                    if self.assetWriter?.status == .completed {
                        completion(destinationURL)
                    } else {
                        completion(nil)
                    }
                }
            }
            // Start writing video
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                            currentFrameCount += 1
                            let percent: CGFloat = CGFloat(currentFrameCount) / CGFloat(frameCount)
                            progress(percent)
                            if !videoWriterInput.append(sampleBuffer) {
                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
                                self.videoReader?.cancelReading()
                            }
                        } else {
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            // Start writing audio
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            print(error)
            completion(nil)
        }
    }
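
    // In summary, two pieces of metadata make the QuickTime file pair with the
    // still image: the asset-level "com.apple.quicktime.content.identifier"
    // item (matching the MakerApple "17" entry in the JPEG) and a timed
    // metadata track carrying "com.apple.quicktime.still-image-time", which
    // marks the frame Photos displays as the still.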

    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyContentIdentifier = "com.apple.quicktime.content.identifier"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }

    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
                "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
                "com.apple.metadata.datatype.int8"
        ]
        var desc: CMFormatDescription?
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
        let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }

    private func metadataItemForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = 0 as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}

fileprivate extension AVAsset {
    func countFrames(exact: Bool) -> Int {
        var frameCount = 0
        if let videoReader = try? AVAssetReader(asset: self) {
            if let videoTrack = self.tracks(withMediaType: .video).first {
                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
                if exact {
                    frameCount = 0
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
                    videoReader.add(videoReaderOutput)
                    videoReader.startReading()
                    // Count frames by draining the reader.
                    while videoReaderOutput.copyNextSampleBuffer() != nil {
                        frameCount += 1
                    }
                    videoReader.cancelReading()
                }
            }
        }
        return frameCount
    }

    func stillImageTime() -> CMTime? {
        var stillTime: CMTime?
        if let videoReader = try? AVAssetReader(asset: self) {
            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
                videoReader.add(videoReaderOutput)
                videoReader.startReading()
                let keyStillImageTime = "com.apple.quicktime.still-image-time"
                let keySpaceQuickTimeMetadata = "mdta"
                var found = false
                while !found {
                    guard let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() else { break }
                    if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                        let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                        for item in group?.items ?? [] {
                            // Compare the key space optionally instead of force-unwrapping it.
                            if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                                stillTime = group?.timeRange.start
                                // print("stillImageTime = \(CMTimeGetSeconds(stillTime!))")
                                found = true
                                break
                            }
                        }
                    }
                }
                videoReader.cancelReading()
            }
        }
        return stillTime
    }

    func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) -> CMTimeRange {
        var time = self.duration
        var frameCount = inFrameCount
        if frameCount == 0 {
            frameCount = self.countFrames(exact: true)
        }
        let frameDuration = Int64(Float(time.value) / Float(frameCount))
        time.value = Int64(Float(time.value) * percent)
        // print("stillImageTime = \(CMTimeGetSeconds(time))")
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }

    func getAssetFrame(percent: Float) -> UIImage? {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
        var time = self.duration
        time.value = Int64(Float(time.value) * percent)
        do {
            var actualTime = CMTime.zero
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: &actualTime)
            return UIImage(cgImage: imageRef)
        } catch let error as NSError {
            print("Image generation failed with error \(error)")
            return nil
        }
    }
}

extension NCLivePhoto {
    func setLivephoto(serverUrlfileNamePath: String,
                      livePhotoFile: String,
                      account: String,
                      options: NKRequestOptions = NKRequestOptions()) async -> (account: String, error: NKError) {
        await withUnsafeContinuation({ continuation in
            NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: livePhotoFile, account: account, options: options) { account, error in
                continuation.resume(returning: (account: account, error: error))
            }
        })
    }

    func setLivephotoUpload(metadata: tableMetadata) {
        guard NCGlobal.shared.capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28 else { return }
        livePhotoFile = metadata.livePhotoFile
        livePhotoFile2 = metadata.fileName
        if livePhotoFile.isEmpty {
            if metadata.classFile == NKCommon.TypeClassFile.image.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".mov"
            } else if metadata.classFile == NKCommon.TypeClassFile.video.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".jpg"
            }
        }
        guard metadata.isLivePhoto,
              !livePhotoFile.isEmpty,
              let metadata2 = NCManageDatabase.shared.getMetadata(predicate: NSPredicate(format: "account == %@ AND urlBase == %@ AND path == %@ AND fileName == %@ AND status == %d", metadata.account, metadata.urlBase, metadata.path, livePhotoFile, NCGlobal.shared.metadataStatusNormal)) else { return }
        let serverUrlfileNamePath1 = metadata.urlBase + metadata.path + metadata.fileName
        let serverUrlfileNamePath2 = metadata2.urlBase + metadata2.path + livePhotoFile
        Task {
            if metadata.livePhotoFile.isEmpty {
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath1, livePhotoFile: livePhotoFile, account: metadata.account)
            }
            if metadata2.livePhotoFile.isEmpty {
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath2, livePhotoFile: livePhotoFile2, account: metadata2.account)
            }
        }
    }

    func setLivePhoto(metadata1: tableMetadata, metadata2: tableMetadata) {
        // Proceed only when at least one side of the pair still lacks its
        // live-photo link; otherwise the checks below can never fire.
        guard NCGlobal.shared.capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28,
              (metadata1.livePhotoFile.isEmpty || metadata2.livePhotoFile.isEmpty) else { return }
        Task {
            if metadata1.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata1.urlBase + metadata1.path + metadata1.fileName
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata2.fileName, account: metadata2.account)
            }
            if metadata2.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata2.urlBase + metadata2.path + metadata2.fileName
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata1.fileName, account: metadata1.account)
            }
        }
    }
}
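
// Example (a sketch; `path`, `videoName`, and `account` are placeholders, not
// values defined in this file):
//
//     Task {
//         let result = await NCLivePhoto().setLivephoto(serverUrlfileNamePath: path,
//                                                       livePhotoFile: videoName,
//                                                       account: account)
//         print("setLivephoto finished for \(result.account): \(result.error)")
//     }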