//
//  LivePhoto.swift
//  NCLivePhoto
//
//  Created by Alexander Pagliaro on 7/25/18.
//  Copyright © 2018 Limit Point LLC. All rights reserved.
//

import UIKit
import AVFoundation
import MobileCoreServices
import Photos
import NextcloudKit
import UniformTypeIdentifiers
import Alamofire
class NCLivePhoto {
    var livePhotoFile = ""
    var livePhotoFile2 = ""

    // MARK: PUBLIC

    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)

    /// Returns the paired image and video for the given PHLivePhoto.
    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        queue.async {
            shared.extractResources(from: livePhoto, completion: completion)
        }
    }

    /// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        queue.async {
            shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
        }
    }

    /// Saves a Live Photo to the photo library by passing the paired image and video.
    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            let options = PHAssetResourceCreationOptions()
            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
        }, completionHandler: { success, error in
            if let error = error {
                print(error)
            }
            completion(success)
        })
    }
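
    /*
     Illustrative usage (a sketch, not part of the original API surface;
     `videoURL` and the call site are assumptions):

     NCLivePhoto.generate(from: nil, videoURL: videoURL, progress: { percent in
         print("Pairing progress: \(percent)")
     }, completion: { livePhoto, resources in
         guard let resources = resources else { return }
         NCLivePhoto.saveToLibrary(resources) { success in
             print("Saved to library: \(success)")
         }
     })
     */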

    // MARK: PRIVATE

    private static let shared = NCLivePhoto()
    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)

    lazy private var cacheDirectory: URL? = {
        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
            // fileExists(atPath:) expects a file-system path, not a URL string.
            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
            }
            return fullDirectory
        }
        return nil
    }()

    deinit {
        clearCache()
    }

    private func generateKeyPhoto(from videoURL: URL) -> URL? {
        var percent: Float = 0.5
        let videoAsset = AVURLAsset(url: videoURL)
        if let stillImageTime = videoAsset.stillImageTime() {
            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
        }
        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
        guard let jpegData = imageFrame.jpegData(compressionQuality: 1) else { return nil }
        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
        do {
            try jpegData.write(to: url)
            return url
        } catch {
            return nil
        }
    }

    private func clearCache() {
        if let cacheDirectory = cacheDirectory {
            try? FileManager.default.removeItem(at: cacheDirectory)
        }
    }

    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        let assetIdentifier = UUID().uuidString
        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
        guard let keyPhotoURL = _keyPhotoURL,
              let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { _videoURL in
            if let pairedVideoURL = _videoURL {
                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable: Any]) -> Void in
                    // Ignore the interim degraded result; wait for the final Live Photo.
                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                        return
                    }
                    DispatchQueue.main.async {
                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
                    }
                })
            } else {
                DispatchQueue.main.async {
                    completion(nil, nil)
                }
            }
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
        let assetResources = PHAssetResource.assetResources(for: livePhoto)
        let group = DispatchGroup()
        var keyPhotoURL: URL?
        var videoURL: URL?
        for resource in assetResources {
            let buffer = NSMutableData()
            let options = PHAssetResourceRequestOptions()
            options.isNetworkAccessAllowed = true
            group.enter()
            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { data in
                buffer.append(data)
            }) { error in
                if error == nil {
                    if resource.type == .pairedVideo {
                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    } else {
                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    }
                } else {
                    print(error as Any)
                }
                group.leave()
            }
        }
        group.notify(queue: DispatchQueue.main) {
            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
                return completion(nil)
            }
            completion((pairedPhotoURL, pairedVideoURL))
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        if let cacheDirectory = cacheDirectory {
            extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
        }
    }

    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
        // Map the resource's UTI to its preferred filename extension.
        guard let ext = UTType(resource.uniformTypeIdentifier)?.preferredFilenameExtension else { return nil }
        var fileUrl = directory.appendingPathComponent(UUID().uuidString)
        fileUrl = fileUrl.appendingPathExtension(ext)
        do {
            try resourceData.write(to: fileUrl, options: [.atomic])
        } catch {
            print("Could not save resource \(resource) to filepath \(fileUrl)")
            return nil
        }
        return fileUrl
    }
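
    // How pairing works: the image and the video are linked by a shared UUID.
    // The image carries it in the Apple MakerNote EXIF dictionary (key "17"),
    // and the video carries it as the QuickTime metadata item
    // "com.apple.quicktime.content.identifier". The two helpers below embed it.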

    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, UTType.jpeg.identifier as CFString, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else { return nil }
        // "17" is the Apple MakerNote key that holds the Live Photo content identifier.
        let assetIdentifierKey = "17"
        let assetIdentifierInfo = [assetIdentifierKey: assetIdentifier]
        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
        CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
        CGImageDestinationFinalize(imageDestination)
        return destinationURL
    }
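
    /*
     A minimal read-back sketch (illustrative, not part of the original file)
     to confirm the identifier landed in the MakerNote of the written image;
     `destinationURL` refers to the output of the function above:

     if let source = CGImageSourceCreateWithURL(destinationURL as CFURL, nil),
        let props = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [AnyHashable: Any],
        let makerApple = props[kCGImagePropertyMakerAppleDictionary] as? [String: Any] {
         print("Embedded identifier: \(makerApple["17"] ?? "none")")
     }
     */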

    var audioReader: AVAssetReader?
    var videoReader: AVAssetReader?
    var assetWriter: AVAssetWriter?

    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        let frameCount = videoAsset.countFrames(exact: false)
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            return completion(nil)
        }
        do {
            // Create the asset writer
            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            // Create the video reader output
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            // Create the video writer input
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: videoTrack.naturalSize.width, AVVideoHeightKey: videoTrack.naturalSize.height])
            videoWriterInput.transform = videoTrack.preferredTransform
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            // Create the audio reader output & writer input
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let _audioReader = try AVAssetReader(asset: videoAsset)
                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    _audioReader.add(_audioReaderOutput)
                    audioReader = _audioReader
                    audioReaderOutput = _audioReaderOutput
                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    _audioWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(_audioWriterInput)
                    audioWriterInput = _audioWriterInput
                } catch {
                    print(error)
                }
            }
            // Create the identifier metadata and the still-image-time metadata track
            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            // Start the asset writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: CMTime.zero)
            // Add the still image time metadata
            let _stillImagePercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()], timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
            // Track end of writing / progress
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else { return }
                assetWriter?.finishWriting {
                    if self.assetWriter?.status == .completed {
                        completion(destinationURL)
                    } else {
                        completion(nil)
                    }
                }
            }
            // Start writing video
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                            currentFrameCount += 1
                            let percent: CGFloat = CGFloat(currentFrameCount) / CGFloat(frameCount)
                            progress(percent)
                            if !videoWriterInput.append(sampleBuffer) {
                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
                                self.videoReader?.cancelReading()
                            }
                        } else {
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            // Start writing audio
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            print(error)
            completion(nil)
        }
    }

    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyContentIdentifier = "com.apple.quicktime.content.identifier"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }
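
    /*
     A corresponding read-back sketch for the video (illustrative; `movieURL`
     is an assumption): AVFoundation exposes the same key through the built-in
     identifier `.quickTimeMetadataContentIdentifier`.

     let asset = AVURLAsset(url: movieURL)
     let items = AVMetadataItem.metadataItems(from: asset.metadata,
                                              filteredByIdentifier: .quickTimeMetadataContentIdentifier)
     print("Content identifier: \(items.first?.stringValue ?? "none")")
     */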

    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
                "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
                "com.apple.metadata.datatype.int8"]
        var desc: CMFormatDescription?
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
        let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }

    private func metadataItemForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = 0 as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}

fileprivate extension AVAsset {
    func countFrames(exact: Bool) -> Int {
        var frameCount = 0
        if let videoReader = try? AVAssetReader(asset: self) {
            if let videoTrack = self.tracks(withMediaType: .video).first {
                // Estimate from duration × nominal frame rate; optionally count exactly.
                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
                if exact {
                    frameCount = 0
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
                    videoReader.add(videoReaderOutput)
                    videoReader.startReading()
                    // Count frames by draining the track's sample buffers.
                    while videoReaderOutput.copyNextSampleBuffer() != nil {
                        frameCount += 1
                    }
                    videoReader.cancelReading()
                }
            }
        }
        return frameCount
    }

    func stillImageTime() -> CMTime? {
        var stillTime: CMTime?
        if let videoReader = try? AVAssetReader(asset: self) {
            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
                videoReader.add(videoReaderOutput)
                videoReader.startReading()
                let keyStillImageTime = "com.apple.quicktime.still-image-time"
                let keySpaceQuickTimeMetadata = "mdta"
                var found = false
                while !found {
                    guard let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() else { break }
                    if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                        let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                        for item in group?.items ?? [] {
                            if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                                stillTime = group?.timeRange.start
                                found = true
                                break
                            }
                        }
                    }
                }
                videoReader.cancelReading()
            }
        }
        return stillTime
    }

    func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) -> CMTimeRange {
        var time = self.duration
        var frameCount = inFrameCount
        if frameCount == 0 {
            frameCount = self.countFrames(exact: true)
        }
        let frameDuration = Int64(Float(time.value) / Float(frameCount))
        time.value = Int64(Float(time.value) * percent)
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }

    func getAssetFrame(percent: Float) -> UIImage? {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
        var time = self.duration
        time.value = Int64(Float(time.value) * percent)
        do {
            var actualTime = CMTime.zero
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: &actualTime)
            return UIImage(cgImage: imageRef)
        } catch let error as NSError {
            print("Image generation failed with error \(error)")
            return nil
        }
    }
}

extension NCLivePhoto {
    func setLivephoto(serverUrlfileNamePath: String,
                      livePhotoFile: String,
                      account: String,
                      options: NKRequestOptions = NKRequestOptions()) async -> (account: String, responseData: AFDataResponse<Data?>?, error: NKError) {
        await withUnsafeContinuation({ continuation in
            NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: livePhotoFile, account: account, options: options) { account, responseData, error in
                continuation.resume(returning: (account: account, responseData: responseData, error: error))
            }
        })
    }
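
    /*
     Illustrative call site for the async wrapper above (a sketch; `path`,
     `file`, and `account` are assumptions, and the `.success` comparison
     follows the usual NKError convention):

     Task {
         let result = await NCLivePhoto().setLivephoto(serverUrlfileNamePath: path,
                                                       livePhotoFile: file,
                                                       account: account)
         if result.error == .success {
             print("Live photo link set for \(result.account)")
         }
     }
     */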

    func setLivephotoUpload(metadata: tableMetadata) {
        guard NCCapabilities.shared.getCapabilities(account: metadata.account).capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28 else { return }
        livePhotoFile = metadata.livePhotoFile
        livePhotoFile2 = metadata.fileName
        if livePhotoFile.isEmpty {
            if metadata.classFile == NKCommon.TypeClassFile.image.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".mov"
            } else if metadata.classFile == NKCommon.TypeClassFile.video.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".jpg"
            }
        }
        guard metadata.isLivePhoto,
              !livePhotoFile.isEmpty,
              let metadata2 = NCManageDatabase.shared.getMetadata(predicate: NSPredicate(format: "account == %@ AND urlBase == %@ AND path == %@ AND fileName == %@ AND status == %d",
                                                                                         metadata.account,
                                                                                         metadata.urlBase,
                                                                                         metadata.path,
                                                                                         livePhotoFile,
                                                                                         NCGlobal.shared.metadataStatusNormal)) else { return }
        let serverUrlfileNamePath1 = metadata.urlBase + metadata.path + metadata.fileName
        let serverUrlfileNamePath2 = metadata2.urlBase + metadata2.path + livePhotoFile
        Task {
            if metadata.livePhotoFile.isEmpty {
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath1, livePhotoFile: livePhotoFile, account: metadata.account)
            }
            if metadata2.livePhotoFile.isEmpty {
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath2, livePhotoFile: livePhotoFile2, account: metadata2.account)
            }
        }
    }

    func setLivePhoto(metadata1: tableMetadata, metadata2: tableMetadata) {
        // Proceed only when at least one of the pair still lacks its live-photo
        // link; otherwise there is nothing for the Task below to do.
        guard NCCapabilities.shared.getCapabilities(account: metadata1.account).capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28,
              (metadata1.livePhotoFile.isEmpty || metadata2.livePhotoFile.isEmpty) else { return }
        Task {
            if metadata1.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata1.urlBase + metadata1.path + metadata1.fileName
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata2.fileName, account: metadata2.account)
            }
            if metadata2.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata2.urlBase + metadata2.path + metadata2.fileName
                _ = await setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata1.fileName, account: metadata1.account)
            }
        }
    }
}