//
//  LivePhoto.swift
//  NCLivePhoto
//
//  Created by Alexander Pagliaro on 7/25/18.
//  Copyright © 2018 Limit Point LLC. All rights reserved.
//

import UIKit
import AVFoundation
import MobileCoreServices
import Photos
import NextcloudKit

class NCLivePhoto {

    var livePhotoFile = ""
    var livePhotoFile2 = ""

    // MARK: PUBLIC

    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)

    /// Returns the paired image and video for the given PHLivePhoto.
    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        queue.async {
            shared.extractResources(from: livePhoto, completion: completion)
        }
    }
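
    // Usage sketch (assumes a `livePhoto` obtained elsewhere, e.g. from a picker or fetch):
    //
    //     NCLivePhoto.extractResources(from: livePhoto) { resources in
    //         guard let resources = resources else { return }
    //         print("paired image:", resources.pairedImage)
    //         print("paired video:", resources.pairedVideo)
    //     }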

    /// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        queue.async {
            shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
        }
    }
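
    // Usage sketch (hypothetical local `imageURL`/`videoURL`; pass `nil` for the image
    // to have a key photo extracted from the video instead):
    //
    //     NCLivePhoto.generate(from: imageURL, videoURL: videoURL) { percent in
    //         print("progress:", percent)
    //     } completion: { livePhoto, resources in
    //         guard let livePhoto = livePhoto, let resources = resources else { return }
    //         // Display `livePhoto` in a PHLivePhotoView, or save `resources` to the library.
    //     }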

    /// Save a Live Photo to the Photo Library by passing the paired image and video.
    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            let creationRequest = PHAssetCreationRequest.forAsset()
            let options = PHAssetResourceCreationOptions()
            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
        }, completionHandler: { success, error in
            if let error = error {
                print(error)
            }
            completion(success)
        })
    }
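
    // Usage sketch: chaining generation and saving. Assumes photo-library add
    // permission (NSPhotoLibraryAddUsageDescription) has already been granted.
    //
    //     NCLivePhoto.generate(from: nil, videoURL: videoURL, progress: { _ in }) { _, resources in
    //         guard let resources = resources else { return }
    //         NCLivePhoto.saveToLibrary(resources) { success in
    //             print(success ? "saved" : "save failed")
    //         }
    //     }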

    // MARK: PRIVATE

    private static let shared = NCLivePhoto()
    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)

    lazy private var cacheDirectory: URL? = {
        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
            // fileExists(atPath:) expects a plain filesystem path, not a "file://" URL
            // string, so use `path` rather than `absoluteString` here.
            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
            }
            return fullDirectory
        }
        return nil
    }()

    deinit {
        clearCache()
    }

    private func generateKeyPhoto(from videoURL: URL) -> URL? {
        var percent: Float = 0.5
        let videoAsset = AVURLAsset(url: videoURL)
        if let stillImageTime = videoAsset.stillImageTime() {
            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
        }
        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
        guard let jpegData = imageFrame.jpegData(compressionQuality: 1) else { return nil }
        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
        do {
            try jpegData.write(to: url)
            return url
        } catch {
            return nil
        }
    }

    private func clearCache() {
        if let cacheDirectory = cacheDirectory {
            try? FileManager.default.removeItem(at: cacheDirectory)
        }
    }

    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        // A Live Photo is paired by a shared asset identifier: the same UUID is written
        // into the image's MakerApple metadata and the video's QuickTime metadata.
        let assetIdentifier = UUID().uuidString
        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
        guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { _videoURL in
            if let pairedVideoURL = _videoURL {
                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable: Any]) -> Void in
                    // Skip the degraded (preview) callback; wait for the full-quality result.
                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                        return
                    }
                    DispatchQueue.main.async {
                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
                    }
                })
            } else {
                DispatchQueue.main.async {
                    completion(nil, nil)
                }
            }
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
        let assetResources = PHAssetResource.assetResources(for: livePhoto)
        let group = DispatchGroup()
        var keyPhotoURL: URL?
        var videoURL: URL?
        for resource in assetResources {
            let buffer = NSMutableData()
            let options = PHAssetResourceRequestOptions()
            options.isNetworkAccessAllowed = true
            group.enter()
            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { data in
                buffer.append(data)
            }) { error in
                if error == nil {
                    if resource.type == .pairedVideo {
                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    } else {
                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    }
                } else {
                    print(error as Any)
                }
                group.leave()
            }
        }
        group.notify(queue: DispatchQueue.main) {
            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
                return completion(nil)
            }
            completion((pairedPhotoURL, pairedVideoURL))
        }
    }

    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        if let cacheDirectory = cacheDirectory {
            extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
        } else {
            // Without a cache directory there is nowhere to write; report failure
            // rather than silently never calling the completion handler.
            completion(nil)
        }
    }

    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
        let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString, kUTTagClassFilenameExtension)?.takeRetainedValue()
        guard let ext = fileExtension else {
            return nil
        }
        var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
        fileUrl = fileUrl.appendingPathExtension(ext as String)
        do {
            try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
        } catch {
            print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
            return nil
        }
        return fileUrl
    }

    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else { return nil }
        // Key "17" in the MakerApple EXIF dictionary carries the Live Photo content
        // identifier that pairs the still image with its video.
        let assetIdentifierKey = "17"
        let assetIdentifierInfo = [assetIdentifierKey: assetIdentifier]
        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
        CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
        CGImageDestinationFinalize(imageDestination)
        return destinationURL
    }
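
    // Verification sketch (hypothetical `url` pointing at a paired image written above):
    //
    //     if let source = CGImageSourceCreateWithURL(url as CFURL, nil),
    //        let props = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [AnyHashable: Any],
    //        let makerApple = props[kCGImagePropertyMakerAppleDictionary] as? [AnyHashable: Any] {
    //         print("content identifier:", makerApple["17"] ?? "none")
    //     }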

    var audioReader: AVAssetReader?
    var videoReader: AVAssetReader?
    var assetWriter: AVAssetWriter?

    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        let frameCount = videoAsset.countFrames(exact: false)
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            return completion(nil)
        }
        do {
            // Create the Asset Writer
            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            // Create Video Reader Output
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            // Create Video Writer Input
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: videoTrack.naturalSize.width, AVVideoHeightKey: videoTrack.naturalSize.height])
            videoWriterInput.transform = videoTrack.preferredTransform
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            // Create Audio Reader Output & Writer Input
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let _audioReader = try AVAssetReader(asset: videoAsset)
                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    _audioReader.add(_audioReaderOutput)
                    audioReader = _audioReader
                    audioReaderOutput = _audioReaderOutput
                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    _audioWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(_audioWriterInput)
                    audioWriterInput = _audioWriterInput
                } catch {
                    print(error)
                }
            }
            // Create necessary identifier metadata and still image time metadata
            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            // Start the Asset Writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: CMTime.zero)
            // Add still image metadata
            let _stillImagePercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()], timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
            // For end of writing / progress
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else { return }
                assetWriter?.finishWriting {
                    if self.assetWriter?.status == .completed {
                        completion(destinationURL)
                    } else {
                        completion(nil)
                    }
                }
            }
            // Start writing video
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                            currentFrameCount += 1
                            let percent: CGFloat = CGFloat(currentFrameCount) / CGFloat(frameCount)
                            progress(percent)
                            if !videoWriterInput.append(sampleBuffer) {
                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
                                self.videoReader?.cancelReading()
                            }
                        } else {
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            // Start writing audio
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            print(error)
            completion(nil)
        }
    }

    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyContentIdentifier = "com.apple.quicktime.content.identifier"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }

    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
                "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
                "com.apple.metadata.datatype.int8"
        ]
        var desc: CMFormatDescription?
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
        let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
    }

    private func metadataItemForStillImageTime() -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
        item.value = 0 as (NSCopying & NSObjectProtocol)?
        item.dataType = "com.apple.metadata.datatype.int8"
        return item
    }
}

fileprivate extension AVAsset {
    func countFrames(exact: Bool) -> Int {
        var frameCount = 0
        if let videoReader = try? AVAssetReader(asset: self) {
            if let videoTrack = self.tracks(withMediaType: .video).first {
                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
                if exact {
                    frameCount = 0
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
                    videoReader.add(videoReaderOutput)
                    videoReader.startReading()
                    // Count frames by decoding every sample buffer.
                    while videoReaderOutput.copyNextSampleBuffer() != nil {
                        frameCount += 1
                    }
                    videoReader.cancelReading()
                }
            }
        }
        return frameCount
    }
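
    // Usage sketch (fileprivate helper, so callable only within this file;
    // hypothetical local `movURL`):
    //
    //     let asset = AVURLAsset(url: movURL)
    //     let rough = asset.countFrames(exact: false)  // duration × nominalFrameRate
    //     let exact = asset.countFrames(exact: true)   // decodes and counts sample buffers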

    func stillImageTime() -> CMTime? {
        var stillTime: CMTime?
        if let videoReader = try? AVAssetReader(asset: self) {
            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
                videoReader.add(videoReaderOutput)
                videoReader.startReading()
                let keyStillImageTime = "com.apple.quicktime.still-image-time"
                let keySpaceQuickTimeMetadata = "mdta"
                var found = false
                while !found {
                    guard let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() else { break }
                    if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                        let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                        for item in group?.items ?? [] {
                            // Compare keySpace optionally instead of force-unwrapping it.
                            if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                                stillTime = group?.timeRange.start
                                found = true
                                break
                            }
                        }
                    }
                }
                videoReader.cancelReading()
            }
        }
        return stillTime
    }

    func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) -> CMTimeRange {
        var time = self.duration
        var frameCount = inFrameCount
        if frameCount == 0 {
            frameCount = self.countFrames(exact: true)
        }
        // Guard against a zero frame count, which would otherwise divide by zero below.
        guard frameCount > 0 else {
            return CMTimeRange(start: time, duration: .zero)
        }
        let frameDuration = Int64(Float(time.value) / Float(frameCount))
        time.value = Int64(Float(time.value) * percent)
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }

    func getAssetFrame(percent: Float) -> UIImage? {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
        var time = self.duration
        time.value = Int64(Float(time.value) * percent)
        do {
            var actualTime = CMTime.zero
            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: &actualTime)
            return UIImage(cgImage: imageRef)
        } catch let error as NSError {
            print("Image generation failed with error \(error)")
            return nil
        }
    }
}

extension NCLivePhoto {
    func setLivephotoUpload(metadata: tableMetadata) {
        guard NCGlobal.shared.capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28 else { return }
        livePhotoFile = metadata.livePhotoFile
        livePhotoFile2 = metadata.fileName
        if livePhotoFile.isEmpty {
            if metadata.classFile == NKCommon.TypeClassFile.image.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".mov"
            } else if metadata.classFile == NKCommon.TypeClassFile.video.rawValue {
                livePhotoFile = (metadata.fileName as NSString).deletingPathExtension + ".jpg"
            }
        }
        guard metadata.livePhoto,
              !livePhotoFile.isEmpty,
              let metadata2 = NCManageDatabase.shared.getMetadata(predicate: NSPredicate(format: "account == %@ AND urlBase == %@ AND path == %@ AND fileName == %@ AND status == %d", metadata.account, metadata.urlBase, metadata.path, livePhotoFile, NCGlobal.shared.metadataStatusNormal)) else { return }
        let serverUrlfileNamePath1 = metadata.urlBase + metadata.path + metadata.fileName
        let serverUrlfileNamePath2 = metadata2.urlBase + metadata2.path + livePhotoFile
        Task {
            if metadata.livePhotoFile.isEmpty {
                _ = await NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath1, livePhotoFile: livePhotoFile)
            }
            if metadata2.livePhotoFile.isEmpty {
                _ = await NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath2, livePhotoFile: livePhotoFile2)
            }
        }
    }

    func setLivePhoto(metadata1: tableMetadata, metadata2: tableMetadata) {
        // Proceed only when at least one of the two entries still lacks its
        // livePhotoFile link; requiring both to be non-empty would leave the
        // checks inside the Task unreachable.
        guard NCGlobal.shared.capabilityServerVersionMajor >= NCGlobal.shared.nextcloudVersion28,
              (metadata1.livePhotoFile.isEmpty || metadata2.livePhotoFile.isEmpty) else { return }
        Task {
            if metadata1.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata1.urlBase + metadata1.path + metadata1.fileName
                _ = await NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata2.fileName)
            }
            if metadata2.livePhotoFile.isEmpty {
                let serverUrlfileNamePath = metadata2.urlBase + metadata2.path + metadata2.fileName
                _ = await NextcloudKit.shared.setLivephoto(serverUrlfileNamePath: serverUrlfileNamePath, livePhotoFile: metadata1.fileName)
            }
        }
    }
}