marinofaggiana, 3 years ago
commit 881bb5d543

+ 3 - 1
iOSClient/AppDelegate.swift

@@ -63,7 +63,9 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
     var pasteboardOcIds: [String] = []
     var shares: [tableShare] = []
     var timerErrorNetworking: Timer?
-        
+    
+    var player: AVPlayer?
+
     func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
         
         let userAgent = CCUtility.getUserAgent() as String

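The hunks below reference an `appDelegate` member from NCPlayer, NCPlayerToolBar and NCViewerMedia in order to reach the `player` property that this commit moves onto AppDelegate. A minimal sketch of how such a shared reference is commonly resolved, assuming the project does not already expose one (the helper name here is illustrative, not taken from the repository):

import UIKit
import AVFoundation

// Hypothetical helper: resolve the single AppDelegate instance so every media
// class talks to the same AVPlayer. The real classes may already hold an
// `appDelegate` property set up elsewhere in the project.
extension NSObject {
    var appDelegateRef: AppDelegate? {
        UIApplication.shared.delegate as? AppDelegate
    }
}

// Usage sketch, mirroring what the NCPlayer initializer in this commit does:
// pause whatever is currently playing before swapping in the new item.
// appDelegateRef?.player?.pause()
// appDelegateRef?.player = AVPlayer(url: url)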
+ 22 - 23
iOSClient/Viewer/NCViewerMedia/NCPlayer/NCPlayer.swift

@@ -35,53 +35,53 @@ class NCPlayer: NSObject {
     private var observerAVPlayerItemDidPlayToEndTime: Any?
     
     public var metadata: tableMetadata?
-    public var player: AVPlayer?
     public var videoLayer: AVPlayerLayer?
 
     init(url: URL, imageVideoContainer: imageVideoContainerView?, playerToolBar: NCPlayerToolBar?, metadata: tableMetadata, detailView: NCViewerMediaDetailView?) {
         super.init()
-        
-        print("Play URL: \(url)")
+
         var timeSeek: CMTime = .zero
-        
-        self.player = AVPlayer(url: url)
-        
+
+        print("Play URL: \(url)")
+        appDelegate.player?.pause()
+        appDelegate.player = AVPlayer(url: url)
+
         self.playerToolBar = playerToolBar
         self.metadata = metadata
         self.detailView = detailView
         
         if metadata.livePhoto {
-            self.player?.isMuted = false
+            appDelegate.player?.isMuted = false
         } else if metadata.classFile == NCCommunicationCommon.typeClassFile.audio.rawValue {
-            self.player?.isMuted = CCUtility.getAudioMute()
+            appDelegate.player?.isMuted = CCUtility.getAudioMute()
         } else {
-            self.player?.isMuted = CCUtility.getAudioMute()
+            appDelegate.player?.isMuted = CCUtility.getAudioMute()
             if let time = NCManageDatabase.shared.getVideoTime(metadata: metadata) {
                 timeSeek = time
             }
         }
-        self.player?.seek(to: timeSeek)
+        appDelegate.player?.seek(to: timeSeek)
         
         // At end go back to start & show toolbar
-        observerAVPlayerItemDidPlayToEndTime = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: self.player?.currentItem, queue: .main) { (notification) in
-            if let item = notification.object as? AVPlayerItem, let currentItem = self.player?.currentItem, item == currentItem {
+        observerAVPlayerItemDidPlayToEndTime = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: appDelegate.player?.currentItem, queue: .main) { (notification) in
+            if let item = notification.object as? AVPlayerItem, let currentItem = self.appDelegate.player?.currentItem, item == currentItem {
                 self.videoSeek(time: .zero)
                 self.playerToolBar?.showToolBar(metadata: metadata, detailView: nil)
                 NCKTVHTTPCache.shared.saveCache(metadata: metadata)
             }
         }
         
-        self.player?.currentItem?.asset.loadValuesAsynchronously(forKeys: ["duration", "playable"], completionHandler: {
-            if let durationTime: CMTime = (self.player?.currentItem?.asset.duration) {
+        appDelegate.player?.currentItem?.asset.loadValuesAsynchronously(forKeys: ["duration", "playable"], completionHandler: {
+            if let durationTime: CMTime = (self.appDelegate.player?.currentItem?.asset.duration) {
                 var error: NSError? = nil
-                let status = self.player?.currentItem?.asset.statusOfValue(forKey: "playable", error: &error)
+                let status = self.appDelegate.player?.currentItem?.asset.statusOfValue(forKey: "playable", error: &error)
                 switch status {
                 case .loaded:
                     DispatchQueue.main.async {
                         if let imageVideoContainer = imageVideoContainer {
                             
                             self.imageVideoContainer = imageVideoContainer
-                            self.videoLayer = AVPlayerLayer(player: self.player)
+                            self.videoLayer = AVPlayerLayer(player: self.appDelegate.player)
                             self.videoLayer!.frame = imageVideoContainer.bounds
                             self.videoLayer!.videoGravity = .resizeAspect
                             
@@ -137,7 +137,7 @@ class NCPlayer: NSObject {
     @objc func applicationDidEnterBackground(_ notification: NSNotification) {
         
         if metadata?.classFile == NCCommunicationCommon.typeClassFile.video.rawValue {
-            self.player?.pause()
+            appDelegate.player?.pause()
         }
     }
     
@@ -150,12 +150,12 @@ class NCPlayer: NSObject {
     
     func videoPlay() {
                 
-        self.player?.play()
+        appDelegate.player?.play()
     }
     
     func videoPause() {
         
-        self.player?.pause()
+        appDelegate.player?.pause()
     }
     
     func saveTime(_ time: CMTime) {
@@ -168,7 +168,7 @@ class NCPlayer: NSObject {
     
     func videoSeek(time: CMTime) {
         
-        self.player?.seek(to: time)
+        appDelegate.player?.seek(to: time)
         self.saveTime(time)
     }
     
@@ -183,7 +183,6 @@ class NCPlayer: NSObject {
         
         self.videoLayer?.removeFromSuperlayer()
         
-        self.player = nil
         self.videoLayer = nil
         self.observerAVPlayerItemDidPlayToEndTime = nil
         self.imageVideoContainer = nil
@@ -192,14 +191,14 @@ class NCPlayer: NSObject {
     }
         
     func generatorImagePreview() {
-        guard let time = self.player?.currentTime() else { return }
+        guard let time = appDelegate.player?.currentTime() else { return }
         guard let metadata = self.metadata else { return }
         if metadata.livePhoto { return }
         if metadata.classFile == NCCommunicationCommon.typeClassFile.audio.rawValue { return }
 
         var image: UIImage?
 
-        if let asset = self.player?.currentItem?.asset {
+        if let asset = appDelegate.player?.currentItem?.asset {
 
             do {
                 let fileNamePreviewLocalPath = CCUtility.getDirectoryProviderStoragePreviewOcId(metadata.ocId, etag: metadata.etag)!

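The `generatorImagePreview()` hunk above is truncated after the asset lookup. As a hedged sketch of how a still frame at the current playback time is typically produced with AVFoundation (an assumption, not code copied from the project):

import AVFoundation
import UIKit

// Assumed approach: grab one frame at `time` from the asset currently loaded
// in the shared player and turn it into a UIImage for the preview cache.
func previewImage(from asset: AVAsset, at time: CMTime) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true   // respect video rotation
    do {
        let cgImage = try generator.copyCGImage(at: time, actualTime: nil)
        return UIImage(cgImage: cgImage)
    } catch {
        print("Preview generation failed: \(error)")
        return nil
    }
}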
+ 14 - 14
iOSClient/Viewer/NCViewerMedia/NCPlayer/NCPlayerToolBar.swift

@@ -94,7 +94,7 @@ class NCPlayerToolBar: UIView {
         print("deinit NCPlayerToolBar")
         
         if self.timeObserver != nil {
-            self.ncplayer?.player?.removeTimeObserver(self.timeObserver!)
+            appDelegate.player?.removeTimeObserver(self.timeObserver!)
         }
     }
     
@@ -117,9 +117,9 @@ class NCPlayerToolBar: UIView {
         }
         updateToolBar(timeSeek: timeSeek)
         
-        self.timeObserver = ncplayer.player?.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, preferredTimescale: 1), queue: .main, using: { (CMTime) in
+        self.timeObserver = appDelegate.player?.addPeriodicTimeObserver(forInterval: CMTimeMakeWithSeconds(1, preferredTimescale: 1), queue: .main, using: { (CMTime) in
             
-            if ncplayer.player?.currentItem?.status == .readyToPlay {
+            if self.appDelegate.player?.currentItem?.status == .readyToPlay {
                 if self.isHidden == false {
                     self.updateToolBar()
                 }
@@ -168,10 +168,10 @@ class NCPlayerToolBar: UIView {
     public func updateToolBar(timeSeek: CMTime? = nil) {
 
         var namedPlay = "play.fill"
-        var currentTime = ncplayer?.player?.currentTime() ?? .zero
+        var currentTime = appDelegate.player?.currentTime() ?? .zero
         currentTime = currentTime.convertScale(1000, method: .default)
         
-        if ncplayer?.player?.rate == 1 { namedPlay = "pause.fill"}
+        if appDelegate.player?.rate == 1 { namedPlay = "pause.fill"}
         
         if timeSeek != nil {
             playbackSlider.value = Float(timeSeek!.value)
@@ -223,7 +223,7 @@ class NCPlayerToolBar: UIView {
             
             switch touchEvent.phase {
             case .began:
-                wasInPlay = ncplayer?.player?.rate == 1 ? true : false
+                wasInPlay = appDelegate.player?.rate == 1 ? true : false
                 ncplayer?.videoPause()
                 playbackSliderEvent = .began
             case .moved:
@@ -253,16 +253,16 @@ class NCPlayerToolBar: UIView {
     
     @IBAction func playerPause(_ sender: Any) {
         
-        if ncplayer?.player?.timeControlStatus == .playing {
+        if appDelegate.player?.timeControlStatus == .playing {
             ncplayer?.videoPause()
-            if let time = ncplayer?.player?.currentTime() {
+            if let time = appDelegate.player?.currentTime() {
                 ncplayer?.saveTime(time)
             }
-        } else if ncplayer?.player?.timeControlStatus == .paused {
+        } else if appDelegate.player?.timeControlStatus == .paused {
             ncplayer?.videoPlay()
-        } else if ncplayer?.player?.timeControlStatus == .waitingToPlayAtSpecifiedRate {
+        } else if appDelegate.player?.timeControlStatus == .waitingToPlayAtSpecifiedRate {
             print("timeControlStatus.waitingToPlayAtSpecifiedRate")
-            if let reason = ncplayer?.player?.reasonForWaitingToPlay {
+            if let reason = appDelegate.player?.reasonForWaitingToPlay {
                 switch reason {
                 case .evaluatingBufferingRate:
                     print("reasonForWaitingToPlay.evaluatingBufferingRate")
@@ -282,13 +282,13 @@ class NCPlayerToolBar: UIView {
         let mute = CCUtility.getAudioMute()
         
         CCUtility.setAudioMute(!mute)
-        ncplayer?.player?.isMuted = !mute
+        appDelegate.player?.isMuted = !mute
         updateToolBar()
     }
     
     @IBAction func forwardButtonSec(_ sender: Any) {
         guard let ncplayer = ncplayer else { return }
-        guard let player = ncplayer.player else { return }
+        guard let player = appDelegate.player else { return }
         
         let currentTime = player.currentTime()
         let newTime = CMTimeAdd(currentTime, timeToAdd)
@@ -302,7 +302,7 @@ class NCPlayerToolBar: UIView {
     
     @IBAction func backButtonSec(_ sender: Any) {
         guard let ncplayer = ncplayer else { return }
-        guard let player = ncplayer.player else { return }
+        guard let player = appDelegate.player else { return }
         
         let currentTime = player.currentTime()
         let newTime = CMTimeSubtract(currentTime, timeToAdd)

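The forward/back button hunks build the target time with CMTimeAdd/CMTimeSubtract on the shared player before seeking. A minimal sketch of that seek step, including the bounds check the truncated hunks do not show (the clamping is an assumption):

import AVFoundation

// Illustrative helper: skip the given player by `seconds`, clamping the target
// between zero and the item's duration so the slider and toolbar stay in range.
func skip(player: AVPlayer, by seconds: Double) {
    guard let item = player.currentItem else { return }
    let delta = CMTimeMakeWithSeconds(seconds, preferredTimescale: 1000)
    var newTime = CMTimeAdd(player.currentTime(), delta)
    if CMTimeCompare(newTime, .zero) < 0 { newTime = .zero }
    if CMTimeCompare(newTime, item.duration) > 0 { newTime = item.duration }
    player.seek(to: newTime)
}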
+ 2 - 2
iOSClient/Viewer/NCViewerMedia/NCViewerMedia.swift

@@ -115,7 +115,7 @@ class NCViewerMedia: UIViewController {
     override func viewWillDisappear(_ animated: Bool) {
         super.viewWillDisappear(animated)
         
-        if let player = currentViewController.ncplayer?.player {
+        if let player = appDelegate.player {
             if player.rate == 1 {
                 player.pause()
                 currentViewController.ncplayer?.saveTime(player.currentTime())
@@ -434,7 +434,7 @@ extension NCViewerMedia: UIPageViewControllerDelegate, UIPageViewControllerDataS
     func pageViewController(_ pageViewController: UIPageViewController, willTransitionTo pendingViewControllers: [UIViewController]) {
         
         // Save time video
-        if let player = currentViewController.ncplayer?.player {
+        if let player = appDelegate.player {
             if player.rate == 1 {
                 currentViewController.ncplayer?.saveTime(player.currentTime())
             }