//
// NCAudioRecorderViewController.swift
// Nextcloud
//
// Created by Marino Faggiana on 08/03/19.
// Copyright (c) 2019 Marino Faggiana. All rights reserved.
//
// Author Marino Faggiana <marino.faggiana@nextcloud.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
// --------------------------------
// Based on code of Venkat Kukunuru
// --------------------------------

import UIKit
import AVFoundation
import QuartzCore
import NextcloudKit

class NCAudioRecorderViewController: UIViewController, NCAudioRecorderDelegate {
    @IBOutlet weak var contentContainerView: UIView!
    @IBOutlet weak var durationLabel: UILabel!
    @IBOutlet weak var startStopLabel: UILabel!
    @IBOutlet weak var voiceRecordHUD: VoiceRecordHUD!

    var recording: NCAudioRecorder!
    var startDate: Date = Date()
    var fileName: String = ""
    var controller: NCMainTabBarController!
    var session: NCSession.Session {
        NCSession.shared.getSession(controller: controller)
    }

    // MARK: - View Life Cycle
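    // Configures the HUD and labels, then asynchronously reserves a unique file name and prepares the recorder.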
    override func viewDidLoad() {
        super.viewDidLoad()

        voiceRecordHUD.update(0.0)
        durationLabel.text = ""
        startStopLabel.text = NSLocalizedString("_wait_", comment: "")
        view.backgroundColor = .clear
        contentContainerView.backgroundColor = UIColor.lightGray
        voiceRecordHUD.fillColor = UIColor.green

        Task {
            self.fileName = await NCNetworking.shared.createFileName(fileNameBase: NSLocalizedString("_untitled_", comment: "") + ".m4a", account: self.session.account, serverUrl: controller.currentServerUrl())
            recording = NCAudioRecorder(to: self.fileName)
            recording.delegate = self
            do {
                try self.recording.prepare()
                startStopLabel.text = NSLocalizedString("_voice_memo_start_", comment: "")
            } catch {
                print(error)
            }
        }
    }

    // MARK: - Action
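    // Invoked when the user touches the view: stops an active recording, otherwise dismisses the controller.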
    @IBAction func touchViewController() {
        if recording.state == .record {
            startStop()
        } else {
            dismiss(animated: true)
        }
    }

    @IBAction func startStop() {
        if recording.state == .record {
            recording.stop()
            voiceRecordHUD.update(0.0)

            dismiss(animated: true) {
                self.uploadMetadata()
            }
        } else {
            do {
                try recording.record()
                startDate = Date()
                startStopLabel.text = NSLocalizedString("_voice_memo_stop_", comment: "")
            } catch {
                print(error)
            }
        }
    }
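
    // Builds the upload metadata for the recorded file, copies it from the temporary
    // directory into the provider storage, and queues it for background upload.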
    func uploadMetadata() {
        let fileNamePath = NSTemporaryDirectory() + self.fileName
        let metadata = NCManageDatabase.shared.createMetadata(fileName: fileName,
                                                              fileNameView: fileName,
                                                              ocId: UUID().uuidString,
                                                              serverUrl: controller.currentServerUrl(),
                                                              url: "",
                                                              contentType: "",
                                                              session: self.session,
                                                              sceneIdentifier: self.controller?.sceneIdentifier)

        metadata.session = NCNetworking.shared.sessionUploadBackground
        metadata.sessionSelector = NCGlobal.shared.selectorUploadFile
        metadata.status = NCGlobal.shared.metadataStatusWaitUpload
        metadata.sessionDate = Date()
        metadata.size = NCUtilityFileSystem().getFileSize(filePath: fileNamePath)

        NCUtilityFileSystem().copyFile(atPath: fileNamePath, toPath: NCUtilityFileSystem().getDirectoryProviderStorageOcId(metadata.ocId, fileNameView: metadata.fileNameView))
        NCNetworkingProcess.shared.createProcessUploads(metadatas: [metadata])
    }
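
    // NCAudioRecorderDelegate callback: maps the recorder's peak power into the
    // 0.2...1.0 range for the HUD fill and refreshes the elapsed-time label.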
    func audioMeterDidUpdate(_ db: Float) {
        // print("db level: %f", db)
        guard let recorder = recording.recorder else { return }

        recorder.updateMeters()
        let alpha = 0.05
        let peakPower = pow(10, alpha * Double(recorder.peakPower(forChannel: 0)))
        var rate: Double = 0.0
        if peakPower <= 0.2 {
            rate = 0.2
        } else if peakPower > 0.9 {
            rate = 1.0
        } else {
            rate = peakPower
        }

        voiceRecordHUD.update(CGFloat(rate))
        voiceRecordHUD.fillColor = UIColor.green

        let formatter = DateComponentsFormatter()
        formatter.allowedUnits = [.second]
        formatter.unitsStyle = .full
        durationLabel.text = formatter.string(from: startDate, to: Date())
    }
}

@objc public protocol NCAudioRecorderDelegate: AVAudioRecorderDelegate {
    @objc optional func audioMeterDidUpdate(_ dB: Float)
}
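
// Thin wrapper around AVAudioRecorder that records into the temporary directory and,
// when the delegate implements audioMeterDidUpdate(_:), reports level metering via a CADisplayLink.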
open class NCAudioRecorder: NSObject {
    public enum State: Int {
        case none, record, play
    }

    static var directory: String {
        return NSTemporaryDirectory()
    }

    open weak var delegate: NCAudioRecorderDelegate?
    open fileprivate(set) var url: URL
    open fileprivate(set) var state: State = .none

    open var bitRate = 192000
    open var sampleRate = 44100.0
    open var channels = 1

    var recorder: AVAudioRecorder?
    fileprivate var player: AVAudioPlayer?
    fileprivate var link: CADisplayLink?

    var metering: Bool {
        return delegate?.responds(to: #selector(NCAudioRecorderDelegate.audioMeterDidUpdate(_:))) == true
    }

    // MARK: - Initializers
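    // Configures the shared AVAudioSession for recording with speaker output and activates it;
    // the session is deactivated again in deinit.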
    public init(to fileName: String) {
        url = URL(fileURLWithPath: NCAudioRecorder.directory).appendingPathComponent(fileName)
        super.init()

        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord)
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
    }

    deinit {
        print("deinit NCAudioRecorder")

        do {
            try AVAudioSession.sharedInstance().setActive(false)
        } catch {
            print(error)
        }
    }

    // MARK: - Record
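    // Creates the AVAudioRecorder with Apple Lossless settings and enables metering
    // only when the delegate implements audioMeterDidUpdate(_:).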
    open func prepare() throws {
        let settings: [String: AnyObject] = [
            AVFormatIDKey: NSNumber(value: Int32(kAudioFormatAppleLossless) as Int32),
            AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue as AnyObject,
            AVEncoderBitRateKey: bitRate as AnyObject,
            AVNumberOfChannelsKey: channels as AnyObject,
            AVSampleRateKey: sampleRate as AnyObject
        ]

        recorder = try AVAudioRecorder(url: url, settings: settings)
        recorder?.prepareToRecord()
        recorder?.delegate = delegate
        recorder?.isMeteringEnabled = metering
    }

    open func record() throws {
        if recorder == nil {
            try prepare()
        }

        self.state = .record
        if self.metering {
            self.startMetering()
        }
        self.recorder?.record()
    }

    open func stop() {
        switch state {
        case .play:
            player?.stop()
            player = nil
        case .record:
            recorder?.stop()
            recorder = nil
            stopMetering()
        default:
            break
        }

        state = .none
    }

    // MARK: - Metering
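    // Display-link callback: refreshes the meters and forwards the average power of channel 0 to the delegate.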
    @objc func updateMeter() {
        guard let recorder = recorder else { return }

        recorder.updateMeters()
        let dB = recorder.averagePower(forChannel: 0)
        delegate?.audioMeterDidUpdate?(dB)
    }

    fileprivate func startMetering() {
        link = CADisplayLink(target: self, selector: #selector(NCAudioRecorder.updateMeter))
        link?.add(to: RunLoop.current, forMode: RunLoop.Mode.common)
    }

    fileprivate func stopMetering() {
        link?.invalidate()
        link = nil
    }
}
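
// HUD view that draws the microphone image and fills it from the bottom in proportion to the current rate.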
@IBDesignable
class VoiceRecordHUD: UIView {
    @IBInspectable var rate: CGFloat = 0.0

    @IBInspectable var fillColor: UIColor = UIColor.green {
        didSet {
            setNeedsDisplay()
        }
    }

    var image: UIImage! {
        didSet {
            setNeedsDisplay()
        }
    }

    // MARK: - View Life Cycle

    override init(frame: CGRect) {
        super.init(frame: frame)
        image = UIImage(named: "microphone")
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        image = UIImage(named: "microphone")
    }

    func update(_ rate: CGFloat) {
        self.rate = rate
        setNeedsDisplay()
    }
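
    // Flips the context vertically, draws the microphone image, then clips to it and
    // fills up to bounds.height * rate with fillColor.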
    override func draw(_ rect: CGRect) {
        let context = UIGraphicsGetCurrentContext()
        context?.translateBy(x: 0, y: bounds.size.height)
        context?.scaleBy(x: 1, y: -1)
        context?.draw(image.cgImage!, in: bounds)
        context?.clip(to: bounds, mask: image.cgImage!)
        context?.setFillColor(fillColor.cgColor.components!)
        context?.fill(CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height * rate))
    }

    override func prepareForInterfaceBuilder() {
        let bundle = Bundle(for: type(of: self))
        image = UIImage(named: "microphone", in: bundle, compatibleWith: self.traitCollection)
    }
}