NCAudioRecorderViewController.swift 9.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314
  1. //
  2. // NCAudioRecorderViewController.swift
  3. // Nextcloud
  4. //
  5. // Created by Marino Faggiana on 08/03/19.
  6. // Copyright (c) 2019 Marino Faggiana. All rights reserved.
  7. //
  8. // Author Marino Faggiana <marino.faggiana@nextcloud.com>
  9. //
  10. // This program is free software: you can redistribute it and/or modify
  11. // it under the terms of the GNU General Public License as published by
  12. // the Free Software Foundation, either version 3 of the License, or
  13. // (at your option) any later version.
  14. //
  15. // This program is distributed in the hope that it will be useful,
  16. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  17. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18. // GNU General Public License for more details.
  19. //
  20. // You should have received a copy of the GNU General Public License
  21. // along with this program. If not, see <http://www.gnu.org/licenses/>.
  22. //
  23. // --------------------------------
  24. // Based on code of Venkat Kukunuru
  25. // --------------------------------
  26. import UIKit
  27. import AVFoundation
  28. import QuartzCore
  29. class NCAudioRecorderViewController: UIViewController, NCAudioRecorderDelegate {
  30. var recording: NCAudioRecorder!
  31. var startDate: Date = Date()
  32. var fileName: String = ""
  33. let appDelegate = (UIApplication.shared.delegate as? AppDelegate)!
  34. @IBOutlet weak var contentContainerView: UIView!
  35. @IBOutlet weak var durationLabel: UILabel!
  36. @IBOutlet weak var startStopLabel: UILabel!
  37. @IBOutlet weak var voiceRecordHUD: VoiceRecordHUD!
  38. // MARK: - View Life Cycle
  39. override func viewDidLoad() {
  40. super.viewDidLoad()
  41. voiceRecordHUD.update(0.0)
  42. durationLabel.text = ""
  43. startStopLabel.text = NSLocalizedString("_wait_", comment: "")
  44. view.backgroundColor = .clear
  45. contentContainerView.backgroundColor = UIColor.lightGray
  46. voiceRecordHUD.fillColor = UIColor.green
  47. Task {
  48. self.fileName = await NCNetworking.shared.createFileName(fileNameBase: NSLocalizedString("_untitled_", comment: "") + ".m4a", account: self.appDelegate.account, serverUrl: self.appDelegate.activeServerUrl)
  49. recording = NCAudioRecorder(to: self.fileName)
  50. recording.delegate = self
  51. do {
  52. try self.recording.prepare()
  53. startStopLabel.text = NSLocalizedString("_voice_memo_start_", comment: "")
  54. } catch {
  55. print(error)
  56. }
  57. }
  58. }
  59. override func viewWillAppear(_ animated: Bool) {
  60. super.viewWillAppear(animated)
  61. }
  62. override func viewDidAppear(_ animated: Bool) {
  63. super.viewDidAppear(animated)
  64. }
  65. override func traitCollectionDidChange(_ previousTraitCollection: UITraitCollection?) {
  66. super.traitCollectionDidChange(previousTraitCollection)
  67. }
  68. // MARK: - Action
  69. @IBAction func touchViewController() {
  70. if recording.state == .record {
  71. startStop()
  72. } else {
  73. dismiss(animated: true)
  74. }
  75. }
  76. @IBAction func startStop() {
  77. if recording.state == .record {
  78. recording.stop()
  79. voiceRecordHUD.update(0.0)
  80. dismiss(animated: true) {
  81. self.uploadMetadata()
  82. }
  83. } else {
  84. do {
  85. try recording.record()
  86. startDate = Date()
  87. startStopLabel.text = NSLocalizedString("_voice_memo_stop_", comment: "")
  88. } catch {
  89. print(error)
  90. }
  91. }
  92. }
  93. func uploadMetadata() {
  94. let fileNamePath = NSTemporaryDirectory() + self.fileName
  95. let metadata = NCManageDatabase.shared.createMetadata(account: appDelegate.account, user: appDelegate.user, userId: appDelegate.userId, fileName: fileName, fileNameView: fileName, ocId: UUID().uuidString, serverUrl: appDelegate.activeServerUrl, urlBase: appDelegate.urlBase, url: "", contentType: "")
  96. metadata.session = NCNetworking.shared.sessionUploadBackground
  97. metadata.sessionSelector = NCGlobal.shared.selectorUploadFile
  98. metadata.status = NCGlobal.shared.metadataStatusWaitUpload
  99. metadata.sessionDate = Date()
  100. metadata.size = NCUtilityFileSystem().getFileSize(filePath: fileNamePath)
  101. NCUtilityFileSystem().copyFile(atPath: fileNamePath, toPath: NCUtilityFileSystem().getDirectoryProviderStorageOcId(metadata.ocId, fileNameView: metadata.fileNameView))
  102. NCNetworkingProcess.shared.createProcessUploads(metadatas: [metadata])
  103. }
  104. func audioMeterDidUpdate(_ db: Float) {
  105. // print("db level: %f", db)
  106. self.recording.recorder?.updateMeters()
  107. let ALPHA = 0.05
  108. let peakPower = pow(10, (ALPHA * Double((self.recording.recorder?.peakPower(forChannel: 0))!)))
  109. var rate: Double = 0.0
  110. if peakPower <= 0.2 {
  111. rate = 0.2
  112. } else if peakPower > 0.9 {
  113. rate = 1.0
  114. } else {
  115. rate = peakPower
  116. }
  117. voiceRecordHUD.update(CGFloat(rate))
  118. voiceRecordHUD.fillColor = UIColor.green
  119. let formatter = DateComponentsFormatter()
  120. formatter.allowedUnits = [.second]
  121. formatter.unitsStyle = .full
  122. durationLabel.text = formatter.string(from: startDate, to: Date())
  123. }
  124. }
/// Delegate callbacks for `NCAudioRecorder`.
/// Extends `AVAudioRecorderDelegate` so the recorder can also forward the
/// system recorder delegate methods to the same object.
@objc public protocol NCAudioRecorderDelegate: AVAudioRecorderDelegate {
    /// Called on every display-link tick while metering is enabled, with the
    /// recorder's average power in dB for channel 0 (see `updateMeter()`).
    @objc optional func audioMeterDidUpdate(_ dB: Float)
}
/// Thin wrapper around `AVAudioRecorder` (and an `AVAudioPlayer` slot) that
/// records to a file in the temporary directory and optionally reports
/// metering levels to its delegate via a `CADisplayLink`.
open class NCAudioRecorder: NSObject {

    @objc public enum State: Int {
        case none, record, play
    }

    /// Directory the recording file is created in (always the temp directory).
    static var directory: String {
        return NSTemporaryDirectory()
    }

    open weak var delegate: NCAudioRecorderDelegate?
    /// Full destination URL of the recording (directory + file name).
    open fileprivate(set) var url: URL
    open fileprivate(set) var state: State = .none

    // Encoder settings consumed by prepare(); callers may override before recording.
    open var bitRate = 192000
    open var sampleRate = 44100.0
    open var channels = 1

    var recorder: AVAudioRecorder?
    fileprivate var player: AVAudioPlayer?
    fileprivate var link: CADisplayLink?

    /// True when the delegate implements `audioMeterDidUpdate(_:)`; decides
    /// whether metering is enabled and whether the display link is started.
    var metering: Bool {
        return delegate?.responds(to: #selector(NCAudioRecorderDelegate.audioMeterDidUpdate(_:))) == true
    }

    // MARK: - Initializers

    /// Creates a recorder targeting `fileName` inside the temporary directory
    /// and activates a play-and-record audio session routed to the speaker.
    /// Session errors are logged and swallowed (best effort).
    public init(to fileName: String) {
        url = URL(fileURLWithPath: NCAudioRecorder.directory).appendingPathComponent(fileName)
        super.init()
        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord)
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
    }

    deinit {
        print("deinit NCAudioRecorder")
        // Best effort: deactivating the shared session can fail (e.g. while audio I/O is still winding down).
        do {
            try AVAudioSession.sharedInstance().setActive(false)
        } catch {
            print(error)
        }
    }

    // MARK: - Record

    /// Creates and primes the underlying `AVAudioRecorder` for `url`.
    /// NOTE(review): the settings pair `kAudioFormatAppleLossless` with an
    /// encoder bit rate while the caller names the file ".m4a" — confirm the
    /// produced container/codec is what the upload path expects.
    open func prepare() throws {
        let settings: [String: AnyObject] = [
            AVFormatIDKey: NSNumber(value: Int32(kAudioFormatAppleLossless) as Int32),
            AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue as AnyObject,
            AVEncoderBitRateKey: bitRate as AnyObject,
            AVNumberOfChannelsKey: channels as AnyObject,
            AVSampleRateKey: sampleRate as AnyObject
        ]
        recorder = try AVAudioRecorder(url: url, settings: settings)
        recorder?.prepareToRecord()
        recorder?.delegate = delegate
        // Metering is enabled only if the delegate asked for level callbacks.
        recorder?.isMeteringEnabled = metering
    }

    /// Starts recording, lazily calling `prepare()` first if needed, and
    /// begins the metering display link when the delegate wants levels.
    open func record() throws {
        if recorder == nil {
            try prepare()
        }
        self.state = .record
        if self.metering {
            self.startMetering()
        }
        self.recorder?.record()
    }

    /// Stops whichever activity matches the current state and resets to `.none`.
    open func stop() {
        switch state {
        case .play:
            player?.stop()
            player = nil
        case .record:
            recorder?.stop()
            recorder = nil
            stopMetering()
        default:
            break
        }
        state = .none
    }

    // MARK: - Metering

    /// Display-link tick: forwards the current average power (dB, channel 0)
    /// to the delegate. No-op once the recorder has been torn down.
    @objc func updateMeter() {
        guard let recorder = recorder else { return }
        recorder.updateMeters()
        let dB = recorder.averagePower(forChannel: 0)
        delegate?.audioMeterDidUpdate?(dB)
    }

    fileprivate func startMetering() {
        // NOTE(review): CADisplayLink retains its target, so `self` stays
        // alive until stopMetering() invalidates the link — confirm stop()
        // is always reached, otherwise the recorder is never deallocated.
        link = CADisplayLink(target: self, selector: #selector(NCAudioRecorder.updateMeter))
        link?.add(to: RunLoop.current, forMode: RunLoop.Mode.common)
    }

    fileprivate func stopMetering() {
        link?.invalidate()
        link = nil
    }
}
  221. @IBDesignable
  222. class VoiceRecordHUD: UIView {
  223. @IBInspectable var rate: CGFloat = 0.0
  224. @IBInspectable var fillColor: UIColor = UIColor.green {
  225. didSet {
  226. setNeedsDisplay()
  227. }
  228. }
  229. var image: UIImage! {
  230. didSet {
  231. setNeedsDisplay()
  232. }
  233. }
  234. // MARK: - View Life Cycle
  235. override init(frame: CGRect) {
  236. super.init(frame: frame)
  237. image = UIImage(named: "microphone")
  238. }
  239. required init?(coder aDecoder: NSCoder) {
  240. super.init(coder: aDecoder)
  241. image = UIImage(named: "microphone")
  242. }
  243. func update(_ rate: CGFloat) {
  244. self.rate = rate
  245. setNeedsDisplay()
  246. }
  247. override func draw(_ rect: CGRect) {
  248. let context = UIGraphicsGetCurrentContext()
  249. context?.translateBy(x: 0, y: bounds.size.height)
  250. context?.scaleBy(x: 1, y: -1)
  251. context?.draw(image.cgImage!, in: bounds)
  252. context?.clip(to: bounds, mask: image.cgImage!)
  253. context?.setFillColor(fillColor.cgColor.components!)
  254. context?.fill(CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height * rate))
  255. }
  256. override func prepareForInterfaceBuilder() {
  257. let bundle = Bundle(for: type(of: self))
  258. image = UIImage(named: "microphone", in: bundle, compatibleWith: self.traitCollection)
  259. }
  260. }