//
//  NCAudioRecorderViewController.swift
//  Nextcloud
//
//  Created by Marino Faggiana on 08/03/19.
//  Copyright (c) 2019 Marino Faggiana. All rights reserved.
//
//  Author Marino Faggiana <marino.faggiana@nextcloud.com>
//
//  This program is free software: you can redistribute it and/or modify
//  it under the terms of the GNU General Public License as published by
//  the Free Software Foundation, either version 3 of the License, or
//  (at your option) any later version.
//
//  This program is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//  GNU General Public License for more details.
//
//  You should have received a copy of the GNU General Public License
//  along with this program. If not, see <http://www.gnu.org/licenses/>.
//
//  --------------------------------
//  Based on code of Venkat Kukunuru
//  --------------------------------
//

import UIKit
import AVFoundation
import QuartzCore
import NextcloudKit

class NCAudioRecorderViewController: UIViewController, NCAudioRecorderDelegate {
    @IBOutlet weak var contentContainerView: UIView!
    @IBOutlet weak var durationLabel: UILabel!
    @IBOutlet weak var startStopLabel: UILabel!
    @IBOutlet weak var voiceRecordHUD: VoiceRecordHUD!

    var recording: NCAudioRecorder!
    var startDate: Date = Date()
    var fileName: String = ""
    var controller: NCMainTabBarController!
    var session: NCSession.Session {
        NCSession.shared.getSession(controller: controller)
    }

    // MARK: - View Life Cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        voiceRecordHUD.update(0.0)
        durationLabel.text = ""
        startStopLabel.text = NSLocalizedString("_wait_", comment: "")
        view.backgroundColor = .clear
        contentContainerView.backgroundColor = UIColor.lightGray
        voiceRecordHUD.fillColor = UIColor.green

        Task {
            self.fileName = await NCNetworking.shared.createFileName(fileNameBase: NSLocalizedString("_untitled_", comment: "") + ".m4a", account: self.session.account, serverUrl: controller.currentServerUrl())
            recording = NCAudioRecorder(to: self.fileName)
            recording.delegate = self
            do {
                try self.recording.prepare()
                startStopLabel.text = NSLocalizedString("_voice_memo_start_", comment: "")
            } catch {
                print(error)
            }
        }
    }

    // MARK: - Action

    @IBAction func touchViewController() {
        if recording.state == .record {
            startStop()
        } else {
            dismiss(animated: true)
        }
    }

    @IBAction func startStop() {
        if recording.state == .record {
            recording.stop()
            voiceRecordHUD.update(0.0)
            dismiss(animated: true) {
                self.uploadMetadata()
            }
        } else {
            do {
                try recording.record()
                startDate = Date()
                startStopLabel.text = NSLocalizedString("_voice_memo_stop_", comment: "")
            } catch {
                print(error)
            }
        }
    }
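
    /// Creates the upload metadata for the recorded file, copies the temporary
    /// recording into the provider storage and hands it to the upload queue.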
    func uploadMetadata() {
        let fileNamePath = NSTemporaryDirectory() + self.fileName
        let metadata = NCManageDatabase.shared.createMetadata(fileName: fileName,
                                                              fileNameView: fileName,
                                                              ocId: UUID().uuidString,
                                                              serverUrl: controller.currentServerUrl(),
                                                              url: "",
                                                              contentType: "",
                                                              session: self.session,
                                                              sceneIdentifier: self.controller?.sceneIdentifier)

        metadata.session = NCNetworking.shared.sessionUploadBackground
        metadata.sessionSelector = NCGlobal.shared.selectorUploadFile
        metadata.status = NCGlobal.shared.metadataStatusWaitUpload
        metadata.sessionDate = Date()
        metadata.size = NCUtilityFileSystem().getFileSize(filePath: fileNamePath)

        NCUtilityFileSystem().copyFile(atPath: fileNamePath, toPath: NCUtilityFileSystem().getDirectoryProviderStorageOcId(metadata.ocId, fileNameView: metadata.fileNameView))
        NCNetworkingProcess.shared.createProcessUploads(metadatas: [metadata])
    }
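
    /// NCAudioRecorderDelegate callback: converts the recorder's peak power into a
    /// fill rate between 0.2 and 1.0 for the HUD and updates the elapsed-time label.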
    func audioMeterDidUpdate(_ db: Float) {
        // print("db level: %f", db)
        guard let recorder = recording?.recorder else { return }

        recorder.updateMeters()
        let alpha = 0.05
        let peakPower = pow(10, alpha * Double(recorder.peakPower(forChannel: 0)))
        var rate: Double = 0.0
        if peakPower <= 0.2 {
            rate = 0.2
        } else if peakPower > 0.9 {
            rate = 1.0
        } else {
            rate = peakPower
        }
        voiceRecordHUD.update(CGFloat(rate))
        voiceRecordHUD.fillColor = UIColor.green

        let formatter = DateComponentsFormatter()
        formatter.allowedUnits = [.second]
        formatter.unitsStyle = .full
        durationLabel.text = formatter.string(from: startDate, to: Date())
    }
}
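
/// Delegate protocol for NCAudioRecorder; extends AVAudioRecorderDelegate with an
/// optional metering callback fired on every display-link tick while recording.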
@objc public protocol NCAudioRecorderDelegate: AVAudioRecorderDelegate {
    @objc optional func audioMeterDidUpdate(_ dB: Float)
}
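
/// Lightweight wrapper around AVAudioRecorder that records into the temporary
/// directory, configures the shared AVAudioSession and reports meter levels to
/// its delegate via a CADisplayLink.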
open class NCAudioRecorder: NSObject {
    public enum State: Int {
        case none, record, play
    }

    static var directory: String {
        return NSTemporaryDirectory()
    }

    open weak var delegate: NCAudioRecorderDelegate?
    open fileprivate(set) var url: URL
    open fileprivate(set) var state: State = .none

    open var bitRate = 192000
    open var sampleRate = 44100.0
    open var channels = 1

    var recorder: AVAudioRecorder?
    fileprivate var player: AVAudioPlayer?
    fileprivate var link: CADisplayLink?

    var metering: Bool {
        return delegate?.responds(to: #selector(NCAudioRecorderDelegate.audioMeterDidUpdate(_:))) == true
    }

    // MARK: - Initializers

    public init(to fileName: String) {
        url = URL(fileURLWithPath: NCAudioRecorder.directory).appendingPathComponent(fileName)
        super.init()

        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord)
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
    }

    deinit {
        print("deinit NCAudioRecorder")

        do {
            try AVAudioSession.sharedInstance().setActive(false)
        } catch {
            print(error)
        }
    }

    // MARK: - Record
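
    /// Creates the underlying AVAudioRecorder with the configured format settings
    /// and enables metering when the delegate implements audioMeterDidUpdate(_:).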
    open func prepare() throws {
        let settings: [String: Any] = [
            AVFormatIDKey: NSNumber(value: kAudioFormatAppleLossless),
            AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue,
            AVEncoderBitRateKey: bitRate,
            AVNumberOfChannelsKey: channels,
            AVSampleRateKey: sampleRate
        ]

        recorder = try AVAudioRecorder(url: url, settings: settings)
        recorder?.prepareToRecord()
        recorder?.delegate = delegate
        recorder?.isMeteringEnabled = metering
    }
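
    /// Starts recording, preparing the recorder on first use and starting the
    /// metering display link when a metering delegate is present.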
    open func record() throws {
        if recorder == nil {
            try prepare()
        }

        self.state = .record
        if self.metering {
            self.startMetering()
        }
        self.recorder?.record()
    }

    open func stop() {
        switch state {
        case .play:
            player?.stop()
            player = nil
        case .record:
            recorder?.stop()
            recorder = nil
            stopMetering()
        default:
            break
        }
        state = .none
    }

    // MARK: - Metering

    @objc func updateMeter() {
        guard let recorder = recorder else { return }

        recorder.updateMeters()
        let dB = recorder.averagePower(forChannel: 0)
        delegate?.audioMeterDidUpdate?(dB)
    }

    fileprivate func startMetering() {
        link = CADisplayLink(target: self, selector: #selector(NCAudioRecorder.updateMeter))
        link?.add(to: RunLoop.current, forMode: RunLoop.Mode.common)
    }

    fileprivate func stopMetering() {
        link?.invalidate()
        link = nil
    }
}
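
/// Level HUD that uses the "microphone" image as a mask and fills it from the
/// bottom in `fillColor`, proportionally to the current `rate` (0.0...1.0).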
@IBDesignable
class VoiceRecordHUD: UIView {
    @IBInspectable var rate: CGFloat = 0.0

    @IBInspectable var fillColor: UIColor = UIColor.green {
        didSet {
            setNeedsDisplay()
        }
    }

    var image: UIImage! {
        didSet {
            setNeedsDisplay()
        }
    }

    // MARK: - View Life Cycle

    override init(frame: CGRect) {
        super.init(frame: frame)
        image = UIImage(named: "microphone")
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        image = UIImage(named: "microphone")
    }

    func update(_ rate: CGFloat) {
        self.rate = rate
        setNeedsDisplay()
    }
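
    /// Draws the microphone image, clips to it and fills the bottom
    /// `bounds.height * rate` portion with `fillColor`. The context is flipped so
    /// the image renders upright and the fill grows upward with the audio level.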
    override func draw(_ rect: CGRect) {
        guard let context = UIGraphicsGetCurrentContext(), let cgImage = image?.cgImage else { return }

        context.translateBy(x: 0, y: bounds.size.height)
        context.scaleBy(x: 1, y: -1)

        context.draw(cgImage, in: bounds)
        context.clip(to: bounds, mask: cgImage)
        context.setFillColor(fillColor.cgColor)
        context.fill(CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height * rate))
    }

    override func prepareForInterfaceBuilder() {
        let bundle = Bundle(for: type(of: self))
        image = UIImage(named: "microphone", in: bundle, compatibleWith: self.traitCollection)
    }
}