Skip to content

Commit

Permalink
#4090 - Various tweaks and fixes following code review. Switched back…
Browse files Browse the repository at this point in the history
… to DateFormatters for formatting durations, sanitising audio player durations and current times.
  • Loading branch information
stefanceriu committed Jul 19, 2021
1 parent 089c688 commit 0ead60f
Show file tree
Hide file tree
Showing 9 changed files with 70 additions and 127 deletions.
22 changes: 5 additions & 17 deletions Riot/Modules/Room/VoiceMessages/VoiceMessageAudioPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ class VoiceMessageAudioPlayer: NSObject {
private var statusObserver: NSKeyValueObservation?
private var playbackBufferEmptyObserver: NSKeyValueObservation?
private var rateObserver: NSKeyValueObservation?
private var playToEndObsever: NSObjectProtocol?
private var playToEndObserver: NSObjectProtocol?

private let delegateContainer = DelegateContainer()

Expand All @@ -55,23 +55,11 @@ class VoiceMessageAudioPlayer: NSObject {
}

var duration: TimeInterval {
guard let item = self.audioPlayer?.currentItem else {
return 0
}

let duration = CMTimeGetSeconds(item.duration)

return duration.isNaN ? 0.0 : duration
return abs(CMTimeGetSeconds(self.audioPlayer?.currentItem?.duration ?? .zero))
}

var currentTime: TimeInterval {
guard let audioPlayer = self.audioPlayer else {
return 0.0
}

let currentTime = CMTimeGetSeconds(audioPlayer.currentTime())

return currentTime.isNaN ? 0.0 : currentTime
return abs(CMTimeGetSeconds(audioPlayer?.currentTime() ?? .zero))
}

private(set) var isStopped = true
Expand Down Expand Up @@ -200,7 +188,7 @@ class VoiceMessageAudioPlayer: NSObject {
}
}

playToEndObsever = NotificationCenter.default.addObserver(forName: Notification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [weak self] notification in
playToEndObserver = NotificationCenter.default.addObserver(forName: Notification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [weak self] notification in
guard let self = self else { return }

self.delegateContainer.notifyDelegatesWithBlock { delegate in
Expand All @@ -213,7 +201,7 @@ class VoiceMessageAudioPlayer: NSObject {
statusObserver?.invalidate()
playbackBufferEmptyObserver?.invalidate()
rateObserver?.invalidate()
NotificationCenter.default.removeObserver(playToEndObsever as Any)
NotificationCenter.default.removeObserver(playToEndObserver as Any)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ class VoiceMessageAudioRecorder: NSObject, AVAudioRecorderDelegate {
return audioRecorder?.isRecording ?? false
}

func recordWithOuputURL(_ url: URL) {
func recordWithOutputURL(_ url: URL) {

let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 12000,
Expand Down
29 changes: 11 additions & 18 deletions Riot/Modules/Room/VoiceMessages/VoiceMessageController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
private enum Constants {
    // Hard cap on the length of a single voice message recording, in seconds.
    static let maximumAudioRecordingDuration: TimeInterval = 120.0
    // Threshold (seconds remaining before the cap) — presumably used to notify
    // the user that the maximum length is about to be reached; confirm against callers.
    static let maximumAudioRecordingLengthReachedThreshold: TimeInterval = 10.0
    // DateFormatter pattern for the elapsed-time label (e.g. "1:15").
    static let elapsedTimeFormat = "m:ss"
    // Recordings shorter than this (seconds) are presumably discarded; confirm against callers.
    static let minimumRecordingDuration = 1.0
}

Expand All @@ -47,6 +48,12 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
private var isInLockedMode: Bool = false
private var notifiedRemainingTime = false

/// Shared formatter for the elapsed-time label, cached statically because
/// `DateFormatter` creation is expensive.
///
/// The formatted `Date` is constructed directly from a raw `TimeInterval`
/// (via `timeIntervalSinceReferenceDate`), so the formatter must be pinned to
/// GMT: on devices in zones with non-whole-hour offsets (e.g. UTC+5:30) the
/// default local time zone would shift the "m:ss" output. A fixed POSIX
/// locale keeps the digits stable regardless of the user's locale.
private static let timeFormatter: DateFormatter = {
    let dateFormatter = DateFormatter()
    dateFormatter.dateFormat = Constants.elapsedTimeFormat
    dateFormatter.timeZone = TimeZone(identifier: "GMT")
    dateFormatter.locale = Locale(identifier: "en_US_POSIX")
    return dateFormatter
}()

@objc public weak var delegate: VoiceMessageControllerDelegate?

@objc public var isRecordingAudio: Bool {
Expand Down Expand Up @@ -90,7 +97,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,

// Haptic are not played during record on iOS by default. This fix works
// only since iOS 13. A workaround for iOS 12 and earlier would be to
// dispatch after at least 100ms recordWithOuputURL call
// dispatch after at least 100ms recordWithOutputURL call
if #available(iOS 13.0, *) {
try? AVAudioSession.sharedInstance().setCategory(.playAndRecord)
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
Expand All @@ -100,7 +107,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,

audioRecorder = mediaServiceProvider.audioRecorder()
audioRecorder?.registerDelegate(self)
audioRecorder?.recordWithOuputURL(temporaryFileURL)
audioRecorder?.recordWithOutputURL(temporaryFileURL)
}

func voiceMessageToolbarViewDidRequestRecordingFinish(_ toolbarView: VoiceMessageToolbarView) {
Expand Down Expand Up @@ -335,7 +342,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,

var details = VoiceMessageToolbarViewDetails()
details.state = (isRecording ? (isInLockedMode ? .lockedModeRecord : .record) : (isInLockedMode ? .lockedModePlayback : .idle))
details.elapsedTime = durationStringFromTimeInterval(currentTime)
details.elapsedTime = VoiceMessageController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: currentTime))
details.audioSamples = audioSamples

if isRecording {
Expand Down Expand Up @@ -384,7 +391,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,

var details = VoiceMessageToolbarViewDetails()
details.state = (audioRecorder?.isRecording ?? false ? (isInLockedMode ? .lockedModeRecord : .record) : (isInLockedMode ? .lockedModePlayback : .idle))
details.elapsedTime = durationStringFromTimeInterval(audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)
details.elapsedTime = VoiceMessageController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: (audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)))
details.audioSamples = audioSamples
details.isPlaying = audioPlayer.isPlaying
details.progress = (audioPlayer.isPlaying ? (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0) : 0.0)
Expand All @@ -399,18 +406,4 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,

audioSamples = audioSamples + [Float](repeating: 0.0, count: delta)
}

/// Formats a time interval as a minutes:seconds duration string (e.g. 75.0 -> "1:15").
/// - Parameter interval: The duration in seconds; negative values are formatted
///   by magnitude.
/// - Returns: The formatted duration, or an empty string when `interval` is
///   NaN or infinite.
private func durationStringFromTimeInterval(_ interval: TimeInterval) -> String {
    guard interval.isFinite else {
        return ""
    }

    let totalSeconds = abs(interval)
    // Fold hours into the minutes component: the previous implementation
    // computed hours and then discarded them, so durations over an hour were
    // silently truncated (3700s rendered as "1:40" instead of "61:40").
    let minutes = trunc(totalSeconds / 60.0)
    // Truncate the seconds remainder instead of letting %f round it; rounding
    // produced invalid strings such as "0:60" for 59.7s.
    let seconds = trunc(totalSeconds - minutes * 60.0)

    return String(format: "%01.0f:%02.0f", minutes, seconds)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -75,12 +75,12 @@ import Foundation
// MARK: - Private

private func stopAllServicesExcept(_ service: AnyObject?) {
for audioRecoder in audioRecorders.allObjects {
if audioRecoder === service {
for audioRecorder in audioRecorders.allObjects {
if audioRecorder === service {
continue
}

audioRecoder.stopRecording()
audioRecorder.stopRecording()
}

guard let audioPlayersEnumerator = audioPlayers.objectEnumerator() else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@ enum VoiceMessagePlaybackControllerState {

class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMessagePlaybackViewDelegate {

private enum Constants {
    // DateFormatter pattern for the playback elapsed-time label (e.g. "1:15").
    static let elapsedTimeFormat = "m:ss"
}

private let mediaServiceProvider: VoiceMessageMediaServiceProvider
private let cacheManager: VoiceMessageAttachmentCacheManager

Expand All @@ -43,6 +47,13 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
}
}

/// Shared formatter for the playback elapsed-time label, cached statically
/// because `DateFormatter` creation is expensive.
///
/// The formatted `Date` is constructed directly from a raw `TimeInterval`
/// (via `timeIntervalSinceReferenceDate`), so the formatter must be pinned to
/// GMT: on devices in zones with non-whole-hour offsets (e.g. UTC+5:30) the
/// default local time zone would shift the "m:ss" output. A fixed POSIX
/// locale keeps the digits stable regardless of the user's locale.
private static let timeFormatter: DateFormatter = {
    let dateFormatter = DateFormatter()
    dateFormatter.dateFormat = Constants.elapsedTimeFormat
    dateFormatter.timeZone = TimeZone(identifier: "GMT")
    dateFormatter.locale = Locale(identifier: "en_US_POSIX")
    return dateFormatter
}()


let playbackView: VoiceMessagePlaybackView

init(mediaServiceProvider: VoiceMessageMediaServiceProvider,
Expand Down Expand Up @@ -134,11 +145,11 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess

switch state {
case .stopped:
details.currentTime = durationStringFromTimeInterval(self.duration)
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: self.duration))
details.progress = 0.0
default:
if let audioPlayer = audioPlayer {
details.currentTime = durationStringFromTimeInterval(audioPlayer.currentTime)
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
}
}
Expand Down Expand Up @@ -199,18 +210,4 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
@objc private func updateTheme() {
playbackView.update(theme: ThemeService.shared().theme)
}

/// Formats a time interval as a minutes:seconds duration string (e.g. 75.0 -> "1:15").
/// - Parameter interval: The duration in seconds; negative values are formatted
///   by magnitude.
/// - Returns: The formatted duration, or an empty string when `interval` is
///   NaN or infinite.
private func durationStringFromTimeInterval(_ interval: TimeInterval) -> String {
    guard interval.isFinite else {
        return ""
    }

    let totalSeconds = abs(interval)
    // Fold hours into the minutes component: the previous implementation
    // computed hours and then discarded them, so durations over an hour were
    // silently truncated (3700s rendered as "1:40" instead of "61:40").
    let minutes = trunc(totalSeconds / 60.0)
    // Truncate the seconds remainder instead of letting %f round it; rounding
    // produced invalid strings such as "0:60" for 59.7s.
    let seconds = trunc(totalSeconds - minutes * 60.0)

    return String(format: "%01.0f:%02.0f", minutes, seconds)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
playButton.backgroundColor = theme.colors.background
playButton.tintColor = theme.colors.secondaryContent
backgroundView.backgroundColor = theme.colors.quinaryContent
_waveformView.primarylineColor = theme.colors.quarterlyContent
_waveformView.primaryLineColor = theme.colors.quarterlyContent
_waveformView.secondaryLineColor = theme.colors.secondaryContent
elapsedTimeLabel.textColor = theme.colors.tertiaryContent
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ class VoiceMessageToolbarView: PassthroughView, NibLoadable, Themable, UIGesture
case UIGestureRecognizer.State.began:
delegate?.voiceMessageToolbarViewDidRequestRecordingStart(self)
case UIGestureRecognizer.State.ended:
delegate?.voiceMessageToolbarViewDidRequestRecordingFinish(self)
delegate?.voiceMessageToolbarViewDidRequestRecordingFinish(self)
default:
break
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,10 @@ class VoiceMessageWaveformView: UIView {
}
}

var primarylineColor = UIColor.lightGray {
var primaryLineColor = UIColor.lightGray {
didSet {
backgroundLayer.strokeColor = primarylineColor.cgColor
backgroundLayer.fillColor = primarylineColor.cgColor
backgroundLayer.strokeColor = primaryLineColor.cgColor
backgroundLayer.fillColor = primaryLineColor.cgColor
}
}
var secondaryLineColor = UIColor.darkGray {
Expand Down Expand Up @@ -60,7 +60,7 @@ class VoiceMessageWaveformView: UIView {
override init(frame: CGRect) {
super.init(frame: frame)

setupAndAdd(backgroundLayer, with: primarylineColor)
setupAndAdd(backgroundLayer, with: primaryLineColor)
setupAndAdd(progressLayer, with: secondaryLineColor)
progressLayer.masksToBounds = true

Expand Down
Loading

0 comments on commit 0ead60f

Please sign in to comment.