Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Allow voice message playback in the background #3236

Merged
merged 4 commits into from
Sep 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -282,9 +282,7 @@ class OnboardingFlowCoordinator: FlowCoordinatorProtocol {
let coordinator = SessionVerificationScreenCoordinator(parameters: parameters)

coordinator.actions
.sink { [weak self] action in
guard let self else { return }

.sink { action in
switch action {
case .done:
break // Moving to next state is handled by the global session verification listener
Expand Down
2 changes: 1 addition & 1 deletion ElementX/Sources/Other/MapLibre/MapLibreMapView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ extension MapLibreMapView {
}

func mapView(_ mapView: MGLMapView, regionDidChangeAnimated animated: Bool) {
// Fixes: "Publishing changes from within view updates is not allowed, this will cause undefined behavior."
// Avoid `Publishing changes from within view update` warnings
DispatchQueue.main.async { [mapLibreView] in
mapLibreView.mapCenterCoordinate = mapView.centerCoordinate
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ private struct CallView: UIViewRepresentable {

super.init()

DispatchQueue.main.async { // Avoid `Publishing changes from within view update warnings`
DispatchQueue.main.async { // Avoid `Publishing changes from within view update` warnings
viewModelContext.javaScriptEvaluator = self.evaluateJavaScript
viewModelContext.requestPictureInPictureHandler = self.requestPictureInPicture
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,9 @@ final class ComposerToolbarViewModel: ComposerToolbarViewModelType, ComposerTool
mentionBuilder = MentionBuilder()
attributedStringBuilder = AttributedStringBuilder(cacheKey: "Composer", mentionBuilder: mentionBuilder)

super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview, duration: 0),
super.init(initialViewState: ComposerToolbarViewState(audioPlayerState: .init(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 0),
audioRecorderState: .init(),
bindings: .init()),
mediaProvider: mediaProvider)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,11 @@ extension ComposerToolbar {
mentionDisplayHelper: ComposerMentionDisplayHelper.mock,
analyticsService: ServiceLocator.shared.analytics,
composerDraftService: ComposerDraftServiceMock())
model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview, duration: 10.0), waveform: .data(waveformData), isUploading: uploading)
model.state.composerMode = .previewVoiceMessage(state: AudioPlayerState(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 10.0),
waveform: .data(waveformData),
isUploading: uploading)
return model
}
return ComposerToolbar(context: composerViewModel.context,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -198,8 +198,11 @@ private class ElementTextView: UITextView, PillAttachmentViewProviderDelegate {

super.init(frame: .zero, textContainer: nil)

presendCallback.wrappedValue = { [weak self] in
self?.acceptCurrentSuggestion()
// Avoid `Publishing changes from within view update` warnings
DispatchQueue.main.async {
presendCallback.wrappedValue = { [weak self] in
self?.acceptCurrentSuggestion()
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ private extension DateFormatter {

struct VoiceMessagePreviewComposer_Previews: PreviewProvider, TestablePreview {
static let playerState = AudioPlayerState(id: .recorderPreview,
title: L10n.commonVoiceMessage,
duration: 10.0,
waveform: EstimatedWaveform.mockWaveform,
progress: 0.4)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -488,6 +488,7 @@ class TimelineInteractionHandler {
}

let playerState = AudioPlayerState(id: .timelineItemIdentifier(itemID),
title: L10n.commonVoiceMessage,
duration: voiceMessageRoomTimelineItem.content.duration,
waveform: voiceMessageRoomTimelineItem.content.waveform)
mediaPlayerProvider.register(audioPlayerState: playerState)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ class TimelineTableViewController: UIViewController {
/// Updates the typing members but also updates table view items
func setTypingMembers(_ members: [String]) {
DispatchQueue.main.async {
// Avoid `Publishing changes from within view update warnings`
// Avoid `Publishing changes from within view update` warnings
self.typingMembers.members = members
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
replyDetails: .loaded(sender: .init(id: "", displayName: "Alice"),
eventID: "123",
eventContent: .message(.text(.init(body: "Short"))))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
title: L10n.commonVoiceMessage,
duration: 10,
waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}
Expand Down Expand Up @@ -552,7 +555,10 @@ struct TimelineItemBubbledStylerView_Previews: PreviewProvider, TestablePreview
source: nil,
contentType: nil),
properties: RoomTimelineItemProperties(encryptionAuthenticity: .notGuaranteed(color: .gray))),
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random), duration: 10, waveform: EstimatedWaveform.mockWaveform))
playerState: AudioPlayerState(id: .timelineItemIdentifier(.random),
title: L10n.commonVoiceMessage,
duration: 10,
waveform: EstimatedWaveform.mockWaveform))
}
.environmentObject(viewModel.context)
}
Expand Down
32 changes: 11 additions & 21 deletions ElementX/Sources/Services/Audio/Player/AudioPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
private func setupAudioSession() {
releaseAudioSessionTask = nil
do {
try audioSession.setCategory(AVAudioSession.Category.playback)
try audioSession.setCategory(.playback)
try audioSession.setActive(true)
} catch {
MXLog.error("Could not redirect audio playback to speakers.")
Expand All @@ -157,16 +157,16 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
releaseAudioSessionTask = Task { [weak self] in
try? await Task.sleep(for: .seconds(timeInterval))
guard !Task.isCancelled else { return }
guard let self else { return }
self.releaseAudioSession()

self?.releaseAudioSession()
}
}

private func releaseAudioSession() {
releaseAudioSessionTask = nil
if audioSession.category == .playback, !audioSession.isOtherAudioPlaying {
MXLog.info("releasing audio session")
try? audioSession.setActive(false)
try? audioSession.setActive(false, options: .notifyOthersOnDeactivation)
}
}

Expand All @@ -189,10 +189,10 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {

switch playerItem.status {
case .failed:
self.setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
setInternalState(.error(playerItem.error ?? AudioPlayerError.genericError))
case .readyToPlay:
guard state == .loading else { return }
self.setInternalState(.readyToPlay)
setInternalState(.readyToPlay)
default:
break
}
Expand All @@ -202,30 +202,20 @@ class AudioPlayer: NSObject, AudioPlayerProtocol {
guard let self else { return }

if internalAudioPlayer.rate == 0 {
if self.isStopped {
self.setInternalState(.stopped)
if isStopped {
setInternalState(.stopped)
} else {
self.setInternalState(.paused)
setInternalState(.paused)
}
} else {
self.setInternalState(.playing)
setInternalState(.playing)
}
}

NotificationCenter.default.publisher(for: Notification.Name.AVPlayerItemDidPlayToEndTime)
.sink { [weak self] _ in
guard let self else { return }
self.setInternalState(.finishedPlaying)
}
.store(in: &cancellables)

// Pause playback upon the UIApplication.didEnterBackgroundNotification notification
NotificationCenter.default.publisher(for: UIApplication.didEnterBackgroundNotification)
.sink { [weak self] _ in
guard let self else { return }
self.pause()
// Release the audio session right away, as we don't play audio in the background
self.releaseAudioSession()
setInternalState(.finishedPlaying)
}
.store(in: &cancellables)
}
Expand Down
119 changes: 110 additions & 9 deletions ElementX/Sources/Services/Audio/Player/AudioPlayerState.swift
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import Combine
import Foundation
import MediaPlayer
import UIKit

enum AudioPlayerPlaybackState {
Expand All @@ -34,16 +35,15 @@ enum AudioPlayerStateIdentifier {
@MainActor
class AudioPlayerState: ObservableObject, Identifiable {
let id: AudioPlayerStateIdentifier
let title: String
private(set) var duration: Double
let waveform: EstimatedWaveform
@Published private(set) var progress: Double

@Published private(set) var playbackState: AudioPlayerPlaybackState
/// It's similar to `playbackState`, with the a difference: `.loading`
/// updates are delayed by a fixed amount of time
@Published private(set) var playerButtonPlaybackState: AudioPlayerPlaybackState
@Published private(set) var progress: Double
var showProgressIndicator: Bool {
progress > 0
}

private weak var audioPlayer: AudioPlayerProtocol?
private var audioPlayerSubscription: AnyCancellable?
Expand All @@ -53,6 +53,10 @@ class AudioPlayerState: ObservableObject, Identifiable {
/// The file url that the last player attached to this object has loaded.
/// The file url persists even if the AudioPlayer will be detached later.
private(set) var fileURL: URL?

var showProgressIndicator: Bool {
progress > 0
}

var isAttached: Bool {
audioPlayer != nil
Expand All @@ -62,8 +66,9 @@ class AudioPlayerState: ObservableObject, Identifiable {
displayLink != nil
}

init(id: AudioPlayerStateIdentifier, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
init(id: AudioPlayerStateIdentifier, title: String, duration: Double, waveform: EstimatedWaveform? = nil, progress: Double = 0.0) {
self.id = id
self.title = title
self.duration = duration
self.waveform = waveform ?? EstimatedWaveform(data: [])
self.progress = progress
Expand Down Expand Up @@ -146,12 +151,19 @@ class AudioPlayerState: ObservableObject, Identifiable {
}
startPublishProgress()
playbackState = .playing
case .didPausePlaying, .didStopPlaying, .didFinishPlaying:
setUpRemoteCommandCenter()
case .didPausePlaying:
stopPublishProgress()
playbackState = .stopped
if case .didFinishPlaying = action {
progress = 0.0
}
case .didStopPlaying:
playbackState = .stopped
stopPublishProgress()
tearDownRemoteCommandCenter()
case .didFinishPlaying:
playbackState = .stopped
progress = 0.0
stopPublishProgress()
tearDownRemoteCommandCenter()
case .didFailWithError:
stopPublishProgress()
playbackState = .error
Expand All @@ -172,6 +184,8 @@ class AudioPlayerState: ObservableObject, Identifiable {
if let currentTime = audioPlayer?.currentTime, duration > 0 {
progress = currentTime / duration
}

updateNowPlayingInfoCenter()
}

private func stopPublishProgress() {
Expand Down Expand Up @@ -200,6 +214,93 @@ class AudioPlayerState: ObservableObject, Identifiable {
.removeDuplicates()
.weakAssign(to: \.playerButtonPlaybackState, on: self)
}

private func setUpRemoteCommandCenter() {
UIApplication.shared.beginReceivingRemoteControlEvents()

let commandCenter = MPRemoteCommandCenter.shared()

commandCenter.playCommand.isEnabled = true
commandCenter.playCommand.removeTarget(nil)
commandCenter.playCommand.addTarget { [weak self] _ in
guard let audioPlayer = self?.audioPlayer else {
return MPRemoteCommandHandlerStatus.commandFailed
stefanceriu marked this conversation as resolved.
Show resolved Hide resolved
}

audioPlayer.play()

return MPRemoteCommandHandlerStatus.success
}

commandCenter.pauseCommand.isEnabled = true
commandCenter.pauseCommand.removeTarget(nil)
commandCenter.pauseCommand.addTarget { [weak self] _ in
guard let audioPlayer = self?.audioPlayer else {
return MPRemoteCommandHandlerStatus.commandFailed
}

audioPlayer.pause()

return MPRemoteCommandHandlerStatus.success
}

commandCenter.skipForwardCommand.isEnabled = true
commandCenter.skipForwardCommand.removeTarget(nil)
commandCenter.skipForwardCommand.addTarget { [weak self] event in
guard let audioPlayer = self?.audioPlayer, let skipEvent = event as? MPSkipIntervalCommandEvent else {
return MPRemoteCommandHandlerStatus.commandFailed
}

Task {
await audioPlayer.seek(to: audioPlayer.currentTime + skipEvent.interval)
pixlwave marked this conversation as resolved.
Show resolved Hide resolved
}

return MPRemoteCommandHandlerStatus.success
}

commandCenter.skipBackwardCommand.isEnabled = true
commandCenter.skipBackwardCommand.removeTarget(nil)
commandCenter.skipBackwardCommand.addTarget { [weak self] event in
guard let audioPlayer = self?.audioPlayer, let skipEvent = event as? MPSkipIntervalCommandEvent else {
return MPRemoteCommandHandlerStatus.commandFailed
}

Task {
await audioPlayer.seek(to: audioPlayer.currentTime - skipEvent.interval)
pixlwave marked this conversation as resolved.
Show resolved Hide resolved
}

return MPRemoteCommandHandlerStatus.success
}
}

private func tearDownRemoteCommandCenter() {
UIApplication.shared.endReceivingRemoteControlEvents()

let nowPlayingInfoCenter = MPNowPlayingInfoCenter.default()
nowPlayingInfoCenter.nowPlayingInfo = nil
nowPlayingInfoCenter.playbackState = .stopped

let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.playCommand.isEnabled = false
commandCenter.playCommand.removeTarget(nil)
commandCenter.pauseCommand.isEnabled = false
commandCenter.pauseCommand.removeTarget(nil)
commandCenter.skipForwardCommand.isEnabled = false
commandCenter.skipForwardCommand.removeTarget(nil)
commandCenter.skipBackwardCommand.isEnabled = false
commandCenter.skipBackwardCommand.removeTarget(nil)
}

private func updateNowPlayingInfoCenter() {
guard let audioPlayer else {
return
}

let nowPlayingInfoCenter = MPNowPlayingInfoCenter.default()
nowPlayingInfoCenter.nowPlayingInfo = [MPMediaItemPropertyTitle: title,
MPMediaItemPropertyPlaybackDuration: audioPlayer.duration as Any,
MPNowPlayingInfoPropertyElapsedPlaybackTime: audioPlayer.currentTime as Any]
}
}

extension AudioPlayerState: Equatable {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ class AudioRecorder: AudioRecorderProtocol {

private func releaseAudioSession() {
MXLog.info("releasing audio session")
try? audioSession.setActive(false)
try? audioSession.setActive(false, options: .notifyOthersOnDeactivation)
removeObservers()
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -260,8 +260,6 @@ class ElementCallService: NSObject, ElementCallServiceProtocol, PKPushRegistryDe
// MARK: - Private

func tearDownCallSession(sendEndCallAction: Bool = true) {
try? AVAudioSession.sharedInstance().setActive(false)
pixlwave marked this conversation as resolved.
Show resolved Hide resolved

if sendEndCallAction, let ongoingCallID {
let transaction = CXTransaction(action: CXEndCallAction(call: ongoingCallID.callKitID))
callController.request(transaction) { error in
Expand Down
Loading
Loading