From ce9ca1bdaa29c20e815fb2c994eece3273430585 Mon Sep 17 00:00:00 2001
From: Michael Kirk
Date: Tue, 23 Oct 2018 08:40:09 -0600
Subject: [PATCH] audio player type

---
 .../OWSSoundSettingsViewController.m          |  4 +-
 .../Cells/OWSAudioMessageView.h               |  2 -
 .../ConversationViewController.m              | 11 +--
 .../MessageDetailViewController.swift         |  6 +-
 Signal/src/call/CallAudioService.swift        |  4 +-
 Signal/src/call/SignalCall.swift              |  2 +-
 .../Speakerbox/CallKitCallUIAdaptee.swift     |  2 +-
 .../ViewControllers/MediaMessageView.swift    |  4 +-
 .../attachments/OWSVideoPlayer.swift          |  2 +-
 .../environment/OWSAudioSession.swift         | 83 +++++++------------
 SignalMessaging/environment/OWSSounds.h       |  4 +-
 SignalMessaging/environment/OWSSounds.m       | 10 +--
 SignalMessaging/utils/OWSAudioPlayer.h        | 23 +++--
 SignalMessaging/utils/OWSAudioPlayer.m        | 35 ++++----
 14 files changed, 81 insertions(+), 111 deletions(-)

diff --git a/Signal/src/ViewControllers/AppSettings/OWSSoundSettingsViewController.m b/Signal/src/ViewControllers/AppSettings/OWSSoundSettingsViewController.m
index ed1addd87..c218cd34d 100644
--- a/Signal/src/ViewControllers/AppSettings/OWSSoundSettingsViewController.m
+++ b/Signal/src/ViewControllers/AppSettings/OWSSoundSettingsViewController.m
@@ -112,10 +112,10 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)soundWasSelected:(OWSSound)sound
 {
     [self.audioPlayer stop];
-    self.audioPlayer = [OWSSounds audioPlayerForSound:sound];
+    self.audioPlayer = [OWSSounds audioPlayerForSound:sound audioBehavior:OWSAudioBehavior_Playback];
     // Suppress looping in this view.
     self.audioPlayer.isLooping = NO;
-    [self.audioPlayer playWithPlaybackAudioCategory];
+    [self.audioPlayer play];
 
     if (self.currentSound == sound) {
         return;
diff --git a/Signal/src/ViewControllers/ConversationView/Cells/OWSAudioMessageView.h b/Signal/src/ViewControllers/ConversationView/Cells/OWSAudioMessageView.h
index 1d0c98e6e..f7be8be2e 100644
--- a/Signal/src/ViewControllers/ConversationView/Cells/OWSAudioMessageView.h
+++ b/Signal/src/ViewControllers/ConversationView/Cells/OWSAudioMessageView.h
@@ -2,8 +2,6 @@
 // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
-#import "OWSAudioPlayer.h"
-
 NS_ASSUME_NONNULL_BEGIN
 
 @class ConversationStyle;
diff --git a/Signal/src/ViewControllers/ConversationView/ConversationViewController.m b/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
index 6b05ea60b..15d8d3cc9 100644
--- a/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
+++ b/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
@@ -149,7 +149,7 @@ typedef enum : NSUInteger {
 @property (nonatomic) TSThread *thread;
 @property (nonatomic, readonly) YapDatabaseConnection *editingDatabaseConnection;
 
-@property (nonatomic, readonly) AudioActivity *recordVoiceNoteAudioActivity;
+@property (nonatomic, readonly) OWSAudioActivity *recordVoiceNoteAudioActivity;
 @property (nonatomic, readonly) NSTimeInterval viewControllerCreatedAt;
 
 // These two properties must be updated in lockstep.
@@ -286,7 +286,7 @@ typedef enum : NSUInteger {
     _contactShareViewHelper.delegate = self;
 
     NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ voice note", self.logTag];
-    _recordVoiceNoteAudioActivity = [AudioActivity recordActivityWithAudioDescription:audioActivityDescription];
+    _recordVoiceNoteAudioActivity = [[OWSAudioActivity alloc] initWithAudioDescription:audioActivityDescription behavior:OWSAudioBehavior_PlayAndRecord];
 }
 
 #pragma mark - Dependencies
@@ -2232,7 +2232,7 @@ typedef enum : NSUInteger {
         // Is this player associated with this media adapter?
         if (self.audioAttachmentPlayer.owner == viewItem) {
             // Tap to pause & unpause.
-            [self.audioAttachmentPlayer togglePlayStateWithPlaybackAudioCategory];
+            [self.audioAttachmentPlayer togglePlayState];
             return;
         }
         [self.audioAttachmentPlayer stop];
@@ -2240,10 +2240,11 @@ typedef enum : NSUInteger {
     }
 
     self.audioAttachmentPlayer =
-        [[OWSAudioPlayer alloc] initWithMediaUrl:attachmentStream.originalMediaURL delegate:viewItem];
+        [[OWSAudioPlayer alloc] initWithMediaUrl:attachmentStream.originalMediaURL audioBehavior:OWSAudioBehavior_AudioMessagePlayback delegate:viewItem];
+
     // Associate the player with this media adapter.
     self.audioAttachmentPlayer.owner = viewItem;
-    [self.audioAttachmentPlayer playWithPlaybackAudioCategory];
+    [self.audioAttachmentPlayer play];
 }
 
 - (void)didTapTruncatedTextMessage:(id<ConversationViewItem>)conversationItem
diff --git a/Signal/src/ViewControllers/MessageDetailViewController.swift b/Signal/src/ViewControllers/MessageDetailViewController.swift
index cb5c9fdf6..b08d9802d 100644
--- a/Signal/src/ViewControllers/MessageDetailViewController.swift
+++ b/Signal/src/ViewControllers/MessageDetailViewController.swift
@@ -668,19 +668,19 @@ class MessageDetailViewController: OWSViewController, MediaGalleryDataSourceDele
             // Is this player associated with this media adapter?
             if audioAttachmentPlayer.owner === viewItem {
                 // Tap to pause & unpause.
-                audioAttachmentPlayer.togglePlayStateWithPlaybackAudioCategory()
+                audioAttachmentPlayer.togglePlayState()
                 return
             }
             audioAttachmentPlayer.stop()
             self.audioAttachmentPlayer = nil
         }
 
-        let audioAttachmentPlayer = OWSAudioPlayer(mediaUrl: mediaURL, delegate: viewItem)
+        let audioAttachmentPlayer = OWSAudioPlayer(mediaUrl: mediaURL, audioBehavior: .audioMessagePlayback, delegate: viewItem)
         self.audioAttachmentPlayer = audioAttachmentPlayer
 
         // Associate the player with this media adapter.
         audioAttachmentPlayer.owner = viewItem
-        audioAttachmentPlayer.playWithPlaybackAudioCategory()
+        audioAttachmentPlayer.play()
     }
 
     func didTapTruncatedTextMessage(_ conversationItem: ConversationViewItem) {
diff --git a/Signal/src/call/CallAudioService.swift b/Signal/src/call/CallAudioService.swift
index e99101204..8ce926ce7 100644
--- a/Signal/src/call/CallAudioService.swift
+++ b/Signal/src/call/CallAudioService.swift
@@ -397,7 +397,7 @@ protocol CallAudioServiceDelegate: class {
     }
 
     private func play(sound: OWSSound) {
-        guard let newPlayer = OWSSounds.audioPlayer(for: sound) else {
+        guard let newPlayer = OWSSounds.audioPlayer(for: sound, audioBehavior: .call) else {
             owsFailDebug("unable to build player for sound: \(OWSSounds.displayName(for: sound))")
             return
         }
@@ -407,7 +407,7 @@ protocol CallAudioServiceDelegate: class {
         // we're playing the same sound, since the player is memoized on the sound instance, we'd otherwise
         // stop the sound we just started.
         self.currentPlayer?.stop()
-        newPlayer.playWithCurrentAudioCategory()
+        newPlayer.play()
         self.currentPlayer = newPlayer
     }
 
diff --git a/Signal/src/call/SignalCall.swift b/Signal/src/call/SignalCall.swift
index 51a018803..370960238 100644
--- a/Signal/src/call/SignalCall.swift
+++ b/Signal/src/call/SignalCall.swift
@@ -161,7 +161,7 @@ protocol CallObserver: class {
         self.state = state
         self.remotePhoneNumber = remotePhoneNumber
         self.thread = TSContactThread.getOrCreateThread(contactId: remotePhoneNumber)
-        self.audioActivity = AudioActivity(audioDescription: "[SignalCall] with \(remotePhoneNumber)")
+        self.audioActivity = AudioActivity(audioDescription: "[SignalCall] with \(remotePhoneNumber)", behavior: .call)
     }
 
     // A string containing the three identifiers for this call.
diff --git a/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift b/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
index 0fa7e0254..6e0970623 100644
--- a/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
+++ b/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
@@ -88,7 +88,7 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
 
         self.provider = type(of: self).sharedProvider(useSystemCallLog: useSystemCallLog)
 
-        self.audioActivity = AudioActivity(audioDescription: "[CallKitCallUIAdaptee]")
+        self.audioActivity = AudioActivity(audioDescription: "[CallKitCallUIAdaptee]", behavior: .call)
         self.showNamesOnCallScreen = showNamesOnCallScreen
 
         super.init()
diff --git a/SignalMessaging/ViewControllers/MediaMessageView.swift b/SignalMessaging/ViewControllers/MediaMessageView.swift
index b5eaeced3..02e7b1a51 100644
--- a/SignalMessaging/ViewControllers/MediaMessageView.swift
+++ b/SignalMessaging/ViewControllers/MediaMessageView.swift
@@ -131,7 +131,7 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
             return
         }
 
-        audioPlayer = OWSAudioPlayer(mediaUrl: dataUrl, delegate: self)
+        audioPlayer = OWSAudioPlayer(mediaUrl: dataUrl, audioBehavior: .playback, delegate: self)
 
         var subviews = [UIView]()
 
@@ -393,7 +393,7 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
 
     @objc
     func audioPlayButtonPressed(sender: UIButton) {
-        audioPlayer?.togglePlayStateWithPlaybackAudioCategory()
+        audioPlayer?.togglePlayState()
     }
 
     // MARK: - OWSAudioPlayerDelegate
diff --git a/SignalMessaging/attachments/OWSVideoPlayer.swift b/SignalMessaging/attachments/OWSVideoPlayer.swift
index 614f654b5..c32e899fd 100644
--- a/SignalMessaging/attachments/OWSVideoPlayer.swift
+++ b/SignalMessaging/attachments/OWSVideoPlayer.swift
@@ -22,7 +22,7 @@ public class OWSVideoPlayer: NSObject {
 
     @objc init(url: URL) {
         self.avPlayer = AVPlayer(url: url)
-        self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)", options: [.playback])
+        self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)", behavior: .playback)
 
         super.init()
 
diff --git a/SignalMessaging/environment/OWSAudioSession.swift b/SignalMessaging/environment/OWSAudioSession.swift
index 4cd125388..ef4868558 100644
--- a/SignalMessaging/environment/OWSAudioSession.swift
+++ b/SignalMessaging/environment/OWSAudioSession.swift
@@ -5,33 +5,16 @@
 import Foundation
 import WebRTC
 
-public struct AudioActivityOptions: OptionSet {
-    public let rawValue: Int
-
-    public init(rawValue: Int) {
-        self.rawValue = rawValue
-    }
-
-    public static let playback = AudioActivityOptions(rawValue: 1 << 0)
-    public static let record = AudioActivityOptions(rawValue: 1 << 1)
-    public static let proximitySwitchesToEarPiece = AudioActivityOptions(rawValue: 1 << 2)
-}
-
-@objc
+@objc(OWSAudioActivity)
 public class AudioActivity: NSObject {
     let audioDescription: String
 
-    let options: AudioActivityOptions
+    let behavior: OWSAudioBehavior
 
     @objc
-    public init(audioDescription: String) {
+    public init(audioDescription: String, behavior: OWSAudioBehavior) {
         self.audioDescription = audioDescription
-        self.options = []
-    }
-
-    public init(audioDescription: String, options: AudioActivityOptions) {
-        self.audioDescription = audioDescription
-        self.options = options
+        self.behavior = behavior
     }
 
     deinit {
@@ -44,23 +27,6 @@ public class AudioActivity: NSObject {
         return Environment.shared.audioSession
     }
 
-    // MARK: Factory Methods
-
-    @objc
-    public class func playbackActivity(audioDescription: String) -> AudioActivity {
-        return AudioActivity(audioDescription: audioDescription, options: .playback)
-    }
-
-    @objc
-    public class func recordActivity(audioDescription: String) -> AudioActivity {
-        return AudioActivity(audioDescription: audioDescription, options: .record)
-    }
-
-    @objc
-    public class func voiceNoteActivity(audioDescription: String) -> AudioActivity {
-        return AudioActivity(audioDescription: audioDescription, options: [.playback, .proximitySwitchesToEarPiece])
-    }
-
     // MARK:
 
     override public var description: String {
@@ -85,8 +51,8 @@ public class OWSAudioSession: NSObject {
     // MARK:
 
     private var currentActivities: [Weak<AudioActivity>] = []
-    var aggregateOptions: AudioActivityOptions {
-        return AudioActivityOptions(self.currentActivities.compactMap { $0.value?.options })
+    var aggregateBehaviors: Set<OWSAudioBehavior> {
+        return Set(self.currentActivities.compactMap { $0.value?.behavior })
     }
 
     @objc
@@ -99,21 +65,28 @@ public class OWSAudioSession: NSObject {
         self.currentActivities.append(Weak(value: audioActivity))
 
         do {
-            if aggregateOptions.contains(.record) {
-                assert(avAudioSession.recordPermission() == .granted)
-                try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
-            } else if aggregateOptions.contains(.proximitySwitchesToEarPiece) {
-                try ensureCategoryForProximityState()
-            } else if aggregateOptions.contains(.playback) {
-                try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
-            } else {
-                Logger.debug("no category option specified. Leaving category untouched.")
-            }
-
-            if aggregateOptions.contains(.proximitySwitchesToEarPiece) {
-                self.device.isProximityMonitoringEnabled = true
+            if aggregateBehaviors.contains(.call) {
+                // Do nothing while on a call.
+                // WebRTC/CallAudioService manages call audio
+                // Eventually it would be nice to consolidate more of the audio
+                // session handling.
             } else {
-                self.device.isProximityMonitoringEnabled = false
+                if aggregateBehaviors.contains(.playAndRecord) {
+                    assert(avAudioSession.recordPermission() == .granted)
+                    try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
+                } else if aggregateBehaviors.contains(.audioMessagePlayback) {
+                    try ensureCategoryForProximityState()
+                } else if aggregateBehaviors.contains(.playback) {
+                    try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
+                } else {
+                    owsFailDebug("no category option specified. Leaving category untouched.")
+                }
+
+                if aggregateBehaviors.contains(.audioMessagePlayback) {
+                    self.device.isProximityMonitoringEnabled = true
+                } else {
+                    self.device.isProximityMonitoringEnabled = false
+                }
             }
 
             return true
@@ -133,7 +106,7 @@ public class OWSAudioSession: NSObject {
     }
 
     func ensureCategoryForProximityState() throws {
-        if aggregateOptions.contains(.proximitySwitchesToEarPiece) {
+        if aggregateBehaviors.contains(.audioMessagePlayback) {
             if self.device.proximityState {
                 Logger.debug("proximityState: true")
 
diff --git a/SignalMessaging/environment/OWSSounds.h b/SignalMessaging/environment/OWSSounds.h
index 4cee5e581..f7ff36072 100644
--- a/SignalMessaging/environment/OWSSounds.h
+++ b/SignalMessaging/environment/OWSSounds.h
@@ -2,6 +2,7 @@
 // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
+#import "OWSAudioPlayer.h"
 #import
 
 NS_ASSUME_NONNULL_BEGIN
@@ -68,7 +69,8 @@ typedef NS_ENUM(NSUInteger, OWSSound) {
 
 #pragma mark - AudioPlayer
 
-+ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound;
++ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound
+                                   audioBehavior:(OWSAudioBehavior)audioBehavior;
 
 @end
 
diff --git a/SignalMessaging/environment/OWSSounds.m b/SignalMessaging/environment/OWSSounds.m
index 3ef0bf1da..506b7bd8e 100644
--- a/SignalMessaging/environment/OWSSounds.m
+++ b/SignalMessaging/environment/OWSSounds.m
@@ -375,17 +375,13 @@ NSString *const kOWSSoundsStorageGlobalNotificationKey = @"kOWSSoundsStorageGlob
 }
 
 + (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound
+                                   audioBehavior:(OWSAudioBehavior)audioBehavior;
 {
-    return [self audioPlayerForSound:sound quiet:NO];
-}
-
-+ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound quiet:(BOOL)quiet
-{
-    NSURL *_Nullable soundURL = [OWSSounds soundURLForSound:sound quiet:(BOOL)quiet];
+    NSURL *_Nullable soundURL = [OWSSounds soundURLForSound:sound quiet:NO];
     if (!soundURL) {
         return nil;
     }
-    OWSAudioPlayer *player = [[OWSAudioPlayer alloc] initWithMediaUrl:soundURL];
+    OWSAudioPlayer *player = [[OWSAudioPlayer alloc] initWithMediaUrl:soundURL audioBehavior:audioBehavior];
     if ([self shouldAudioPlayerLoopForSound:sound]) {
         player.isLooping = YES;
     }
diff --git a/SignalMessaging/utils/OWSAudioPlayer.h b/SignalMessaging/utils/OWSAudioPlayer.h
index 3fd81513f..93353c63c 100644
--- a/SignalMessaging/utils/OWSAudioPlayer.h
+++ b/SignalMessaging/utils/OWSAudioPlayer.h
@@ -21,6 +21,14 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {
 
 #pragma mark -
 
+typedef NS_ENUM(NSUInteger, OWSAudioBehavior) {
+    OWSAudioBehavior_Unknown,
+    OWSAudioBehavior_Playback,
+    OWSAudioBehavior_AudioMessagePlayback,
+    OWSAudioBehavior_PlayAndRecord,
+    OWSAudioBehavior_Call,
+};
+
 @interface OWSAudioPlayer : NSObject
 
 @property (nonatomic, readonly, weak) id<OWSAudioPlayerDelegate> delegate;
@@ -31,19 +39,16 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {
 
 @property (nonatomic) BOOL isLooping;
 
-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl;
-
-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioPlayerDelegate>)delegate;
-
-// respects silent switch
-- (void)playWithCurrentAudioCategory;
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl audioBehavior:(OWSAudioBehavior)audioBehavior;
 
-// will ensure sound is audible, even if silent switch is enabled
-- (void)playWithPlaybackAudioCategory;
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
+                        delegate:(id<OWSAudioPlayerDelegate>)delegate;
 
+- (void)play;
 - (void)pause;
 - (void)stop;
-- (void)togglePlayStateWithPlaybackAudioCategory;
+- (void)togglePlayState;
 
 @end
 
diff --git a/SignalMessaging/utils/OWSAudioPlayer.m b/SignalMessaging/utils/OWSAudioPlayer.m
index d8abb7146..e44963ef4 100644
--- a/SignalMessaging/utils/OWSAudioPlayer.m
+++ b/SignalMessaging/utils/OWSAudioPlayer.m
@@ -35,8 +35,7 @@ NS_ASSUME_NONNULL_BEGIN
 @property (nonatomic, readonly) NSURL *mediaUrl;
 @property (nonatomic, nullable) AVAudioPlayer *audioPlayer;
 @property (nonatomic, nullable) NSTimer *audioPlayerPoller;
-@property (nonatomic, readonly) AudioActivity *playbackAudioActivity;
-@property (nonatomic, readonly) AudioActivity *currentCategoryAudioActivity;
+@property (nonatomic, readonly) OWSAudioActivity *audioActivity;
 
 @end
 
@@ -45,11 +44,14 @@ NS_ASSUME_NONNULL_BEGIN
 @implementation OWSAudioPlayer
 
 - (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
 {
-    return [self initWithMediaUrl:mediaUrl delegate:[OWSAudioPlayerDelegateStub new]];
+    return [self initWithMediaUrl:mediaUrl audioBehavior:audioBehavior delegate:[OWSAudioPlayerDelegateStub new]];
 }
 
-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioPlayerDelegate>)delegate
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
+                        delegate:(id<OWSAudioPlayerDelegate>)delegate
 {
     self = [super init];
     if (!self) {
@@ -59,13 +61,11 @@ NS_ASSUME_NONNULL_BEGIN
     OWSAssertDebug(mediaUrl);
     OWSAssertDebug(delegate);
 
-    _delegate = delegate;
     _mediaUrl = mediaUrl;
+    _delegate = delegate;
 
     NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ %@", self.logTag, self.mediaUrl];
-    // _playbackAudioActivity = [AudioActivity playbackActivityWithAudioDescription:audioActivityDescription];
-    _playbackAudioActivity = [AudioActivity voiceNoteActivityWithAudioDescription:audioActivityDescription];
-    _currentCategoryAudioActivity = [[AudioActivity alloc] initWithAudioDescription:audioActivityDescription];
+    _audioActivity = [[OWSAudioActivity alloc] initWithAudioDescription:audioActivityDescription behavior:audioBehavior];
 
     [[NSNotificationCenter defaultCenter] addObserver:self
                                              selector:@selector(applicationDidEnterBackground:)
@@ -100,19 +100,15 @@ NS_ASSUME_NONNULL_BEGIN
 
 #pragma mark - Methods
 
-- (void)playWithCurrentAudioCategory
+- (void)play
 {
-    OWSAssertIsOnMainThread();
-    [self playWithAudioActivity:self.currentCategoryAudioActivity];
-}
 
-- (void)playWithPlaybackAudioCategory
-{
+    // get current audio activity
     OWSAssertIsOnMainThread();
-    [self playWithAudioActivity:self.playbackAudioActivity];
+    [self playWithAudioActivity:self.audioActivity];
 }
 
-- (void)playWithAudioActivity:(AudioActivity *)audioActivity
+- (void)playWithAudioActivity:(OWSAudioActivity *)audioActivity
 {
     OWSAssertIsOnMainThread();
 
@@ -188,18 +184,17 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)endAudioActivities
 {
-    [self.audioSession endAudioActivity:self.playbackAudioActivity];
-    [self.audioSession endAudioActivity:self.currentCategoryAudioActivity];
+    [self.audioSession endAudioActivity:self.audioActivity];
 }
 
-- (void)togglePlayStateWithPlaybackAudioCategory
+- (void)togglePlayState
 {
     OWSAssertIsOnMainThread();
 
     if (self.delegate.audioPlaybackState == AudioPlaybackState_Playing) {
         [self pause];
     } else {
-        [self playWithAudioActivity:self.playbackAudioActivity];
+        [self playWithAudioActivity:self.audioActivity];
     }
 }