Make messages show up again

pull/313/head
Niels Andriesse authored 3 years ago, committed by nielsandriesse
parent f36f447bec
commit b030b5999b

@@ -3,7 +3,8 @@ import PromiseKit
extension Storage : SessionMessagingKitStorageProtocol {
// MARK: Signal Protocol
// MARK: - Signal Protocol
public func getOrGenerateRegistrationID(using transaction: Any) -> UInt32 {
SSKEnvironment.shared.tsAccountManager.getOrGenerateRegistrationId(transaction as! YapDatabaseReadWriteTransaction)
}
@@ -18,7 +19,10 @@ extension Storage : SessionMessagingKitStorageProtocol {
return try! promise.wait()
}
// MARK: Shared Sender Keys
// MARK: - Shared Sender Keys
private static let closedGroupPrivateKeyCollection = "LokiClosedGroupPrivateKeyCollection"
public func getClosedGroupPrivateKey(for publicKey: String) -> String? {
@@ -49,12 +53,28 @@ extension Storage : SessionMessagingKitStorageProtocol {
getUserClosedGroupPublicKeys().contains(publicKey)
}
// MARK: Jobs
public func persist(_ job: Job, using transaction: Any) { fatalError("Not implemented.") }
public func markJobAsSucceeded(_ job: Job, using transaction: Any) { fatalError("Not implemented.") }
public func markJobAsFailed(_ job: Job, using transaction: Any) { fatalError("Not implemented.") }
// MARK: Authorization
// MARK: - Jobs
private static let jobCollection = "SNJobCollection"
public func persist(_ job: Job, using transaction: Any) {
(transaction as! YapDatabaseReadWriteTransaction).setObject(job, forKey: job.id!, inCollection: Storage.jobCollection)
}
public func markJobAsSucceeded(_ job: Job, using transaction: Any) {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: job.id!, inCollection: Storage.jobCollection)
}
public func markJobAsFailed(_ job: Job, using transaction: Any) {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: job.id!, inCollection: Storage.jobCollection)
}
// MARK: - Authorization
private static func getAuthTokenCollection(for server: String) -> String {
return (server == FileServerAPI.server) ? "LokiStorageAuthTokenCollection" : "LokiGroupChatAuthTokenCollection"
}
@@ -78,7 +98,10 @@ extension Storage : SessionMessagingKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: server, inCollection: collection)
}
// MARK: Open Group Public Keys
// MARK: - Open Group Public Keys
private static let openGroupPublicKeyCollection = "LokiOpenGroupPublicKeyCollection"
public func getOpenGroupPublicKey(for server: String) -> String? {
@@ -93,7 +116,10 @@ extension Storage : SessionMessagingKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).setObject(newValue, forKey: server, inCollection: Storage.openGroupPublicKeyCollection)
}
// MARK: Last Message Server ID
// MARK: - Last Message Server ID
private static let lastMessageServerIDCollection = "LokiGroupChatLastMessageServerIDCollection"
public func getLastMessageServerID(for group: UInt64, on server: String) -> UInt64? {
@@ -112,7 +138,10 @@ extension Storage : SessionMessagingKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: "\(server).\(group)", inCollection: Storage.lastMessageServerIDCollection)
}
// MARK: Last Deletion Server ID
// MARK: - Last Deletion Server ID
private static let lastDeletionServerIDCollection = "LokiGroupChatLastDeletionServerIDCollection"
public func getLastDeletionServerID(for group: UInt64, on server: String) -> UInt64? {
@@ -131,7 +160,10 @@ extension Storage : SessionMessagingKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: "\(server).\(group)", inCollection: Storage.lastDeletionServerIDCollection)
}
// MARK: Open Group Metadata
// MARK: - Open Group Metadata
private static let openGroupUserCountCollection = "LokiPublicChatUserCountCollection"
private static let openGroupMessageIDCollection = "LKMessageIDCollection"
@@ -155,4 +187,53 @@ extension Storage : SessionMessagingKitStorageProtocol {
public func setLastProfilePictureUploadDate(_ date: Date) {
UserDefaults.standard[.lastProfilePictureUpload] = date
}
// MARK: - Message Handling
public func isBlocked(_ publicKey: String) -> Bool {
return SSKEnvironment.shared.blockingManager.isRecipientIdBlocked(publicKey)
}
public func updateProfile(for publicKey: String, from profile: VisibleMessage.Profile, using transaction: Any) {
// let transaction = transaction as! YapDatabaseReadWriteTransaction
// let profileManager = SSKEnvironment.shared.profileManager
// if let displayName = profile.displayName {
// profileManager.updateProfileForContact(withID: publicKey, displayName: displayName, with: transaction)
// }
// if let profileKey = profile.profileKey, let profilePictureURL = profile.profilePictureURL, profileKey.count == kAES256_KeyByteLength {
// profileManager.setProfileKeyData(profileKey, forRecipientId: publicKey, avatarURL: profilePictureURL)
// }
}
/// Returns the ID of the thread the message was stored under along with the `TSIncomingMessage` that was constructed.
public func persist(_ message: VisibleMessage, using transaction: Any) -> (String, Any) {
let transaction = transaction as! YapDatabaseReadWriteTransaction
let thread = TSContactThread.getOrCreateThread(withContactId: message.sender!, transaction: transaction)
let message = TSIncomingMessage.from(message, using: transaction)
message.save(with: transaction)
return (thread.uniqueId!, message)
}
public func cancelTypingIndicatorsIfNeeded(for threadID: String, senderPublicKey: String) {
guard let thread = TSThread.fetch(uniqueId: threadID) else { return }
func cancelTypingIndicatorsIfNeeded() {
SSKEnvironment.shared.typingIndicators.didReceiveIncomingMessage(inThread: thread, recipientId: senderPublicKey, deviceId: 1)
}
if Thread.current.isMainThread {
cancelTypingIndicatorsIfNeeded()
} else {
DispatchQueue.main.async {
cancelTypingIndicatorsIfNeeded()
}
}
}
public func notifyUserIfNeeded(for message: Any, threadID: String) {
guard let thread = TSThread.fetch(uniqueId: threadID) else { return }
Storage.read { transaction in
SSKEnvironment.shared.notificationsManager!.notifyUser(for: (message as! TSIncomingMessage), in: thread, transaction: transaction)
}
}
}
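Taken together, the hooks above suggest the receive path that makes incoming messages show up again; a hedged sketch of a caller follows, where the function name and exact call order are assumptions rather than code from this commit:

// Hypothetical receive-path glue; only the storage methods come from this commit.
func handleIncoming(_ message: VisibleMessage,
                    storage: SessionMessagingKitStorageProtocol,
                    using transaction: Any) {
    guard let sender = message.sender, !storage.isBlocked(sender) else { return }
    // Persist the message as a TSIncomingMessage under the sender's contact thread.
    let (threadID, tsMessage) = storage.persist(message, using: transaction)
    // Clear the sender's typing indicator now that a real message arrived.
    storage.cancelTypingIndicatorsIfNeeded(for: threadID, senderPublicKey: sender)
    // Notify the user about the stored message.
    storage.notifyUserIfNeeded(for: tsMessage, threadID: threadID)
}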

@@ -1,7 +1,8 @@
extension Storage : SessionSnodeKitStorageProtocol {
// MARK: Onion Request Paths
// MARK: - Onion Request Paths
private static let onionRequestPathCollection = "LokiOnionRequestPathCollection"
public func getOnionRequestPaths() -> [OnionRequestAPI.Path] {
@@ -47,7 +48,10 @@ extension Storage : SessionSnodeKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeAllObjects(inCollection: Storage.onionRequestPathCollection)
}
// MARK: Snode Pool
// MARK: - Snode Pool
public func getSnodePool() -> Set<Snode> {
var result: Set<Snode> = []
Storage.read { transaction in
@@ -70,7 +74,10 @@ extension Storage : SessionSnodeKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeAllObjects(inCollection: Storage.snodePoolCollection)
}
// MARK: Swarm
// MARK: - Swarm
public func getSwarm(for publicKey: String) -> Set<Snode> {
var result: Set<Snode> = []
let collection = Storage.getSwarmCollection(for: publicKey)
@@ -96,7 +103,10 @@ extension Storage : SessionSnodeKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeAllObjects(inCollection: collection)
}
// MARK: Last Message Hash
// MARK: - Last Message Hash
private static let lastMessageHashCollection = "LokiLastMessageHashCollection"
func getLastMessageHashInfo(for snode: Snode, associatedWith publicKey: String) -> JSON? {
@@ -136,7 +146,10 @@ extension Storage : SessionSnodeKitStorageProtocol {
(transaction as! YapDatabaseReadWriteTransaction).removeObject(forKey: key, inCollection: Storage.lastMessageHashCollection)
}
// MARK: Received Messages
// MARK: - Received Messages
private static let receivedMessagesCollection = "LokiReceivedMessagesCollection"
public func getReceivedMessages(for publicKey: String) -> Set<String> {
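The received-messages collection reads like a per-sender dedup set; a small sketch of the check it appears to enable (the wrapper function is an assumption; only getReceivedMessages(for:) is visible in this hunk):

// Hypothetical duplicate check built on getReceivedMessages(for:).
func isDuplicate(hash: String, from publicKey: String,
                 storage: SessionSnodeKitStorageProtocol) -> Bool {
    return storage.getReceivedMessages(for: publicKey).contains(hash)
}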

@@ -16,10 +16,8 @@
#import "ConversationViewCell.h"
#import "ConversationViewItem.h"
#import "DateUtil.h"
#import "MediaDetailViewController.h"
#import "NotificationSettingsViewController.h"
#import "OWSAnyTouchGestureRecognizer.h"
#import "OWSAudioPlayer.h"
#import "OWSBackup.h"
@@ -40,7 +38,6 @@
#import "OWSQRCodeScanningViewController.h"
#import "SignalApp.h"
#import "UIViewController+Permissions.h"
#import <SessionProtocolKit/NSData+keyVersionByte.h>
#import <PureLayout/PureLayout.h>
#import <Reachability/Reachability.h>
@@ -54,8 +51,6 @@
#import <SignalUtilitiesKit/ContactTableViewCell.h>
#import <SignalUtilitiesKit/Environment.h>
#import <SignalUtilitiesKit/OWSAudioPlayer.h>
#import <SignalUtilitiesKit/OWSFormat.h>
#import <SignalUtilitiesKit/OWSPreferences.h>
#import <SignalUtilitiesKit/OWSProfileManager.h>
@@ -74,14 +69,12 @@
#import <SignalUtilitiesKit/NSNotificationCenter+OWS.h>
#import <SignalUtilitiesKit/NSString+SSK.h>
#import <SignalUtilitiesKit/OWSBackgroundTask.h>
#import <SignalUtilitiesKit/OWSCallMessageHandler.h>
#import <SignalUtilitiesKit/OWSContactsOutputStream.h>
#import <SignalUtilitiesKit/OWSDispatch.h>
#import <SignalUtilitiesKit/OWSError.h>
#import <SignalUtilitiesKit/OWSFileSystem.h>
#import <SignalUtilitiesKit/OWSIdentityManager.h>
#import <SignalUtilitiesKit/OWSMediaGalleryFinder.h>
#import <SignalUtilitiesKit/OWSPrimaryStorage+Calling.h>
#import <SignalUtilitiesKit/OWSPrimaryStorage+SessionStore.h>
#import <SignalUtilitiesKit/OWSRecipientIdentity.h>
#import <SignalUtilitiesKit/SignalAccount.h>
@@ -90,16 +83,13 @@
#import <SignalUtilitiesKit/TSAttachment.h>
#import <SignalUtilitiesKit/TSAttachmentPointer.h>
#import <SignalUtilitiesKit/TSAttachmentStream.h>
#import <SignalUtilitiesKit/TSCall.h>
#import <SignalUtilitiesKit/TSContactThread.h>
#import <SignalUtilitiesKit/TSErrorMessage.h>
#import <SignalUtilitiesKit/TSGroupThread.h>
#import <SignalUtilitiesKit/TSIncomingMessage.h>
#import <SignalUtilitiesKit/TSInfoMessage.h>
#import <SignalUtilitiesKit/TSOutgoingMessage.h>
#import <SignalUtilitiesKit/TSPreKeyManager.h>
#import <SignalUtilitiesKit/TSThread.h>
#import <SignalUtilitiesKit/LKGroupUtilities.h>
#import <SignalUtilitiesKit/UIImage+OWS.h>

@@ -23,17 +23,13 @@
#import <SignalUtilitiesKit/OWSDisappearingMessagesJob.h>
#import <SignalUtilitiesKit/OWSFailedAttachmentDownloadsJob.h>
#import <SignalUtilitiesKit/OWSFailedMessagesJob.h>
#import <SignalUtilitiesKit/OWSIncompleteCallsJob.h>
#import <SignalUtilitiesKit/OWSMath.h>
#import <SignalUtilitiesKit/OWSPrimaryStorage+Calling.h>
#import <SignalUtilitiesKit/OWSReadReceiptManager.h>
#import <SignalUtilitiesKit/SSKEnvironment.h>
#import <SignalUtilitiesKit/SignalUtilitiesKit-Swift.h>
#import <SignalUtilitiesKit/TSAccountManager.h>
#import <SignalUtilitiesKit/TSDatabaseView.h>
#import <SignalUtilitiesKit/TSPreKeyManager.h>
#import <YapDatabase/YapDatabaseCryptoUtils.h>
#import <sys/utsname.h>
@@ -558,8 +554,7 @@ static NSTimeInterval launchStartedAt;
// TODO: or something like that in production.
[OWSOrphanDataCleaner auditOnLaunchIfNecessary];
#endif
[self.profileManager fetchLocalUsersProfile];
[self.readReceiptManager prepareCachedValues];
// Disable the SAE until the main app has successfully completed launch process

@@ -25,15 +25,6 @@ import SignalUtilitiesKit
}
}
@objc
public var callMessageHandler: WebRTCCallMessageHandler
// @objc
// public var callService: CallService
// @objc
// public var outboundCallInitiator: OutboundCallInitiator
@objc
public var accountManager: AccountManager
@@ -77,7 +68,6 @@ import SignalUtilitiesKit
public var backupLazyRestore: BackupLazyRestore
private override init() {
self.callMessageHandler = WebRTCCallMessageHandler()
self.accountManager = AccountManager()
self.notificationPresenter = NotificationPresenter()
self.pushRegistrationManager = PushRegistrationManager()

@@ -1,549 +0,0 @@
////
//// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
////
//
//import Foundation
//import AVFoundation
//import SignalUtilitiesKit
//import SignalUtilitiesKit
//
//struct AudioSource: Hashable {
//
// let image: UIImage
// let localizedName: String
// let portDescription: AVAudioSessionPortDescription?
//
// // The built-in loud speaker / aka speakerphone
// let isBuiltInSpeaker: Bool
//
// // The built-in quiet speaker, aka the normal phone handset receiver earpiece
// let isBuiltInEarPiece: Bool
//
// init(localizedName: String, image: UIImage, isBuiltInSpeaker: Bool, isBuiltInEarPiece: Bool, portDescription: AVAudioSessionPortDescription? = nil) {
// self.localizedName = localizedName
// self.image = image
// self.isBuiltInSpeaker = isBuiltInSpeaker
// self.isBuiltInEarPiece = isBuiltInEarPiece
// self.portDescription = portDescription
// }
//
// init(portDescription: AVAudioSessionPortDescription) {
//
// let isBuiltInEarPiece = portDescription.portType == AVAudioSession.Port.builtInMic
//
// // portDescription.portName works well for BT linked devices, but if we are using
// // the built in mic, we have "iPhone Microphone" which is a little awkward.
// // In that case, instead we prefer just the model name e.g. "iPhone" or "iPad"
// let localizedName = isBuiltInEarPiece ? UIDevice.current.localizedModel : portDescription.portName
//
// self.init(localizedName: localizedName,
// image: #imageLiteral(resourceName: "button_phone_white"), // TODO
// isBuiltInSpeaker: false,
// isBuiltInEarPiece: isBuiltInEarPiece,
// portDescription: portDescription)
// }
//
// // Speakerphone is handled separately from the other audio routes as it doesn't appear as an "input"
// static var builtInSpeaker: AudioSource {
// return self.init(localizedName: NSLocalizedString("AUDIO_ROUTE_BUILT_IN_SPEAKER", comment: "action sheet button title to enable built in speaker during a call"),
// image: #imageLiteral(resourceName: "button_phone_white"), //TODO
// isBuiltInSpeaker: true,
// isBuiltInEarPiece: false)
// }
//
// // MARK: Hashable
//
// static func ==(lhs: AudioSource, rhs: AudioSource) -> Bool {
// // Simply comparing the `portDescription` vs the `portDescription.uid`
// // caused multiple instances of the built in mic to turn up in a set.
// if lhs.isBuiltInSpeaker && rhs.isBuiltInSpeaker {
// return true
// }
//
// if lhs.isBuiltInSpeaker || rhs.isBuiltInSpeaker {
// return false
// }
//
// guard let lhsPortDescription = lhs.portDescription else {
// owsFailDebug("only the built in speaker should lack a port description")
// return false
// }
//
// guard let rhsPortDescription = rhs.portDescription else {
// owsFailDebug("only the built in speaker should lack a port description")
// return false
// }
//
// return lhsPortDescription.uid == rhsPortDescription.uid
// }
//
// var hashValue: Int {
// guard let portDescription = self.portDescription else {
// assert(self.isBuiltInSpeaker)
// return "Built In Speaker".hashValue
// }
// return portDescription.uid.hash
// }
//}
//
//protocol CallAudioServiceDelegate: class {
// func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isEnabled: Bool)
// func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService)
//}
//
//@objc class CallAudioService: NSObject, CallObserver {
//
// private var vibrateTimer: Timer?
// private let audioPlayer = AVAudioPlayer()
// private let handleRinging: Bool
// weak var delegate: CallAudioServiceDelegate? {
// willSet {
// assert(newValue == nil || delegate == nil)
// }
// }
//
// // MARK: Vibration config
// private let vibrateRepeatDuration = 1.6
//
// // Our ring buzz is a pair of vibrations.
// // `pulseDuration` is the small pause between the two vibrations in the pair.
// private let pulseDuration = 0.2
//
// var audioSession: OWSAudioSession {
// return Environment.shared.audioSession
// }
//
// var avAudioSession: AVAudioSession {
// return AVAudioSession.sharedInstance()
// }
//
// // MARK: - Initializers
//
// init(handleRinging: Bool) {
// self.handleRinging = handleRinging
//
// super.init()
//
// // We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
//
// // Configure audio session so we don't prompt user with Record permission until call is connected.
//
// audioSession.configureRTCAudio()
// NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification, object: avAudioSession, queue: nil) { _ in
// assert(!Thread.isMainThread)
// self.updateIsSpeakerphoneEnabled()
// }
// }
//
// deinit {
// NotificationCenter.default.removeObserver(self)
// }
//
// // MARK: - CallObserver
//
// internal func stateDidChange(call: SignalCall, state: CallState) {
// AssertIsOnMainThread()
// self.handleState(call: call)
// }
//
// internal func muteDidChange(call: SignalCall, isMuted: Bool) {
// AssertIsOnMainThread()
//
// ensureProperAudioSession(call: call)
// }
//
// internal func holdDidChange(call: SignalCall, isOnHold: Bool) {
// AssertIsOnMainThread()
//
// ensureProperAudioSession(call: call)
// }
//
// internal func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?) {
// AssertIsOnMainThread()
//
// ensureProperAudioSession(call: call)
//
// if let audioSource = audioSource, audioSource.isBuiltInSpeaker {
// self.isSpeakerphoneEnabled = true
// } else {
// self.isSpeakerphoneEnabled = false
// }
// }
//
// internal func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool) {
// AssertIsOnMainThread()
//
// ensureProperAudioSession(call: call)
// }
//
// // Speakerphone can be manipulated by the in-app callscreen or via the system callscreen (CallKit).
// // Unlike other CallKit CallScreen buttons, enabling doesn't trigger a CXAction, so it's not as simple
// // to track state changes. Instead we never store the state and directly access the ground-truth in the
// // AVAudioSession.
// private(set) var isSpeakerphoneEnabled: Bool = false {
// didSet {
// self.delegate?.callAudioService(self, didUpdateIsSpeakerphoneEnabled: isSpeakerphoneEnabled)
// }
// }
//
// public func requestSpeakerphone(isEnabled: Bool) {
// // This is a little too slow to execute on the main thread and the results are not immediately available after execution
// // anyway, so we dispatch async. If you need to know the new value, you'll need to check isSpeakerphoneEnabled and take
// // advantage of the CallAudioServiceDelegate.callAudioService(_:didUpdateIsSpeakerphoneEnabled:)
// DispatchQueue.global().async {
// do {
// try self.avAudioSession.overrideOutputAudioPort( isEnabled ? .speaker : .none )
// } catch {
// owsFailDebug("failed to set \(#function) = \(isEnabled) with error: \(error)")
// }
// }
// }
//
// private func updateIsSpeakerphoneEnabled() {
// let value = avAudioSession.currentRoute.outputs.contains { (portDescription: AVAudioSessionPortDescription) -> Bool in
// return portDescription.portType == .builtInSpeaker
// }
// DispatchQueue.main.async {
// self.isSpeakerphoneEnabled = value
// }
// }
//
// private func ensureProperAudioSession(call: SignalCall?) {
// AssertIsOnMainThread()
//
// guard let call = call, !call.isTerminated else {
// // Revert to default audio
// setAudioSession(category: .soloAmbient,
// mode: .default)
// return
// }
//
// // Disallow bluetooth while (and only while) the user has explicitly chosen the built in receiver.
// //
// // NOTE: I'm actually not sure why this is required - it seems like we should just be able
// // to setPreferredInput to call.audioSource.portDescription in this case,
// // but in practice I'm seeing the call revert to the bluetooth headset.
// // Presumably something else (in WebRTC?) is touching our shared AudioSession. - mjk
// let options: AVAudioSession.CategoryOptions = call.audioSource?.isBuiltInEarPiece == true ? [] : [.allowBluetooth]
//
// if call.state == .localRinging {
// // SoloAmbient plays through speaker, but respects silent switch
// setAudioSession(category: .soloAmbient,
// mode: .default)
// } else if call.hasLocalVideo {
// // Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
// // otherwise sounds like ringing will be extra loud for video vs. speakerphone
//
// // Apple Docs say that setting mode to AVAudioSessionModeVideoChat has the
// // side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// // option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
// // does not include my linked bluetooth device
// setAudioSession(category: .playAndRecord,
// mode: .videoChat,
// options: options)
// } else {
// // Apple Docs say that setting mode to AVAudioSessionModeVoiceChat has the
// // side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// // option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
// // does not include my linked bluetooth device
// setAudioSession(category: .playAndRecord,
// mode: .voiceChat,
// options: options)
// }
//
// do {
// // It's important to set preferred input *after* ensuring properAudioSession
// // because some sources are only valid for certain category/option combinations.
// let existingPreferredInput = avAudioSession.preferredInput
// if existingPreferredInput != call.audioSource?.portDescription {
// Logger.info("changing preferred input: \(String(describing: existingPreferredInput)) -> \(String(describing: call.audioSource?.portDescription))")
// try avAudioSession.setPreferredInput(call.audioSource?.portDescription)
// }
//
// } catch {
// owsFailDebug("failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
// }
// }
//
// // MARK: - Service action handlers
//
// public func didUpdateVideoTracks(call: SignalCall?) {
// Logger.verbose("")
//
// self.ensureProperAudioSession(call: call)
// }
//
// public func handleState(call: SignalCall) {
// assert(Thread.isMainThread)
//
// Logger.verbose("new state: \(call.state)")
//
// // Stop playing sounds while switching audio session so we don't
// // get any blips across a temporary unintended route.
// stopPlayingAnySounds()
// self.ensureProperAudioSession(call: call)
//
// switch call.state {
// case .idle: handleIdle(call: call)
// case .dialing: handleDialing(call: call)
// case .answering: handleAnswering(call: call)
// case .remoteRinging: handleRemoteRinging(call: call)
// case .localRinging: handleLocalRinging(call: call)
// case .connected: handleConnected(call: call)
// case .reconnecting: handleReconnecting(call: call)
// case .localFailure: handleLocalFailure(call: call)
// case .localHangup: handleLocalHangup(call: call)
// case .remoteHangup: handleRemoteHangup(call: call)
// case .remoteBusy: handleBusy(call: call)
// }
// }
//
// private func handleIdle(call: SignalCall) {
// Logger.debug("")
// }
//
// private func handleDialing(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// // HACK: Without this async, dialing sound only plays once. I don't really understand why. Does the audioSession
// // need some time to settle? Is something else interrupting our session?
// DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2) {
// self.play(sound: OWSSound.callConnecting)
// }
// }
//
// private func handleAnswering(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
// }
//
// private func handleRemoteRinging(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// self.play(sound: OWSSound.callOutboundRinging)
// }
//
// private func handleLocalRinging(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// startRinging(call: call)
// }
//
// private func handleConnected(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
// }
//
// private func handleReconnecting(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
// }
//
// private func handleLocalFailure(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// play(sound: OWSSound.callFailure)
// handleCallEnded(call: call)
// }
//
// private func handleLocalHangup(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// handleCallEnded(call: call)
// }
//
// private func handleRemoteHangup(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// vibrate()
//
// handleCallEnded(call: call)
// }
//
// private func handleBusy(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// play(sound: OWSSound.callBusy)
//
// // Let the busy sound play for 4 seconds. The full file is longer than necessary
// DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 4.0) {
// self.handleCallEnded(call: call)
// }
// }
//
// private func handleCallEnded(call: SignalCall) {
// AssertIsOnMainThread()
// Logger.debug("")
//
// // Stop solo audio, revert to default.
// isSpeakerphoneEnabled = false
// setAudioSession(category: .soloAmbient)
// }
//
// // MARK: Playing Sounds
//
// var currentPlayer: OWSAudioPlayer?
//
// private func stopPlayingAnySounds() {
// currentPlayer?.stop()
// stopAnyRingingVibration()
// }
//
// private func play(sound: OWSSound) {
// guard let newPlayer = OWSSounds.audioPlayer(for: sound, audioBehavior: .call) else {
// owsFailDebug("unable to build player for sound: \(OWSSounds.displayName(for: sound))")
// return
// }
// Logger.info("playing sound: \(OWSSounds.displayName(for: sound))")
//
// // It's important to stop the current player **before** starting the new player. In the case that
// // we're playing the same sound, since the player is memoized on the sound instance, we'd otherwise
// // stop the sound we just started.
// self.currentPlayer?.stop()
// newPlayer.play()
// self.currentPlayer = newPlayer
// }
//
// // MARK: - Ringing
//
// private func startRinging(call: SignalCall) {
// guard handleRinging else {
// Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
// return
// }
//
// vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] _ in
// self?.ringVibration()
// }
// vibrateTimer?.fire()
// play(sound: .defaultiOSIncomingRingtone)
// }
//
// private func stopAnyRingingVibration() {
// guard handleRinging else {
// Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
// return
// }
// Logger.debug("")
//
// // Stop vibrating
// vibrateTimer?.invalidate()
// vibrateTimer = nil
// }
//
// // public so it can be called by timer via selector
// public func ringVibration() {
// // Since a call notification is more urgent than a message notification, we
// // vibrate twice, like a pulse, to differentiate from a normal notification vibration.
// vibrate()
// DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + pulseDuration) {
// self.vibrate()
// }
// }
//
// func vibrate() {
// // TODO implement HapticAdapter for iPhone7 and up
// AudioServicesPlaySystemSound(kSystemSoundID_Vibrate)
// }
//
// // MARK: - AudioSession MGMT
// // TODO move this to CallAudioSession?
//
// // Note this method is sensitive to the current audio session configuration.
// // Specifically if you call it while speakerphone is enabled you won't see
// // any connected bluetooth routes.
// var availableInputs: [AudioSource] {
// guard let availableInputs = avAudioSession.availableInputs else {
// // I'm not sure why this would happen, but it may indicate an error.
// owsFailDebug("No available inputs or inputs not ready")
// return [AudioSource.builtInSpeaker]
// }
//
// Logger.info("availableInputs: \(availableInputs)")
// return [AudioSource.builtInSpeaker] + availableInputs.map { portDescription in
// return AudioSource(portDescription: portDescription)
// }
// }
//
// func currentAudioSource(call: SignalCall) -> AudioSource? {
// if let audioSource = call.audioSource {
// return audioSource
// }
//
// // Before the user has specified an audio source on the call, we rely on the existing
// // system state to determine the current audio source.
// // If a bluetooth is connected, this will be bluetooth, otherwise
// // this will be the receiver.
// guard let portDescription = avAudioSession.currentRoute.inputs.first else {
// return nil
// }
//
// return AudioSource(portDescription: portDescription)
// }
//
// private func setAudioSession(category: AVAudioSession.Category,
// mode: AVAudioSession.Mode? = nil,
// options: AVAudioSession.CategoryOptions = AVAudioSession.CategoryOptions(rawValue: 0))