Mirror of https://github.com/oxen-io/session-ios
Commit cc495bccc1: Merge remote-tracking branch 'upstream/dev' into dev
(One file diff suppressed because it is too large.)
@@ -0,0 +1,213 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//

#import "OWSBackupSettingsViewController.h"
#import "OWSBackup.h"
#import "Session-Swift.h"

#import <PromiseKit/AnyPromise.h>
#import <SessionMessagingKit/Environment.h>
#import <SignalUtilitiesKit/SignalUtilitiesKit-Swift.h>
#import <SignalUtilitiesKit/UIColor+OWS.h>
#import <SignalUtilitiesKit/UIFont+OWS.h>
#import <SessionUtilitiesKit/UIView+OWS.h>
#import <SessionUtilitiesKit/MIMETypeUtil.h>

NS_ASSUME_NONNULL_BEGIN

@interface OWSBackupSettingsViewController ()

@property (nonatomic, nullable) NSError *iCloudError;

@end

#pragma mark -

@implementation OWSBackupSettingsViewController

#pragma mark - Dependencies

- (OWSBackup *)backup
{
    OWSAssertDebug(AppEnvironment.shared.backup);

    return AppEnvironment.shared.backup;
}

#pragma mark -

- (void)viewDidLoad
{
    [super viewDidLoad];

    self.title = NSLocalizedString(@"SETTINGS_BACKUP", @"Label for the backup view in app settings.");

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(backupStateDidChange:)
                                                 name:NSNotificationNameBackupStateDidChange
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(applicationDidBecomeActive:)
                                                 name:OWSApplicationDidBecomeActiveNotification
                                               object:nil];

    [self updateTableContents];
}

- (void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];

    [self updateTableContents];
    [self updateICloudStatus];
}

- (void)updateICloudStatus
{
    __weak OWSBackupSettingsViewController *weakSelf = self;
    [[self.backup ensureCloudKitAccess]
            .then(^{
                OWSAssertIsOnMainThread();

                weakSelf.iCloudError = nil;
                [weakSelf updateTableContents];
            })
            .catch(^(NSError *error) {
                OWSAssertIsOnMainThread();

                weakSelf.iCloudError = error;
                [weakSelf updateTableContents];
            }) retainUntilComplete];
}

#pragma mark - Table Contents

- (void)updateTableContents
{
    OWSTableContents *contents = [OWSTableContents new];

    BOOL isBackupEnabled = [OWSBackup.sharedManager isBackupEnabled];

    if (self.iCloudError) {
        OWSTableSection *iCloudSection = [OWSTableSection new];
        iCloudSection.headerTitle = NSLocalizedString(
            @"SETTINGS_BACKUP_ICLOUD_STATUS", @"Label for iCloud status row in the backup settings view.");
        [iCloudSection
            addItem:[OWSTableItem
                        longDisclosureItemWithText:[OWSBackupAPI errorMessageForCloudKitAccessError:self.iCloudError]
                                       actionBlock:^{
                                           [[UIApplication sharedApplication]
                                               openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
                                       }]];
        [contents addSection:iCloudSection];
    }

    // TODO: This UI is temporary.
    // Enabling backup will involve entering and registering a PIN.
    OWSTableSection *enableSection = [OWSTableSection new];
    enableSection.headerTitle = NSLocalizedString(@"SETTINGS_BACKUP", @"Label for the backup view in app settings.");
    [enableSection
        addItem:[OWSTableItem switchItemWithText:
                     NSLocalizedString(@"SETTINGS_BACKUP_ENABLING_SWITCH",
                         @"Label for switch in settings that controls whether or not backup is enabled.")
                                       isOnBlock:^{
                                           return [OWSBackup.sharedManager isBackupEnabled];
                                       }
                                          target:self
                                        selector:@selector(isBackupEnabledDidChange:)]];
    [contents addSection:enableSection];

    if (isBackupEnabled) {
        // TODO: This UI is temporary.
        // Enabling backup will involve entering and registering a PIN.
        OWSTableSection *progressSection = [OWSTableSection new];
        [progressSection
            addItem:[OWSTableItem
                        labelItemWithText:NSLocalizedString(@"SETTINGS_BACKUP_STATUS",
                                              @"Label for backup status row in the backup settings view.")
                            accessoryText:NSStringForBackupExportState(OWSBackup.sharedManager.backupExportState)]];
        if (OWSBackup.sharedManager.backupExportState == OWSBackupState_InProgress) {
            if (OWSBackup.sharedManager.backupExportDescription) {
                [progressSection
                    addItem:[OWSTableItem
                                labelItemWithText:NSLocalizedString(@"SETTINGS_BACKUP_PHASE",
                                                      @"Label for phase row in the backup settings view.")
                                    accessoryText:OWSBackup.sharedManager.backupExportDescription]];
                if (OWSBackup.sharedManager.backupExportProgress) {
                    NSUInteger progressPercent
                        = (NSUInteger)round(OWSBackup.sharedManager.backupExportProgress.floatValue * 100);
                    NSNumberFormatter *numberFormatter = [[NSNumberFormatter alloc] init];
                    [numberFormatter setNumberStyle:NSNumberFormatterPercentStyle];
                    [numberFormatter setMaximumFractionDigits:0];
                    [numberFormatter setMultiplier:@1];
                    NSString *progressString = [numberFormatter stringFromNumber:@(progressPercent)];
                    [progressSection
                        addItem:[OWSTableItem
                                    labelItemWithText:NSLocalizedString(@"SETTINGS_BACKUP_PROGRESS",
                                                          @"Label for progress row in the backup settings view.")
                                        accessoryText:progressString]];
                }
            }
        }

        switch (OWSBackup.sharedManager.backupExportState) {
            case OWSBackupState_Idle:
            case OWSBackupState_Failed:
            case OWSBackupState_Succeeded:
                [progressSection
                    addItem:[OWSTableItem disclosureItemWithText:
                                              NSLocalizedString(@"SETTINGS_BACKUP_BACKUP_NOW",
                                                  @"Label for 'backup now' button in the backup settings view.")
                                          actionBlock:^{
                                              [OWSBackup.sharedManager tryToExportBackup];
                                          }]];
                break;
            case OWSBackupState_InProgress:
                [progressSection
                    addItem:[OWSTableItem disclosureItemWithText:
                                              NSLocalizedString(@"SETTINGS_BACKUP_CANCEL_BACKUP",
                                                  @"Label for 'cancel backup' button in the backup settings view.")
                                          actionBlock:^{
                                              [OWSBackup.sharedManager cancelExportBackup];
                                          }]];
                break;
        }

        [contents addSection:progressSection];
    }

    self.contents = contents;
}

- (void)isBackupEnabledDidChange:(UISwitch *)sender
{
    [OWSBackup.sharedManager setIsBackupEnabled:sender.isOn];

    [self updateTableContents];
}

#pragma mark - Events

- (void)backupStateDidChange:(NSNotification *)notification
{
    OWSAssertIsOnMainThread();

    [self updateTableContents];
}

- (void)applicationDidBecomeActive:(NSNotification *)notification
{
    OWSAssertIsOnMainThread();

    [self updateICloudStatus];
}

@end

NS_ASSUME_NONNULL_END
@@ -0,0 +1,357 @@
import Foundation
import WebRTC
import SessionMessagingKit
import PromiseKit
import CallKit

public final class SessionCall: NSObject, WebRTCSessionDelegate {

    @objc static let isEnabled = true

    // MARK: Metadata Properties
    let uuid: String
    let callID: UUID // This is for CallKit
    let sessionID: String
    let mode: Mode
    var audioMode: AudioMode
    let webRTCSession: WebRTCSession
    let isOutgoing: Bool
    var remoteSDP: RTCSessionDescription? = nil
    var callMessageID: String?
    var answerCallAction: CXAnswerCallAction? = nil
    var contactName: String {
        let contact = Storage.shared.getContact(with: self.sessionID)
        return contact?.displayName(for: Contact.Context.regular) ?? "\(self.sessionID.prefix(4))...\(self.sessionID.suffix(4))"
    }
    var profilePicture: UIImage {
        if let result = OWSProfileManager.shared().profileAvatar(forRecipientId: sessionID) {
            return result
        } else {
            return Identicon.generatePlaceholderIcon(seed: sessionID, text: contactName, size: 300)
        }
    }

    // MARK: Control
    lazy public var videoCapturer: RTCVideoCapturer = {
        return RTCCameraVideoCapturer(delegate: webRTCSession.localVideoSource)
    }()

    var isRemoteVideoEnabled = false {
        didSet {
            remoteVideoStateDidChange?(isRemoteVideoEnabled)
        }
    }

    var isMuted = false {
        willSet {
            if newValue {
                webRTCSession.mute()
            } else {
                webRTCSession.unmute()
            }
        }
    }
    var isVideoEnabled = false {
        willSet {
            if newValue {
                webRTCSession.turnOnVideo()
            } else {
                webRTCSession.turnOffVideo()
            }
        }
    }

    // MARK: Mode
    enum Mode {
        case offer
        case answer
    }

    // MARK: End call mode
    enum EndCallMode {
        case local
        case remote
        case unanswered
        case answeredElsewhere
    }

    // MARK: Audio I/O mode
    enum AudioMode {
        case earpiece
        case speaker
        case headphone
        case bluetooth
    }

    // MARK: Call State Properties
    var connectingDate: Date? {
        didSet {
            stateDidChange?()
            hasStartedConnectingDidChange?()
        }
    }

    var connectedDate: Date? {
        didSet {
            stateDidChange?()
            hasConnectedDidChange?()
        }
    }

    var endDate: Date? {
        didSet {
            stateDidChange?()
            hasEndedDidChange?()
        }
    }

    // Not yet implemented
    var isOnHold = false {
        didSet {
            stateDidChange?()
        }
    }

    // MARK: State Change Callbacks
    var stateDidChange: (() -> Void)?
    var hasStartedConnectingDidChange: (() -> Void)?
    var hasConnectedDidChange: (() -> Void)?
    var hasEndedDidChange: (() -> Void)?
    var remoteVideoStateDidChange: ((Bool) -> Void)?
    var hasStartedReconnecting: (() -> Void)?
    var hasReconnected: (() -> Void)?

    // MARK: Derived Properties
    var hasStartedConnecting: Bool {
        get { return connectingDate != nil }
        set { connectingDate = newValue ? Date() : nil }
    }

    var hasConnected: Bool {
        get { return connectedDate != nil }
        set { connectedDate = newValue ? Date() : nil }
    }

    var hasEnded: Bool {
        get { return endDate != nil }
        set { endDate = newValue ? Date() : nil }
    }

    var timeOutTimer: Timer? = nil
    var didTimeout = false

    var duration: TimeInterval {
        guard let connectedDate = connectedDate else {
            return 0
        }
        if let endDate = endDate {
            return endDate.timeIntervalSince(connectedDate)
        }

        return Date().timeIntervalSince(connectedDate)
    }

    var reconnectTimer: Timer? = nil

    // MARK: Initialization
    init(for sessionID: String, uuid: String, mode: Mode, outgoing: Bool = false) {
        self.sessionID = sessionID
        self.uuid = uuid
        self.callID = UUID()
        self.mode = mode
        self.audioMode = .earpiece
        self.webRTCSession = WebRTCSession.current ?? WebRTCSession(for: sessionID, with: uuid)
        self.isOutgoing = outgoing
        WebRTCSession.current = self.webRTCSession
        super.init()
        self.webRTCSession.delegate = self
        if AppEnvironment.shared.callManager.currentCall == nil {
            AppEnvironment.shared.callManager.currentCall = self
        } else {
            SNLog("[Calls] A call is ongoing.")
        }
    }

    func reportIncomingCallIfNeeded(completion: @escaping (Error?) -> Void) {
        guard case .answer = mode else { return }
        setupTimeoutTimer()
        AppEnvironment.shared.callManager.reportIncomingCall(self, callerName: contactName) { error in
            completion(error)
        }
    }

    func didReceiveRemoteSDP(sdp: RTCSessionDescription) {
        SNLog("[Calls] Did receive remote sdp.")
        remoteSDP = sdp
        if hasStartedConnecting {
            webRTCSession.handleRemoteSDP(sdp, from: sessionID) // This sends an answer message internally
        }
    }

    // MARK: Actions
    func startSessionCall() {
        guard case .offer = mode else { return }
        guard let thread = TSContactThread.fetch(uniqueId: TSContactThread.threadID(fromContactSessionID: sessionID)) else { return }

        let message = CallMessage()
        message.sender = getUserHexEncodedPublicKey()
        message.sentTimestamp = NSDate.millisecondTimestamp()
        message.uuid = self.uuid
        message.kind = .preOffer
        let infoMessage = TSInfoMessage.from(message, associatedWith: thread)
        infoMessage.save()
        self.callMessageID = infoMessage.uniqueId

        var promise: Promise<Void>!
        Storage.write(with: { transaction in
            promise = self.webRTCSession.sendPreOffer(message, in: thread, using: transaction)
        }, completion: { [weak self] in
            let _ = promise.done {
                Storage.shared.write { transaction in
                    self?.webRTCSession.sendOffer(to: self!.sessionID, using: transaction as! YapDatabaseReadWriteTransaction).retainUntilComplete()
                }
                self?.setupTimeoutTimer()
            }
        })
    }

    func answerSessionCall() {
        guard case .answer = mode else { return }
        hasStartedConnecting = true
        if let sdp = remoteSDP {
            webRTCSession.handleRemoteSDP(sdp, from: sessionID) // This sends an answer message internally
        }
    }

    func answerSessionCallInBackground(action: CXAnswerCallAction) {
        answerCallAction = action
        self.answerSessionCall()
    }

    func endSessionCall() {
        guard !hasEnded else { return }
        webRTCSession.hangUp()
        Storage.write { transaction in
            self.webRTCSession.endCall(with: self.sessionID, using: transaction)
        }
        hasEnded = true
    }

    // MARK: Update call message
    func updateCallMessage(mode: EndCallMode) {
        guard let callMessageID = callMessageID else { return }
        Storage.write { transaction in
            let infoMessage = TSInfoMessage.fetch(uniqueId: callMessageID, transaction: transaction)
            if let messageToUpdate = infoMessage {
                var shouldMarkAsRead = false
                if self.duration > 0 {
                    shouldMarkAsRead = true
                } else if self.hasStartedConnecting {
                    shouldMarkAsRead = true
                } else {
                    switch mode {
                    case .local:
                        shouldMarkAsRead = true
                        fallthrough
                    case .remote:
                        fallthrough
                    case .unanswered:
                        if messageToUpdate.callState == .incoming {
                            messageToUpdate.updateCallInfoMessage(.missed, using: transaction)
                        }
                    case .answeredElsewhere:
                        shouldMarkAsRead = true
                    }
                }
                if shouldMarkAsRead {
                    messageToUpdate.markAsRead(atTimestamp: NSDate.ows_millisecondTimeStamp(), trySendReadReceipt: false, transaction: transaction)
                }
            }
        }
    }

    // MARK: Renderer
    func attachRemoteVideoRenderer(_ renderer: RTCVideoRenderer) {
        webRTCSession.attachRemoteRenderer(renderer)
    }

    func removeRemoteVideoRenderer(_ renderer: RTCVideoRenderer) {
        webRTCSession.removeRemoteRenderer(renderer)
    }

    func attachLocalVideoRenderer(_ renderer: RTCVideoRenderer) {
        webRTCSession.attachLocalRenderer(renderer)
    }

    // MARK: Delegate
    public func webRTCIsConnected() {
        self.invalidateTimeoutTimer()
        self.reconnectTimer?.invalidate()
        guard !self.hasConnected else {
            hasReconnected?()
            return
        }
        self.hasConnected = true
        self.answerCallAction?.fulfill()
    }

    public func isRemoteVideoDidChange(isEnabled: Bool) {
        isRemoteVideoEnabled = isEnabled
    }

    public func didReceiveHangUpSignal() {
        self.hasEnded = true
        DispatchQueue.main.async {
            if let currentBanner = IncomingCallBanner.current { currentBanner.dismiss() }
            if let callVC = CurrentAppContext().frontmostViewController() as? CallVC { callVC.handleEndCallMessage() }
            if let miniCallView = MiniCallView.current { miniCallView.dismiss() }
            AppEnvironment.shared.callManager.reportCurrentCallEnded(reason: .remoteEnded)
        }
    }

    public func dataChannelDidOpen() {
        // Send initial video status
        if (isVideoEnabled) {
            webRTCSession.turnOnVideo()
        } else {
            webRTCSession.turnOffVideo()
        }
    }

    public func reconnectIfNeeded() {
        setupTimeoutTimer()
        hasStartedReconnecting?()
        guard isOutgoing else { return }
        tryToReconnect()
    }

    private func tryToReconnect() {
        reconnectTimer?.invalidate()
        if SSKEnvironment.shared.reachabilityManager.isReachable {
            Storage.write { transaction in
                self.webRTCSession.sendOffer(to: self.sessionID, using: transaction, isRestartingICEConnection: true).retainUntilComplete()
            }
        } else {
            reconnectTimer = Timer.scheduledTimerOnMainThread(withTimeInterval: 5, repeats: false) { _ in
                self.tryToReconnect()
            }
        }
    }

    // MARK: Timeout
    public func setupTimeoutTimer() {
        invalidateTimeoutTimer()
        let timeInterval: TimeInterval = hasConnected ? 60 : 30
        timeOutTimer = Timer.scheduledTimerOnMainThread(withTimeInterval: timeInterval, repeats: false) { _ in
            self.didTimeout = true
            AppEnvironment.shared.callManager.endCall(self) { error in
                self.timeOutTimer = nil
            }
        }
    }

    public func invalidateTimeoutTimer() {
        timeOutTimer?.invalidate()
        timeOutTimer = nil
    }
}
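Editor's sketch (illustrative, not part of the diff): a minimal example of how an outgoing call object could be created and started with the SessionCall API above; the recipient Session ID value is a placeholder assumption.

// Hypothetical usage sketch, assuming a valid recipient Session ID.
let recipientSessionID = "05...recipientPublicKey"   // placeholder value
let call = SessionCall(for: recipientSessionID, uuid: UUID().uuidString, mode: .offer, outgoing: true)
call.stateDidChange = { print("[Calls] state changed") } // observe connecting/connected/ended transitions
call.startSessionCall()                                  // saves the info message, sends the pre-offer, then the offer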
@@ -0,0 +1,47 @@
extension SessionCallManager {
    @discardableResult
    public func startCallAction() -> Bool {
        guard let call = self.currentCall else { return false }
        call.startSessionCall()
        return true
    }

    @discardableResult
    public func answerCallAction() -> Bool {
        guard let call = self.currentCall else { return false }
        if let _ = CurrentAppContext().frontmostViewController() as? CallVC {
            call.answerSessionCall()
        } else {
            guard let presentingVC = CurrentAppContext().frontmostViewController() else { return false } // FIXME: Handle more gracefully
            let callVC = CallVC(for: self.currentCall!)
            if let conversationVC = presentingVC as? ConversationVC {
                callVC.conversationVC = conversationVC
                conversationVC.inputAccessoryView?.isHidden = true
                conversationVC.inputAccessoryView?.alpha = 0
            }
            presentingVC.present(callVC, animated: true) {
                call.answerSessionCall()
            }
        }
        return true
    }

    @discardableResult
    public func endCallAction() -> Bool {
        guard let call = self.currentCall else { return false }
        call.endSessionCall()
        if call.didTimeout {
            reportCurrentCallEnded(reason: .unanswered)
        } else {
            reportCurrentCallEnded(reason: nil)
        }
        return true
    }

    @discardableResult
    public func setMutedCallAction(isMuted: Bool) -> Bool {
        guard let call = self.currentCall else { return false }
        call.isMuted = isMuted
        return true
    }
}
@@ -0,0 +1,72 @@
import CallKit
import SessionUtilitiesKit

extension SessionCallManager {
    public func startCall(_ call: SessionCall, completion: ((Error?) -> Void)?) {
        guard case .offer = call.mode else { return }
        guard !call.hasConnected else { return }
        reportOutgoingCall(call)
        if callController != nil {
            let handle = CXHandle(type: .generic, value: call.sessionID)
            let startCallAction = CXStartCallAction(call: call.callID, handle: handle)

            startCallAction.isVideo = false

            let transaction = CXTransaction()
            transaction.addAction(startCallAction)

            requestTransaction(transaction, completion: completion)
        } else {
            startCallAction()
            completion?(nil)
        }
    }

    public func answerCall(_ call: SessionCall, completion: ((Error?) -> Void)?) {
        if callController != nil {
            let answerCallAction = CXAnswerCallAction(call: call.callID)
            let transaction = CXTransaction()
            transaction.addAction(answerCallAction)

            requestTransaction(transaction, completion: completion)
        } else {
            answerCallAction()
            completion?(nil)
        }
    }

    public func endCall(_ call: SessionCall, completion: ((Error?) -> Void)?) {
        if callController != nil {
            let endCallAction = CXEndCallAction(call: call.callID)
            let transaction = CXTransaction()
            transaction.addAction(endCallAction)

            requestTransaction(transaction, completion: completion)
        } else {
            endCallAction()
            completion?(nil)
        }
    }

    // Not currently in use
    public func setOnHoldStatus(for call: SessionCall) {
        if callController != nil {
            let setHeldCallAction = CXSetHeldCallAction(call: call.callID, onHold: true)
            let transaction = CXTransaction()
            transaction.addAction(setHeldCallAction)

            requestTransaction(transaction)
        }
    }

    private func requestTransaction(_ transaction: CXTransaction, completion: ((Error?) -> Void)? = nil) {
        callController?.request(transaction) { error in
            if let error = error {
                SNLog("Error requesting transaction: \(error)")
            } else {
                SNLog("Requested transaction successfully")
            }
            completion?(error)
        }
    }
}
@@ -0,0 +1,76 @@
import CallKit

extension SessionCallManager: CXProviderDelegate {
    public func providerDidReset(_ provider: CXProvider) {
        AssertIsOnMainThread()
        currentCall?.endSessionCall()
    }

    public func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
        AssertIsOnMainThread()
        if startCallAction() {
            action.fulfill()
        } else {
            action.fail()
        }
    }

    public func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
        AssertIsOnMainThread()
        print("[CallKit] Perform CXAnswerCallAction")
        guard let call = self.currentCall else { return action.fail() }
        if CurrentAppContext().isMainAppAndActive {
            if answerCallAction() {
                action.fulfill()
            } else {
                action.fail()
            }
        } else {
            call.answerSessionCallInBackground(action: action)
        }
    }

    public func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
        print("[CallKit] Perform CXEndCallAction")
        AssertIsOnMainThread()
        if endCallAction() {
            action.fulfill()
        } else {
            action.fail()
        }
    }

    public func provider(_ provider: CXProvider, perform action: CXSetMutedCallAction) {
        print("[CallKit] Perform CXSetMutedCallAction, isMuted: \(action.isMuted)")
        AssertIsOnMainThread()
        if setMutedCallAction(isMuted: action.isMuted) {
            action.fulfill()
        } else {
            action.fail()
        }
    }

    public func provider(_ provider: CXProvider, perform action: CXSetHeldCallAction) {
        // TODO: set on hold
    }

    public func provider(_ provider: CXProvider, timedOutPerforming action: CXAction) {
        // TODO: handle timeout
    }

    public func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
        print("[CallKit] Audio session did activate.")
        AssertIsOnMainThread()
        guard let call = self.currentCall else { return }
        call.webRTCSession.audioSessionDidActivate(audioSession)
        if call.isOutgoing && !call.hasConnected { CallRingTonePlayer.shared.startPlayingRingTone() }
    }

    public func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
        print("[CallKit] Audio session did deactivate.")
        AssertIsOnMainThread()
        guard let call = self.currentCall else { return }
        call.webRTCSession.audioSessionDidDeactivate(audioSession)
    }
}
@@ -0,0 +1,152 @@
import CallKit
import SessionMessagingKit

public final class SessionCallManager: NSObject {
    let provider: CXProvider?
    let callController: CXCallController?
    var currentCall: SessionCall? = nil {
        willSet {
            if (newValue != nil) {
                DispatchQueue.main.async {
                    UIApplication.shared.isIdleTimerDisabled = true
                }
            } else {
                DispatchQueue.main.async {
                    UIApplication.shared.isIdleTimerDisabled = false
                }
            }
        }
    }

    private static var _sharedProvider: CXProvider?
    class func sharedProvider(useSystemCallLog: Bool) -> CXProvider {
        let configuration = buildProviderConfiguration(useSystemCallLog: useSystemCallLog)

        if let sharedProvider = self._sharedProvider {
            sharedProvider.configuration = configuration
            return sharedProvider
        } else {
            SwiftSingletons.register(self)
            let provider = CXProvider(configuration: configuration)
            _sharedProvider = provider
            return provider
        }
    }

    class func buildProviderConfiguration(useSystemCallLog: Bool) -> CXProviderConfiguration {
        let localizedName = NSLocalizedString("APPLICATION_NAME", comment: "Name of application")
        let providerConfiguration = CXProviderConfiguration(localizedName: localizedName)
        providerConfiguration.supportsVideo = true
        providerConfiguration.maximumCallGroups = 1
        providerConfiguration.maximumCallsPerCallGroup = 1
        providerConfiguration.supportedHandleTypes = [.generic]
        let iconMaskImage = #imageLiteral(resourceName: "SessionGreen32")
        providerConfiguration.iconTemplateImageData = iconMaskImage.pngData()
        providerConfiguration.includesCallsInRecents = useSystemCallLog

        return providerConfiguration
    }

    init(useSystemCallLog: Bool = false) {
        AssertIsOnMainThread()
        if SSKPreferences.isCallKitSupported {
            self.provider = type(of: self).sharedProvider(useSystemCallLog: useSystemCallLog)
            self.callController = CXCallController()
        } else {
            self.provider = nil
            self.callController = nil
        }
        super.init()
        // We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
        self.provider?.setDelegate(self, queue: nil)
    }

    // MARK: Report calls
    public func reportOutgoingCall(_ call: SessionCall) {
        AssertIsOnMainThread()
        UserDefaults(suiteName: "group.com.loki-project.loki-messenger")?.set(true, forKey: "isCallOngoing")
        call.stateDidChange = {
            if call.hasStartedConnecting {
                self.provider?.reportOutgoingCall(with: call.callID, startedConnectingAt: call.connectingDate)
            }
            if call.hasConnected {
                self.provider?.reportOutgoingCall(with: call.callID, connectedAt: call.connectedDate)
            }
        }
    }

    public func reportIncomingCall(_ call: SessionCall, callerName: String, completion: @escaping (Error?) -> Void) {
        AssertIsOnMainThread()

        if let provider = provider {
            // Construct a CXCallUpdate describing the incoming call, including the caller.
            let update = CXCallUpdate()
            update.localizedCallerName = callerName
            update.remoteHandle = CXHandle(type: .generic, value: call.callID.uuidString)
            update.hasVideo = false

            disableUnsupportedFeatures(callUpdate: update)

            // Report the incoming call to the system
            provider.reportNewIncomingCall(with: call.callID, update: update) { error in
                guard error == nil else {
                    self.reportCurrentCallEnded(reason: .failed)
                    completion(error)
                    return
                }
                UserDefaults(suiteName: "group.com.loki-project.loki-messenger")?.set(true, forKey: "isCallOngoing")
                completion(nil)
            }
        } else {
            UserDefaults(suiteName: "group.com.loki-project.loki-messenger")?.set(true, forKey: "isCallOngoing")
            completion(nil)
        }
    }

    public func reportCurrentCallEnded(reason: CXCallEndedReason?) {
        guard let call = currentCall else { return }
        if let reason = reason {
            self.provider?.reportCall(with: call.callID, endedAt: nil, reason: reason)
            switch (reason) {
            case .answeredElsewhere: call.updateCallMessage(mode: .answeredElsewhere)
            case .unanswered: call.updateCallMessage(mode: .unanswered)
            case .declinedElsewhere: call.updateCallMessage(mode: .local)
            default: call.updateCallMessage(mode: .remote)
            }
        } else {
            call.updateCallMessage(mode: .local)
        }
        call.webRTCSession.dropConnection()
        self.currentCall = nil
        WebRTCSession.current = nil
        UserDefaults(suiteName: "group.com.loki-project.loki-messenger")?.set(false, forKey: "isCallOngoing")
    }

    // MARK: Util
    private func disableUnsupportedFeatures(callUpdate: CXCallUpdate) {
        // Call Holding is failing to restart audio when "swapping" calls on the CallKit screen
        // until user returns to in-app call screen.
        callUpdate.supportsHolding = false

        // Not yet supported
        callUpdate.supportsGrouping = false
        callUpdate.supportsUngrouping = false

        // Is there any reason to support this?
        callUpdate.supportsDTMF = false
    }

    public func handleIncomingCallOfferInBusyState(offerMessage: CallMessage, using transaction: YapDatabaseReadWriteTransaction) {
        guard let caller = offerMessage.sender, let thread = TSContactThread.fetch(for: caller, using: transaction) else { return }
        let message = CallMessage()
        message.uuid = offerMessage.uuid
        message.kind = .endCall
        SNLog("[Calls] Sending end call message because there is an ongoing call.")
        MessageSender.sendNonDurably(message, in: thread, using: transaction).retainUntilComplete()
        let infoMessage = TSInfoMessage.from(offerMessage, associatedWith: thread)
        infoMessage.updateCallInfoMessage(.missed, using: transaction)
    }

}
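Editor's sketch (illustrative, not part of the diff): how an incoming call might be surfaced through the manager above, mirroring SessionCall.reportIncomingCallIfNeeded; error handling is reduced to a log.

// Hypothetical usage sketch, assuming `call` was built with mode: .answer.
let manager = AppEnvironment.shared.callManager
manager.reportIncomingCall(call, callerName: call.contactName) { error in
    if let error = error { SNLog("[Calls] Failed to report incoming call: \(error)") }
}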
@@ -0,0 +1,22 @@
import WebRTC

extension CallVC : CameraManagerDelegate {

    func handleVideoOutputCaptured(sampleBuffer: CMSampleBuffer) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        let timestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        let timestampNs = Int64(timestamp * 1000000000)
        let rotation: RTCVideoRotation = {
            switch UIDevice.current.orientation {
            case .landscapeRight: return RTCVideoRotation._90
            case .portraitUpsideDown: return RTCVideoRotation._180
            case .landscapeLeft: return RTCVideoRotation._270
            default: return RTCVideoRotation._0
            }
        }()
        let frame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: rotation, timeStampNs: timestampNs)
        frame.timeStamp = Int32(timestamp)
        call.webRTCSession.handleLocalFrameCaptured(frame)
    }
}
@@ -0,0 +1,550 @@
import WebRTC
import SessionUIKit
import SessionMessagingKit
import SessionUtilitiesKit
import UIKit
import MediaPlayer

final class CallVC : UIViewController, VideoPreviewDelegate {
    let call: SessionCall
    var latestKnownAudioOutputDeviceName: String?
    var durationTimer: Timer?
    var duration: Int = 0
    var shouldRestartCamera = true
    weak var conversationVC: ConversationVC? = nil

    lazy var cameraManager: CameraManager = {
        let result = CameraManager()
        result.delegate = self
        return result
    }()

    // MARK: UI Components
    private lazy var localVideoView: LocalVideoView = {
        let result = LocalVideoView()
        result.isHidden = !call.isVideoEnabled
        result.layer.cornerRadius = 10
        result.layer.masksToBounds = true
        result.set(.width, to: LocalVideoView.width)
        result.set(.height, to: LocalVideoView.height)
        result.makeViewDraggable()
        return result
    }()

    private lazy var remoteVideoView: RemoteVideoView = {
        let result = RemoteVideoView()
        result.alpha = 0
        result.backgroundColor = .black
        result.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleRemoteVideoViewTapped)))
        return result
    }()

    private lazy var fadeView: UIView = {
        let result = UIView()
        let height: CGFloat = 64
        var frame = UIScreen.main.bounds
        frame.size.height = height
        let layer = CAGradientLayer()
        layer.frame = frame
        layer.colors = [ UIColor(hex: 0x000000).withAlphaComponent(0.4).cgColor, UIColor(hex: 0x000000).withAlphaComponent(0).cgColor ]
        result.layer.insertSublayer(layer, at: 0)
        result.set(.height, to: height)
        return result
    }()

    private lazy var profilePictureView: UIImageView = {
        let result = UIImageView()
        let radius: CGFloat = isIPhone6OrSmaller ? 100 : 120
        result.image = self.call.profilePicture
        result.set(.width, to: radius * 2)
        result.set(.height, to: radius * 2)
        result.layer.cornerRadius = radius
        result.layer.masksToBounds = true
        result.contentMode = .scaleAspectFill
        return result
    }()

    private lazy var minimizeButton: UIButton = {
        let result = UIButton(type: .custom)
        result.isHidden = !call.hasConnected
        let image = UIImage(named: "Minimize")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.addTarget(self, action: #selector(minimize), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var answerButton: UIButton = {
        let result = UIButton(type: .custom)
        result.isHidden = call.hasStartedConnecting
        let image = UIImage(named: "AnswerCall")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.backgroundColor = Colors.accent
        result.layer.cornerRadius = 30
        result.addTarget(self, action: #selector(answerCall), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var hangUpButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "EndCall")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.backgroundColor = Colors.destructive
        result.layer.cornerRadius = 30
        result.addTarget(self, action: #selector(endCall), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var responsePanel: UIStackView = {
        let result = UIStackView(arrangedSubviews: [hangUpButton, answerButton])
        result.axis = .horizontal
        result.spacing = Values.veryLargeSpacing * 2 + 40
        return result
    }()

    private lazy var switchCameraButton: UIButton = {
        let result = UIButton(type: .custom)
        result.isEnabled = call.isVideoEnabled
        let image = UIImage(named: "SwitchCamera")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.backgroundColor = UIColor(hex: 0x1F1F1F)
        result.layer.cornerRadius = 30
        result.addTarget(self, action: #selector(switchCamera), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var switchAudioButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "AudioOff")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.backgroundColor = call.isMuted ? Colors.destructive : UIColor(hex: 0x1F1F1F)
        result.layer.cornerRadius = 30
        result.addTarget(self, action: #selector(switchAudio), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var videoButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "VideoCall")?.withRenderingMode(.alwaysTemplate)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.tintColor = .white
        result.backgroundColor = UIColor(hex: 0x1F1F1F)
        result.layer.cornerRadius = 30
        result.addTarget(self, action: #selector(operateCamera), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var volumeView: MPVolumeView = {
        let result = MPVolumeView()
        let image = UIImage(named: "Speaker")?.withRenderingMode(.alwaysTemplate)
        result.showsVolumeSlider = false
        result.showsRouteButton = true
        result.setRouteButtonImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.tintColor = .white
        result.backgroundColor = UIColor(hex: 0x1F1F1F)
        result.layer.cornerRadius = 30
        return result
    }()

    private lazy var operationPanel: UIStackView = {
        let result = UIStackView(arrangedSubviews: [switchCameraButton, videoButton, switchAudioButton, volumeView])
        result.axis = .horizontal
        result.spacing = Values.veryLargeSpacing
        return result
    }()

    private lazy var titleLabel: UILabel = {
        let result = UILabel()
        result.textColor = .white
        result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
        result.textAlignment = .center
        return result
    }()

    private lazy var callInfoLabel: UILabel = {
        let result = UILabel()
        result.isHidden = call.hasConnected
        result.textColor = .white
        result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
        result.textAlignment = .center
        if call.hasStartedConnecting { result.text = "Connecting..." }
        return result
    }()

    private lazy var callDurationLabel: UILabel = {
        let result = UILabel()
        result.isHidden = true
        result.textColor = .white
        result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
        result.textAlignment = .center
        return result
    }()

    // MARK: Lifecycle
    init(for call: SessionCall) {
        self.call = call
        super.init(nibName: nil, bundle: nil)
        setupStateChangeCallbacks()
        self.modalPresentationStyle = .overFullScreen
        self.modalTransitionStyle = .crossDissolve
    }

    func setupStateChangeCallbacks() {
        self.call.remoteVideoStateDidChange = { isEnabled in
            DispatchQueue.main.async {
                UIView.animate(withDuration: 0.25) {
                    self.remoteVideoView.alpha = isEnabled ? 1 : 0
                }
                if self.callInfoLabel.alpha < 0.5 {
                    UIView.animate(withDuration: 0.25) {
                        self.operationPanel.alpha = 1
                        self.responsePanel.alpha = 1
                        self.callInfoLabel.alpha = 1
                    }
                }
            }
        }
        self.call.hasStartedConnectingDidChange = {
            DispatchQueue.main.async {
                self.callInfoLabel.text = "Connecting..."
                self.answerButton.alpha = 0
                UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
                    self.answerButton.isHidden = true
                }, completion: nil)
            }
        }
        self.call.hasConnectedDidChange = {
            DispatchQueue.main.async {
                CallRingTonePlayer.shared.stopPlayingRingTone()
                self.callInfoLabel.text = "Connected"
                self.minimizeButton.isHidden = false
                self.durationTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
                    self.updateDuration()
                }
                self.callInfoLabel.isHidden = true
                self.callDurationLabel.isHidden = false
            }
        }
        self.call.hasEndedDidChange = {
            DispatchQueue.main.async {
                self.durationTimer?.invalidate()
                self.durationTimer = nil
                self.handleEndCallMessage()
            }
        }
        self.call.hasStartedReconnecting = {
            DispatchQueue.main.async {
                self.callInfoLabel.isHidden = false
                self.callDurationLabel.isHidden = true
                self.callInfoLabel.text = "Reconnecting..."
            }
        }
        self.call.hasReconnected = {
            DispatchQueue.main.async {
                self.callInfoLabel.isHidden = true
                self.callDurationLabel.isHidden = false
            }
        }
    }

    required init(coder: NSCoder) { preconditionFailure("Use init(for:) instead.") }

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        setUpViewHierarchy()
        if shouldRestartCamera { cameraManager.prepare() }
        touch(call.videoCapturer)
        titleLabel.text = self.call.contactName
        AppEnvironment.shared.callManager.startCall(call) { error in
            DispatchQueue.main.async {
                if let _ = error {
                    self.callInfoLabel.text = "Can't start a call."
                    self.endCall()
                } else {
                    self.callInfoLabel.text = "Ringing..."
                    self.answerButton.isHidden = true
                }
            }
        }
        setupOrientationMonitoring()
        NotificationCenter.default.addObserver(self, selector: #selector(audioRouteDidChange), name: AVAudioSession.routeChangeNotification, object: nil)
    }

    deinit {
        UIDevice.current.endGeneratingDeviceOrientationNotifications()
        NotificationCenter.default.removeObserver(self)
    }

    func setUpViewHierarchy() {
        // Profile picture container
        let profilePictureContainer = UIView()
        view.addSubview(profilePictureContainer)
        // Remote video view
        call.attachRemoteVideoRenderer(remoteVideoView)
        view.addSubview(remoteVideoView)
        remoteVideoView.translatesAutoresizingMaskIntoConstraints = false
        remoteVideoView.pin(to: view)
        // Local video view
        call.attachLocalVideoRenderer(localVideoView)
        // Fade view
        view.addSubview(fadeView)
        fadeView.translatesAutoresizingMaskIntoConstraints = false
        fadeView.pin([ UIView.HorizontalEdge.left, UIView.VerticalEdge.top, UIView.HorizontalEdge.right ], to: view)
        // Minimize button
        view.addSubview(minimizeButton)
        minimizeButton.translatesAutoresizingMaskIntoConstraints = false
        minimizeButton.pin(.left, to: .left, of: view)
        minimizeButton.pin(.top, to: .top, of: view, withInset: 32)
        // Title label
        view.addSubview(titleLabel)
        titleLabel.translatesAutoresizingMaskIntoConstraints = false
        titleLabel.center(.vertical, in: minimizeButton)
        titleLabel.center(.horizontal, in: view)
        // Response Panel
        view.addSubview(responsePanel)
        responsePanel.center(.horizontal, in: view)
        responsePanel.pin(.bottom, to: .bottom, of: view, withInset: -Values.newConversationButtonBottomOffset)
        // Operation Panel
        view.addSubview(operationPanel)
        operationPanel.center(.horizontal, in: view)
        operationPanel.pin(.bottom, to: .top, of: responsePanel, withInset: -Values.veryLargeSpacing)
        // Profile picture view
        profilePictureContainer.pin(.top, to: .bottom, of: fadeView)
        profilePictureContainer.pin(.bottom, to: .top, of: operationPanel)
        profilePictureContainer.pin([ UIView.HorizontalEdge.left, UIView.HorizontalEdge.right ], to: view)
        profilePictureContainer.addSubview(profilePictureView)
        profilePictureView.center(in: profilePictureContainer)
        // Call info label
        let callInfoLabelContainer = UIView()
        view.addSubview(callInfoLabelContainer)
        callInfoLabelContainer.pin(.top, to: .bottom, of: profilePictureView)
        callInfoLabelContainer.pin(.bottom, to: .bottom, of: profilePictureContainer)
        callInfoLabelContainer.pin([ UIView.HorizontalEdge.left, UIView.HorizontalEdge.right ], to: view)
        callInfoLabelContainer.addSubview(callInfoLabel)
        callInfoLabelContainer.addSubview(callDurationLabel)
        callInfoLabel.translatesAutoresizingMaskIntoConstraints = false
        callInfoLabel.center(in: callInfoLabelContainer)
        callDurationLabel.translatesAutoresizingMaskIntoConstraints = false
        callDurationLabel.center(in: callInfoLabelContainer)
    }

    private func addLocalVideoView() {
        let safeAreaInsets = UIApplication.shared.keyWindow!.safeAreaInsets
        let window = CurrentAppContext().mainWindow!
        window.addSubview(localVideoView)
        localVideoView.autoPinEdge(toSuperviewEdge: .right, withInset: Values.smallSpacing)
        let topMargin = safeAreaInsets.top + Values.veryLargeSpacing
        localVideoView.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        if (call.isVideoEnabled && shouldRestartCamera) { cameraManager.start() }
        shouldRestartCamera = true
        addLocalVideoView()
        remoteVideoView.alpha = call.isRemoteVideoEnabled ? 1 : 0
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if (call.isVideoEnabled && shouldRestartCamera) { cameraManager.stop() }
        localVideoView.removeFromSuperview()
    }

    // MARK: - Orientation

    private func setupOrientationMonitoring() {
        UIDevice.current.beginGeneratingDeviceOrientationNotifications()
        NotificationCenter.default.addObserver(self, selector: #selector(didChangeDeviceOrientation), name: UIDevice.orientationDidChangeNotification, object: UIDevice.current)
    }

    @objc func didChangeDeviceOrientation(notification: Notification) {

        func rotateAllButtons(rotationAngle: CGFloat) {
            let transform = CGAffineTransform(rotationAngle: rotationAngle)
            UIView.animate(withDuration: 0.2) {
                self.answerButton.transform = transform
                self.hangUpButton.transform = transform
                self.switchAudioButton.transform = transform
                self.switchCameraButton.transform = transform
                self.videoButton.transform = transform
                self.volumeView.transform = transform
            }
        }

        switch UIDevice.current.orientation {
        case .portrait:
            rotateAllButtons(rotationAngle: 0)
        case .portraitUpsideDown:
            rotateAllButtons(rotationAngle: .pi)
        case .landscapeLeft:
            rotateAllButtons(rotationAngle: .halfPi)
        case .landscapeRight:
            rotateAllButtons(rotationAngle: .pi + .halfPi)
        default:
            break
        }
    }

    // MARK: Call signalling
    func handleAnswerMessage(_ message: CallMessage) {
        callInfoLabel.text = "Connecting..."
    }

    func handleEndCallMessage() {
        SNLog("[Calls] Ending call.")
        self.callInfoLabel.isHidden = false
        self.callDurationLabel.isHidden = true
        callInfoLabel.text = "Call Ended"
        UIView.animate(withDuration: 0.25) {
            self.remoteVideoView.alpha = 0
            self.operationPanel.alpha = 1
            self.responsePanel.alpha = 1
            self.callInfoLabel.alpha = 1
        }
        Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { _ in
            self.conversationVC?.showInputAccessoryView()
            self.presentingViewController?.dismiss(animated: true, completion: nil)
        }
    }

    @objc private func answerCall() {
        AppEnvironment.shared.callManager.answerCall(call) { error in
            DispatchQueue.main.async {
                if let _ = error {
                    self.callInfoLabel.text = "Can't answer the call."
                    self.endCall()
                }
            }
        }
    }

    @objc private func endCall() {
        AppEnvironment.shared.callManager.endCall(call) { error in
            if let _ = error {
                self.call.endSessionCall()
                AppEnvironment.shared.callManager.reportCurrentCallEnded(reason: nil)
            }
            DispatchQueue.main.async {
                self.conversationVC?.showInputAccessoryView()
                self.presentingViewController?.dismiss(animated: true, completion: nil)
            }
        }
    }

    @objc private func updateDuration() {
        callDurationLabel.text = String(format: "%.2d:%.2d", duration/60, duration%60)
        duration += 1
    }

    // MARK: Minimize to a floating view
    @objc private func minimize() {
        self.shouldRestartCamera = false
        let miniCallView = MiniCallView(from: self)
        miniCallView.show()
        self.conversationVC?.showInputAccessoryView()
        presentingViewController?.dismiss(animated: true, completion: nil)
    }

    // MARK: Video and Audio
    @objc private func operateCamera() {
        if (call.isVideoEnabled) {
            localVideoView.isHidden = true
            cameraManager.stop()
            videoButton.tintColor = .white
            videoButton.backgroundColor = UIColor(hex: 0x1F1F1F)
            switchCameraButton.isEnabled = false
            call.isVideoEnabled = false
        } else {
            guard requestCameraPermissionIfNeeded() else { return }
            let previewVC = VideoPreviewVC()
            previewVC.delegate = self
            present(previewVC, animated: true, completion: nil)
        }
    }

    func cameraDidConfirmTurningOn() {
        localVideoView.isHidden = false
        cameraManager.prepare()
        cameraManager.start()
        videoButton.tintColor = UIColor(hex: 0x1F1F1F)
        videoButton.backgroundColor = .white
        switchCameraButton.isEnabled = true
        call.isVideoEnabled = true
    }

    @objc private func switchCamera() {
        cameraManager.switchCamera()
    }

    @objc private func switchAudio() {
        if call.isMuted {
            switchAudioButton.backgroundColor = UIColor(hex: 0x1F1F1F)
            call.isMuted = false
        } else {
            switchAudioButton.backgroundColor = Colors.destructive
            call.isMuted = true
        }
    }

    @objc private func audioRouteDidChange() {
        let currentSession = AVAudioSession.sharedInstance()
        let currentRoute = currentSession.currentRoute
        if let currentOutput = currentRoute.outputs.first {
            if let latestKnownAudioOutputDeviceName = latestKnownAudioOutputDeviceName, currentOutput.portName == latestKnownAudioOutputDeviceName { return }
            latestKnownAudioOutputDeviceName = currentOutput.portName
            switch currentOutput.portType {
            case .builtInSpeaker:
                let image = UIImage(named: "Speaker")?.withRenderingMode(.alwaysTemplate)
                volumeView.setRouteButtonImage(image, for: .normal)
                volumeView.tintColor = UIColor(hex: 0x1F1F1F)
                volumeView.backgroundColor = .white
            case .headphones:
                let image = UIImage(named: "Headsets")?.withRenderingMode(.alwaysTemplate)
                volumeView.setRouteButtonImage(image, for: .normal)
                volumeView.tintColor = UIColor(hex: 0x1F1F1F)
                volumeView.backgroundColor = .white
            case .bluetoothLE: fallthrough
            case .bluetoothA2DP:
                let image = UIImage(named: "Bluetooth")?.withRenderingMode(.alwaysTemplate)
                volumeView.setRouteButtonImage(image, for: .normal)
                volumeView.tintColor = UIColor(hex: 0x1F1F1F)
                volumeView.backgroundColor = .white
            case .bluetoothHFP:
                let image = UIImage(named: "Airpods")?.withRenderingMode(.alwaysTemplate)
                volumeView.setRouteButtonImage(image, for: .normal)
                volumeView.tintColor = UIColor(hex: 0x1F1F1F)
                volumeView.backgroundColor = .white
            case .builtInReceiver: fallthrough
            default:
                let image = UIImage(named: "Speaker")?.withRenderingMode(.alwaysTemplate)
                volumeView.setRouteButtonImage(image, for: .normal)
                volumeView.tintColor = .white
                volumeView.backgroundColor = UIColor(hex: 0x1F1F1F)
            }
        }
    }

    @objc private func handleRemoteVideoViewTapped(gesture: UITapGestureRecognizer) {
        let isHidden = callDurationLabel.alpha < 0.5
        UIView.animate(withDuration: 0.5) {
            self.operationPanel.alpha = isHidden ? 1 : 0
            self.responsePanel.alpha = isHidden ? 1 : 0
            self.callDurationLabel.alpha = isHidden ? 1 : 0
        }
    }
}
@@ -0,0 +1,88 @@
import Foundation
import AVFoundation
import SessionUtilitiesKit

@objc
protocol CameraManagerDelegate : AnyObject {

    func handleVideoOutputCaptured(sampleBuffer: CMSampleBuffer)
}

final class CameraManager : NSObject {
    private let captureSession = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private let videoDataOutputQueue
        = DispatchQueue(label: "CameraManager.videoDataOutputQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
    private let audioDataOutput = AVCaptureAudioDataOutput()
    private var isCapturing = false
    weak var delegate: CameraManagerDelegate?

    private var videoCaptureDevice: AVCaptureDevice?
    private var videoInput: AVCaptureDeviceInput?

    func prepare() {
        print("[Calls] Preparing camera.")
        addNewVideoIO(position: .front)
    }

    private func addNewVideoIO(position: AVCaptureDevice.Position) {
        if let videoCaptureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position),
            let videoInput = try? AVCaptureDeviceInput(device: videoCaptureDevice), captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
            self.videoCaptureDevice = videoCaptureDevice
            self.videoInput = videoInput
        }
        if captureSession.canAddOutput(videoDataOutput) {
            captureSession.addOutput(videoDataOutput)
            videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32BGRA) ]
            videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
            guard let connection = videoDataOutput.connection(with: AVMediaType.video) else { return }
            connection.videoOrientation = .portrait
            connection.automaticallyAdjustsVideoMirroring = false
            connection.isVideoMirrored = (position == .front)
        } else {
            SNLog("Couldn't add video data output to capture session.")
        }
    }

    func start() {
        guard !isCapturing else { return }
        print("[Calls] Starting camera.")
        isCapturing = true
        captureSession.startRunning()
    }

    func stop() {
        guard isCapturing else { return }
        print("[Calls] Stopping camera.")
        isCapturing = false
        captureSession.stopRunning()
    }

    func switchCamera() {
        guard let videoCaptureDevice = videoCaptureDevice, let videoInput = videoInput else { return }
        stop()
        if videoCaptureDevice.position == .front {
            captureSession.removeInput(videoInput)
            captureSession.removeOutput(videoDataOutput)
            addNewVideoIO(position: .back)
        } else {
            captureSession.removeInput(videoInput)
            captureSession.removeOutput(videoDataOutput)
            addNewVideoIO(position: .front)
        }
        start()
    }
}

extension CameraManager : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard connection == videoDataOutput.connection(with: .video) else { return }
        delegate?.handleVideoOutputCaptured(sampleBuffer: sampleBuffer)
    }

    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("[Calls] Frame dropped.")
    }
}
@ -0,0 +1,123 @@
import UIKit
import WebRTC

public protocol VideoPreviewDelegate : AnyObject {
    func cameraDidConfirmTurningOn()
}

class VideoPreviewVC: UIViewController, CameraManagerDelegate {
    weak var delegate: VideoPreviewDelegate?

    lazy var cameraManager: CameraManager = {
        let result = CameraManager()
        result.delegate = self
        return result
    }()

    // MARK: UI Components
    private lazy var renderView: RenderView = {
        let result = RenderView()
        return result
    }()

    private lazy var fadeView: UIView = {
        let result = UIView()
        let height: CGFloat = 64
        var frame = UIScreen.main.bounds
        frame.size.height = height
        let layer = CAGradientLayer()
        layer.frame = frame
        layer.colors = [ UIColor(hex: 0x000000).withAlphaComponent(0.4).cgColor, UIColor(hex: 0x000000).withAlphaComponent(0).cgColor ]
        result.layer.insertSublayer(layer, at: 0)
        result.set(.height, to: height)
        return result
    }()

    private lazy var closeButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "X")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.addTarget(self, action: #selector(cancel), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var confirmButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "Check")!.withTint(.white)
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 60)
        result.set(.height, to: 60)
        result.addTarget(self, action: #selector(confirm), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var titleLabel: UILabel = {
        let result = UILabel()
        result.text = "Preview"
        result.textColor = .white
        result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
        result.textAlignment = .center
        return result
    }()

    // MARK: Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        setUpViewHierarchy()
        cameraManager.prepare()
    }

    func setUpViewHierarchy() {
        // Preview video view
        view.addSubview(renderView)
        renderView.translatesAutoresizingMaskIntoConstraints = false
        renderView.pin(to: view)
        // Fade view
        view.addSubview(fadeView)
        fadeView.translatesAutoresizingMaskIntoConstraints = false
        fadeView.pin([ UIView.HorizontalEdge.left, UIView.VerticalEdge.top, UIView.HorizontalEdge.right ], to: view)
        // Close button
        view.addSubview(closeButton)
        closeButton.translatesAutoresizingMaskIntoConstraints = false
        closeButton.pin(.left, to: .left, of: view)
        closeButton.center(.vertical, in: fadeView)
        // Confirm button
        view.addSubview(confirmButton)
        confirmButton.translatesAutoresizingMaskIntoConstraints = false
        confirmButton.pin(.right, to: .right, of: view)
        confirmButton.center(.vertical, in: fadeView)
        // Title label
        view.addSubview(titleLabel)
        titleLabel.translatesAutoresizingMaskIntoConstraints = false
        titleLabel.center(.vertical, in: closeButton)
        titleLabel.center(.horizontal, in: view)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        cameraManager.start()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        cameraManager.stop()
    }

    // MARK: Interaction
    @objc func confirm() {
        delegate?.cameraDidConfirmTurningOn()
        self.dismiss(animated: true, completion: nil)
    }

    @objc func cancel() {
        self.dismiss(animated: true, completion: nil)
    }

    // MARK: CameraManagerDelegate
    func handleVideoOutputCaptured(sampleBuffer: CMSampleBuffer) {
        renderView.enqueue(sampleBuffer: sampleBuffer)
    }
}
@ -0,0 +1,57 @@
import UIKit

@objc
final class CallMissedTipsModal : Modal {
    private let caller: String

    // MARK: Lifecycle
    @objc
    init(caller: String) {
        self.caller = caller
        super.init(nibName: nil, bundle: nil)
        self.modalPresentationStyle = .overFullScreen
        self.modalTransitionStyle = .crossDissolve
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(caller:) instead.")
    }

    override init(nibName: String?, bundle: Bundle?) {
        preconditionFailure("Use init(caller:) instead.")
    }

    override func populateContentView() {
        // Tips icon
        let tipsIconImageView = UIImageView(image: UIImage(named: "Tips")?.withTint(Colors.text))
        tipsIconImageView.set(.width, to: 19)
        tipsIconImageView.set(.height, to: 28)
        // Title
        let titleLabel = UILabel()
        titleLabel.textColor = Colors.text
        titleLabel.font = .boldSystemFont(ofSize: Values.mediumFontSize)
        titleLabel.text = NSLocalizedString("modal_call_missed_tips_title", comment: "")
        titleLabel.textAlignment = .center
        // Message
        let messageLabel = UILabel()
        messageLabel.textColor = Colors.text
        messageLabel.font = .systemFont(ofSize: Values.smallFontSize)
        let message = String(format: NSLocalizedString("modal_call_missed_tips_explanation", comment: ""), caller)
        messageLabel.text = message
        messageLabel.numberOfLines = 0
        messageLabel.lineBreakMode = .byWordWrapping
        messageLabel.textAlignment = .natural
        // Cancel button
        cancelButton.setTitle(NSLocalizedString("OK", comment: ""), for: .normal)
        // Main stack view
        let mainStackView = UIStackView(arrangedSubviews: [ tipsIconImageView, titleLabel, messageLabel, cancelButton ])
        mainStackView.axis = .vertical
        mainStackView.alignment = .center
        mainStackView.spacing = Values.largeSpacing
        contentView.addSubview(mainStackView)
        mainStackView.pin(.leading, to: .leading, of: contentView, withInset: Values.largeSpacing)
        mainStackView.pin(.top, to: .top, of: contentView, withInset: Values.largeSpacing)
        contentView.pin(.trailing, to: .trailing, of: mainStackView, withInset: Values.largeSpacing)
        contentView.pin(.bottom, to: .bottom, of: mainStackView, withInset: Values.largeSpacing)
    }
}
@ -0,0 +1,83 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.

import WebRTC
import Foundation

// MARK: RemoteVideoView

class RemoteVideoView: RTCMTLVideoView {

    override func renderFrame(_ frame: RTCVideoFrame?) {
        super.renderFrame(frame)
        guard let frame = frame else { return }
        DispatchMainThreadSafe {
            let frameRatio = Double(frame.height) / Double(frame.width)
            let frameRotation = frame.rotation
            let deviceRotation = UIDevice.current.orientation
            var rotationOverride: RTCVideoRotation? = nil
            switch deviceRotation {
            case .portrait, .portraitUpsideDown:
                // We don't have to do anything, the renderer will automatically make sure it's right-side-up.
                break
            case .landscapeLeft:
                switch frameRotation {
                case RTCVideoRotation._0: rotationOverride = RTCVideoRotation._90 // Landscape left
                case RTCVideoRotation._90: rotationOverride = RTCVideoRotation._180 // Portrait
                case RTCVideoRotation._180: rotationOverride = RTCVideoRotation._270 // Landscape right
                case RTCVideoRotation._270: rotationOverride = RTCVideoRotation._0 // Portrait upside-down
                default: break
                }
            case .landscapeRight:
                switch frameRotation {
                case RTCVideoRotation._0: rotationOverride = RTCVideoRotation._270 // Landscape left
                case RTCVideoRotation._90: rotationOverride = RTCVideoRotation._0 // Portrait
                case RTCVideoRotation._180: rotationOverride = RTCVideoRotation._90 // Landscape right
                case RTCVideoRotation._270: rotationOverride = RTCVideoRotation._180 // Portrait upside-down
                default: break
                }
            default:
                // Do nothing if we're face down, face up, etc.
                // Assume we're already set up for the correct orientation.
                break
            }

            if let rotationOverride = rotationOverride {
                self.rotationOverride = NSNumber(value: rotationOverride.rawValue)
                if [ RTCVideoRotation._0, RTCVideoRotation._180 ].contains(rotationOverride) {
                    self.videoContentMode = .scaleAspectFill
                } else {
                    self.videoContentMode = .scaleAspectFit
                }
            } else {
                self.rotationOverride = nil
                if [ RTCVideoRotation._0, RTCVideoRotation._180 ].contains(frameRotation) {
                    self.videoContentMode = .scaleAspectFill
                } else {
                    self.videoContentMode = .scaleAspectFit
                }
            }
            // If the frame isn't a typical mobile aspect ratio, always use .scaleAspectFit
            if frameRatio < 1.5 {
                self.videoContentMode = .scaleAspectFit
            }
        }
    }
}

// MARK: LocalVideoView

class LocalVideoView: RTCMTLVideoView {

    static let width: CGFloat = 80
    static let height: CGFloat = 173

    override func renderFrame(_ frame: RTCVideoFrame?) {
        super.renderFrame(frame)
        DispatchMainThreadSafe {
            // This is a workaround for a weird issue where the rotationOverride
            // sometimes doesn't take effect if it's only set once during initialization.
            self.rotationOverride = NSNumber(value: RTCVideoRotation._0.rawValue)
            self.videoContentMode = .scaleAspectFill
        }
    }
}
@ -0,0 +1,191 @@
import UIKit
import WebRTC
import SessionMessagingKit

final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
    private static let swipeToOperateThreshold: CGFloat = 60
    private var previousY: CGFloat = 0
    let call: SessionCall

    // MARK: UI Components
    private lazy var profilePictureView: ProfilePictureView = {
        let result = ProfilePictureView()
        let size = CGFloat(60)
        result.size = size
        result.set(.width, to: size)
        result.set(.height, to: size)
        return result
    }()

    private lazy var displayNameLabel: UILabel = {
        let result = UILabel()
        result.textColor = UIColor.white
        result.font = .boldSystemFont(ofSize: Values.mediumFontSize)
        result.lineBreakMode = .byTruncatingTail
        return result
    }()

    private lazy var answerButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "AnswerCall")!.withTint(.white)?.resizedImage(to: CGSize(width: 24.8, height: 24.8))
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 48)
        result.set(.height, to: 48)
        result.backgroundColor = Colors.accent
        result.layer.cornerRadius = 24
        result.addTarget(self, action: #selector(answerCall), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var hangUpButton: UIButton = {
        let result = UIButton(type: .custom)
        let image = UIImage(named: "EndCall")!.withTint(.white)?.resizedImage(to: CGSize(width: 29.6, height: 11.2))
        result.setImage(image, for: UIControl.State.normal)
        result.set(.width, to: 48)
        result.set(.height, to: 48)
        result.backgroundColor = Colors.destructive
        result.layer.cornerRadius = 24
        result.addTarget(self, action: #selector(endCall), for: UIControl.Event.touchUpInside)
        return result
    }()

    private lazy var panGestureRecognizer: UIPanGestureRecognizer = {
        let result = UIPanGestureRecognizer(target: self, action: #selector(handlePan))
        result.delegate = self
        return result
    }()

    // MARK: Initialization
    public static var current: IncomingCallBanner?

    init(for call: SessionCall) {
        self.call = call
        super.init(frame: CGRect.zero)
        setUpViewHierarchy()
        setUpGestureRecognizers()
        if let incomingCallBanner = IncomingCallBanner.current {
            incomingCallBanner.dismiss()
        }
        IncomingCallBanner.current = self
    }

    override init(frame: CGRect) {
        preconditionFailure("Use init(for:) instead.")
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(for:) instead.")
    }

    private func setUpViewHierarchy() {
        self.backgroundColor = UIColor(hex: 0x000000).withAlphaComponent(0.8)
        self.layer.cornerRadius = Values.largeSpacing
        self.layer.masksToBounds = true
        self.set(.height, to: 100)
        profilePictureView.publicKey = call.sessionID
        profilePictureView.update()
        displayNameLabel.text = call.contactName
        let stackView = UIStackView(arrangedSubviews: [profilePictureView, displayNameLabel, hangUpButton, answerButton])
        stackView.axis = .horizontal
        stackView.alignment = .center
        stackView.spacing = Values.largeSpacing
        self.addSubview(stackView)
        stackView.center(.vertical, in: self)
        stackView.autoPinWidthToSuperview(withMargin: Values.mediumSpacing)
    }

    private func setUpGestureRecognizers() {
        let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
        tapGestureRecognizer.numberOfTapsRequired = 1
        addGestureRecognizer(tapGestureRecognizer)
        addGestureRecognizer(panGestureRecognizer)
    }

    // MARK: Interaction
    override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
        if gestureRecognizer == panGestureRecognizer {
            let v = panGestureRecognizer.velocity(in: self)
            return abs(v.y) > abs(v.x) // It has to be more vertical than horizontal
        } else {
            return true
        }
    }

    @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
        showCallVC(answer: false)
    }

    @objc private func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        let translationY = gestureRecognizer.translation(in: self).y
        switch gestureRecognizer.state {
        case .changed:
            self.transform = CGAffineTransform(translationX: 0, y: min(translationY, IncomingCallBanner.swipeToOperateThreshold))
            if abs(translationY) > IncomingCallBanner.swipeToOperateThreshold && abs(previousY) < IncomingCallBanner.swipeToOperateThreshold {
                UIImpactFeedbackGenerator(style: .heavy).impactOccurred() // Let the user know when they've hit the swipe to operate threshold
            }
            previousY = translationY
        case .ended, .cancelled:
            if abs(translationY) > IncomingCallBanner.swipeToOperateThreshold {
                if translationY > 0 { showCallVC(answer: false) }
                else { endCall() } // TODO: Or just put the call on hold?
            } else {
                self.transform = .identity
            }
        default: break
        }
    }

    @objc private func answerCall() {
        showCallVC(answer: true)
    }

    @objc private func endCall() {
        AppEnvironment.shared.callManager.endCall(call) { error in
            if let _ = error {
                self.call.endSessionCall()
                AppEnvironment.shared.callManager.reportCurrentCallEnded(reason: nil)
            }
            self.dismiss()
        }
    }

    public func showCallVC(answer: Bool) {
        dismiss()
        guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
        let callVC = CallVC(for: self.call)
        if let conversationVC = presentingVC as? ConversationVC {
            callVC.conversationVC = conversationVC
            conversationVC.inputAccessoryView?.isHidden = true
            conversationVC.inputAccessoryView?.alpha = 0
        }
        presentingVC.present(callVC, animated: true) {
            if answer { self.call.answerSessionCall() }
        }
    }

    public func show() {
        self.alpha = 0.0
        let window = CurrentAppContext().mainWindow!
        window.addSubview(self)
        let topMargin = window.safeAreaInsets.top - Values.smallSpacing
        self.autoPinWidthToSuperview(withMargin: Values.smallSpacing)
        self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
        UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
            self.alpha = 1.0
        }, completion: nil)
        CallRingTonePlayer.shared.startVibration()
        CallRingTonePlayer.shared.startPlayingRingTone()
    }

    public func dismiss() {
        CallRingTonePlayer.shared.stopVibrationIfPossible()
        CallRingTonePlayer.shared.stopPlayingRingTone()
        UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
            self.alpha = 0.0
        }, completion: { _ in
            IncomingCallBanner.current = nil
            self.removeFromSuperview()
        })
    }
}
@ -0,0 +1,161 @@
import UIKit
import WebRTC

final class MiniCallView: UIView, RTCVideoViewDelegate {
    var callVC: CallVC

    // MARK: UI
    private static let defaultSize: CGFloat = 100
    private let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
    private let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom

    private var width: NSLayoutConstraint?
    private var height: NSLayoutConstraint?
    private var left: NSLayoutConstraint?
    private var right: NSLayoutConstraint?
    private var top: NSLayoutConstraint?
    private var bottom: NSLayoutConstraint?

    private lazy var remoteVideoView: RTCMTLVideoView = {
        let result = RTCMTLVideoView()
        result.delegate = self
        result.alpha = self.callVC.call.isRemoteVideoEnabled ? 1 : 0
        result.videoContentMode = .scaleAspectFit
        result.backgroundColor = .black
        return result
    }()

    // MARK: Initialization
    public static var current: MiniCallView?

    init(from callVC: CallVC) {
        self.callVC = callVC
        super.init(frame: CGRect.zero)
        self.backgroundColor = UIColor.init(white: 0, alpha: 0.8)
        setUpViewHierarchy()
        setUpGestureRecognizers()
        MiniCallView.current = self
        self.callVC.call.remoteVideoStateDidChange = { isEnabled in
            DispatchQueue.main.async {
                UIView.animate(withDuration: 0.25) {
                    self.remoteVideoView.alpha = isEnabled ? 1 : 0
                    if !isEnabled {
                        self.width?.constant = MiniCallView.defaultSize
                        self.height?.constant = MiniCallView.defaultSize
                    }
                }
            }
        }
    }

    override init(frame: CGRect) {
        preconditionFailure("Use init(from:) instead.")
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(from:) instead.")
    }

    private func setUpViewHierarchy() {
        self.width = self.set(.width, to: MiniCallView.defaultSize)
        self.height = self.set(.height, to: MiniCallView.defaultSize)
        self.layer.cornerRadius = 10
        self.layer.masksToBounds = true
        // Background
        let background = getBackgroundView()
        self.addSubview(background)
        background.pin(to: self)
        // Remote video view
        callVC.call.attachRemoteVideoRenderer(remoteVideoView)
        self.addSubview(remoteVideoView)
        remoteVideoView.translatesAutoresizingMaskIntoConstraints = false
        remoteVideoView.pin(to: self)
    }

    private func getBackgroundView() -> UIView {
        let background = UIView()
        let imageView = UIImageView()
        imageView.layer.cornerRadius = 32
        imageView.layer.masksToBounds = true
        imageView.contentMode = .scaleAspectFill
        imageView.image = callVC.call.profilePicture
        background.addSubview(imageView)
        imageView.set(.width, to: 64)
        imageView.set(.height, to: 64)
        imageView.center(in: background)
        return background
    }

    private func setUpGestureRecognizers() {
        let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
        tapGestureRecognizer.numberOfTapsRequired = 1
        addGestureRecognizer(tapGestureRecognizer)
        makeViewDraggable()
    }

    // MARK: Interaction
    @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
        dismiss()
        guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
        presentingVC.present(callVC, animated: true, completion: nil)
    }

    public func show() {
        self.alpha = 0.0
        let window = CurrentAppContext().mainWindow!
        window.addSubview(self)
        left = self.autoPinEdge(toSuperviewEdge: .left)
        left?.isActive = false
        right = self.autoPinEdge(toSuperviewEdge: .right)
        top = self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
        bottom = self.autoPinEdge(toSuperviewEdge: .bottom, withInset: bottomMargin)
        bottom?.isActive = false
        UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
            self.alpha = 1.0
        }, completion: nil)
    }

    public func dismiss() {
        UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
            self.alpha = 0.0
        }, completion: { _ in
            self.callVC.call.removeRemoteVideoRenderer(self.remoteVideoView)
            self.callVC.setupStateChangeCallbacks()
            MiniCallView.current = nil
            self.removeFromSuperview()
        })
    }

    // MARK: RTCVideoViewDelegate
    func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) {
        let newSize = CGSize(width: min(160.0, 160.0 * size.width / size.height), height: min(160.0, 160.0 * size.height / size.width))
        persistCurrentPosition(newSize: newSize)
        self.width?.constant = newSize.width
        self.height?.constant = newSize.height
    }

    func persistCurrentPosition(newSize: CGSize) {
        let currentCenter = self.center

        if currentCenter.x < self.superview!.width() / 2 {
            left?.isActive = true
            right?.isActive = false
        } else {
            left?.isActive = false
            right?.isActive = true
        }

        let willTouchTop = currentCenter.y < newSize.height / 2 + topMargin
        let willTouchBottom = currentCenter.y + newSize.height / 2 >= self.superview!.height()
        if willTouchBottom {
            top?.isActive = false
            bottom?.isActive = true
        } else {
            let constant = willTouchTop ? topMargin : currentCenter.y - newSize.height / 2
            top?.constant = constant
            top?.isActive = true
            bottom?.isActive = false
        }
    }
}
@ -0,0 +1,36 @@
// Copyright © 2021 Rangeproof Pty Ltd. All rights reserved.

import UIKit
import AVFoundation
import CoreMedia

class RenderView: UIView {

    private lazy var displayLayer: AVSampleBufferDisplayLayer = {
        let result = AVSampleBufferDisplayLayer()
        result.videoGravity = .resizeAspectFill
        return result
    }()

    init() {
        super.init(frame: CGRect.zero)
        self.layer.addSublayer(displayLayer)
    }

    override init(frame: CGRect) {
        preconditionFailure("Use init() instead.")
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init() instead.")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        displayLayer.frame = self.bounds
    }

    public func enqueue(sampleBuffer: CMSampleBuffer) {
        displayLayer.enqueue(sampleBuffer)
    }
}
@ -0,0 +1,119 @@
import UIKit
import SessionMessagingKit

final class CallMessageCell : MessageCell {
    private lazy var iconImageViewWidthConstraint = iconImageView.set(.width, to: 0)
    private lazy var iconImageViewHeightConstraint = iconImageView.set(.height, to: 0)

    private lazy var infoImageViewWidthConstraint = infoImageView.set(.width, to: 0)
    private lazy var infoImageViewHeightConstraint = infoImageView.set(.height, to: 0)

    // MARK: UI Components
    private lazy var iconImageView = UIImageView()

    private lazy var infoImageView = UIImageView(image: UIImage(named: "ic_info")?.withTint(Colors.text))

    private lazy var timestampLabel: UILabel = {
        let result = UILabel()
        result.font = .boldSystemFont(ofSize: Values.verySmallFontSize)
        result.textColor = Colors.text
        result.textAlignment = .center
        return result
    }()

    private lazy var label: UILabel = {
        let result = UILabel()
        result.numberOfLines = 0
        result.lineBreakMode = .byWordWrapping
        result.font = .boldSystemFont(ofSize: Values.smallFontSize)
        result.textColor = Colors.text
        result.textAlignment = .center
        return result
    }()

    private lazy var container: UIView = {
        let result = UIView()
        result.set(.height, to: 50)
        result.layer.cornerRadius = 18
        result.backgroundColor = Colors.callMessageBackground
        result.addSubview(label)
        label.autoCenterInSuperview()
        result.addSubview(iconImageView)
        iconImageView.autoVCenterInSuperview()
        iconImageView.pin(.left, to: .left, of: result, withInset: CallMessageCell.inset)
        result.addSubview(infoImageView)
        infoImageView.autoVCenterInSuperview()
        infoImageView.pin(.right, to: .right, of: result, withInset: -CallMessageCell.inset)
        return result
    }()

    private lazy var stackView: UIStackView = {
        let result = UIStackView(arrangedSubviews: [ timestampLabel, container ])
        result.axis = .vertical
        result.alignment = .center
        result.spacing = Values.smallSpacing
        return result
    }()

    // MARK: Settings
    private static let iconSize: CGFloat = 16
    private static let inset = Values.mediumSpacing
    private static let margin = UIScreen.main.bounds.width * 0.1

    override class var identifier: String { "CallMessageCell" }

    // MARK: Lifecycle
    override func setUpViewHierarchy() {
        super.setUpViewHierarchy()
        iconImageViewWidthConstraint.isActive = true
        iconImageViewHeightConstraint.isActive = true
        addSubview(stackView)
        container.autoPinWidthToSuperview()
        stackView.pin(.left, to: .left, of: self, withInset: CallMessageCell.margin)
        stackView.pin(.top, to: .top, of: self, withInset: CallMessageCell.inset)
        stackView.pin(.right, to: .right, of: self, withInset: -CallMessageCell.margin)
        stackView.pin(.bottom, to: .bottom, of: self, withInset: -CallMessageCell.inset)
    }

    override func setUpGestureRecognizers() {
        let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
        tapGestureRecognizer.numberOfTapsRequired = 1
        addGestureRecognizer(tapGestureRecognizer)
    }

    // MARK: Updating
    override func update() {
        guard let message = viewItem?.interaction as? TSInfoMessage, message.messageType == .call else { return }
        let icon: UIImage?
        switch message.callState {
        case .outgoing: icon = UIImage(named: "CallOutgoing")?.withTint(Colors.text)
        case .incoming: icon = UIImage(named: "CallIncoming")?.withTint(Colors.text)
        case .missed, .permissionDenied: icon = UIImage(named: "CallMissed")?.withTint(Colors.destructive)
        default: icon = nil
        }
        iconImageView.image = icon
        iconImageViewWidthConstraint.constant = (icon != nil) ? CallMessageCell.iconSize : 0
        iconImageViewHeightConstraint.constant = (icon != nil) ? CallMessageCell.iconSize : 0

        let shouldShowInfoIcon = message.callState == .permissionDenied && !SSKPreferences.areCallsEnabled
        infoImageViewWidthConstraint.constant = shouldShowInfoIcon ? CallMessageCell.iconSize : 0
        infoImageViewHeightConstraint.constant = shouldShowInfoIcon ? CallMessageCell.iconSize : 0

        Storage.read { transaction in
            self.label.text = message.previewText(with: transaction)
        }

        let date = message.dateForUI()
        let description = DateUtil.formatDate(forDisplay: date)
        timestampLabel.text = description
    }

    @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
        guard let viewItem = viewItem, let message = viewItem.interaction as? TSInfoMessage, message.messageType == .call else { return }
        let shouldBeTappable = message.callState == .permissionDenied && !SSKPreferences.areCallsEnabled
        if shouldBeTappable {
            delegate?.handleViewItemTapped(viewItem, gestureRecognizer: gestureRecognizer)
        }
    }
}
@ -0,0 +1,51 @@
import UIKit

final class CallMessageView : UIView {
    private let viewItem: ConversationViewItem
    private let textColor: UIColor

    // MARK: Settings
    private static let iconSize: CGFloat = 24
    private static let iconImageViewSize: CGFloat = 40

    // MARK: Lifecycle
    init(viewItem: ConversationViewItem, textColor: UIColor) {
        self.viewItem = viewItem
        self.textColor = textColor
        super.init(frame: CGRect.zero)
        setUpViewHierarchy()
    }

    override init(frame: CGRect) {
        preconditionFailure("Use init(viewItem:textColor:) instead.")
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(viewItem:textColor:) instead.")
    }

    private func setUpViewHierarchy() {
        guard let message = viewItem.interaction as? TSMessage else { preconditionFailure() }
        // Image view
        let iconSize = CallMessageView.iconSize
        let icon = UIImage(named: "Phone")?.withTint(textColor)?.resizedImage(to: CGSize(width: iconSize, height: iconSize))
        let imageView = UIImageView(image: icon)
        imageView.contentMode = .center
        let iconImageViewSize = CallMessageView.iconImageViewSize
        imageView.set(.width, to: iconImageViewSize)
        imageView.set(.height, to: iconImageViewSize)
        // Body label
        let titleLabel = UILabel()
        titleLabel.lineBreakMode = .byTruncatingTail
        titleLabel.text = message.body
        titleLabel.textColor = textColor
        titleLabel.font = .systemFont(ofSize: Values.mediumFontSize)
        // Stack view
        let stackView = UIStackView(arrangedSubviews: [ imageView, titleLabel ])
        stackView.axis = .horizontal
        stackView.alignment = .center
        stackView.isLayoutMarginsRelativeArrangement = true
        stackView.layoutMargins = UIEdgeInsets(top: 0, leading: 0, bottom: 0, trailing: 12)
        addSubview(stackView)
        stackView.pin(to: self, withInset: Values.smallSpacing)
    }
}
@ -0,0 +1,70 @@
import UIKit

@objc
final class CallModal : Modal {
    private let onCallEnabled: () -> Void

    // MARK: Lifecycle
    @objc
    init(onCallEnabled: @escaping () -> Void) {
        self.onCallEnabled = onCallEnabled
        super.init(nibName: nil, bundle: nil)
        self.modalPresentationStyle = .overFullScreen
        self.modalTransitionStyle = .crossDissolve
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(onCallEnabled:) instead.")
    }

    override init(nibName: String?, bundle: Bundle?) {
        preconditionFailure("Use init(onCallEnabled:) instead.")
    }

    override func populateContentView() {
        // Title
        let titleLabel = UILabel()
        titleLabel.textColor = Colors.text
        titleLabel.font = .boldSystemFont(ofSize: Values.largeFontSize)
        titleLabel.text = NSLocalizedString("modal_call_title", comment: "")
        titleLabel.textAlignment = .center
        // Message
        let messageLabel = UILabel()
        messageLabel.textColor = Colors.text
        messageLabel.font = .systemFont(ofSize: Values.smallFontSize)
        let message = NSLocalizedString("modal_call_explanation", comment: "")
        messageLabel.text = message
        messageLabel.numberOfLines = 0
        messageLabel.lineBreakMode = .byWordWrapping
        messageLabel.textAlignment = .center
        // Enable button
        let enableButton = UIButton()
        enableButton.set(.height, to: Values.mediumButtonHeight)
        enableButton.layer.cornerRadius = Modal.buttonCornerRadius
        enableButton.backgroundColor = Colors.buttonBackground
        enableButton.titleLabel!.font = .systemFont(ofSize: Values.smallFontSize)
        enableButton.setTitleColor(Colors.text, for: UIControl.State.normal)
        enableButton.setTitle(NSLocalizedString("modal_link_previews_button_title", comment: ""), for: UIControl.State.normal)
        enableButton.addTarget(self, action: #selector(enable), for: UIControl.Event.touchUpInside)
        // Button stack view
        let buttonStackView = UIStackView(arrangedSubviews: [ cancelButton, enableButton ])
        buttonStackView.axis = .horizontal
        buttonStackView.spacing = Values.mediumSpacing
        buttonStackView.distribution = .fillEqually
        // Main stack view
        let mainStackView = UIStackView(arrangedSubviews: [ titleLabel, messageLabel, buttonStackView ])
        mainStackView.axis = .vertical
        mainStackView.spacing = Values.largeSpacing
        contentView.addSubview(mainStackView)
        mainStackView.pin(.leading, to: .leading, of: contentView, withInset: Values.largeSpacing)
        mainStackView.pin(.top, to: .top, of: contentView, withInset: Values.largeSpacing)
        contentView.pin(.trailing, to: .trailing, of: mainStackView, withInset: Values.largeSpacing)
        contentView.pin(.bottom, to: .bottom, of: mainStackView, withInset: Values.largeSpacing)
    }

    // MARK: Interaction
    @objc private func enable() {
        SSKPreferences.areCallsEnabled = true
        presentingViewController?.dismiss(animated: true, completion: nil)
        onCallEnabled()
    }
}
@ -0,0 +1,79 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.

import UIKit

@objc
final class CallPermissionRequestModal : Modal {

    // MARK: Lifecycle
    @objc
    init() {
        super.init(nibName: nil, bundle: nil)
        self.modalPresentationStyle = .overFullScreen
        self.modalTransitionStyle = .crossDissolve
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init() instead.")
    }

    override init(nibName: String?, bundle: Bundle?) {
        preconditionFailure("Use init() instead.")
    }

    override func populateContentView() {
        // Title
        let titleLabel = UILabel()
        titleLabel.textColor = Colors.text
        titleLabel.font = .boldSystemFont(ofSize: Values.mediumFontSize)
        titleLabel.text = NSLocalizedString("modal_call_permission_request_title", comment: "")
        titleLabel.textAlignment = .center
        // Message
        let messageLabel = UILabel()
        messageLabel.textColor = Colors.text
        messageLabel.font = .systemFont(ofSize: Values.smallFontSize)
        let message = NSLocalizedString("modal_call_permission_request_explanation", comment: "")
        messageLabel.text = message
        messageLabel.numberOfLines = 0
        messageLabel.lineBreakMode = .byWordWrapping
        messageLabel.textAlignment = .center
        // Go to settings button
        let goToSettingsButton = UIButton()
        goToSettingsButton.set(.height, to: Values.mediumButtonHeight)
        goToSettingsButton.layer.cornerRadius = Modal.buttonCornerRadius
        goToSettingsButton.backgroundColor = Colors.buttonBackground
        goToSettingsButton.titleLabel!.font = .systemFont(ofSize: Values.smallFontSize)
        goToSettingsButton.setTitleColor(Colors.text, for: UIControl.State.normal)
        goToSettingsButton.setTitle(NSLocalizedString("vc_settings_title", comment: ""), for: UIControl.State.normal)
        goToSettingsButton.addTarget(self, action: #selector(goToSettings), for: UIControl.Event.touchUpInside)
        // Content stack view
        let contentStackView = UIStackView(arrangedSubviews: [ titleLabel, messageLabel ])
        contentStackView.axis = .vertical
        contentStackView.spacing = Values.largeSpacing
        // Button stack view
        let buttonStackView = UIStackView(arrangedSubviews: [ cancelButton, goToSettingsButton ])
        buttonStackView.axis = .horizontal
        buttonStackView.distribution = .fillEqually
        // Main stack view
        let spacing = Values.largeSpacing - Values.smallFontSize / 2
        let mainStackView = UIStackView(arrangedSubviews: [ contentStackView, buttonStackView ])
        mainStackView.axis = .vertical
        mainStackView.spacing = spacing
        contentView.addSubview(mainStackView)
        mainStackView.pin(.leading, to: .leading, of: contentView, withInset: Values.largeSpacing)
        mainStackView.pin(.top, to: .top, of: contentView, withInset: Values.largeSpacing)
        contentView.pin(.trailing, to: .trailing, of: mainStackView, withInset: Values.largeSpacing)
        contentView.pin(.bottom, to: .bottom, of: mainStackView, withInset: spacing)
    }

    // MARK: Interaction
    @objc func goToSettings(_ sender: Any) {
        dismiss(animated: true, completion: {
            if let vc = CurrentAppContext().frontmostViewController() {
                let privacySettingsVC = PrivacySettingsTableViewController()
                privacySettingsVC.shouldShowCloseButton = true
                let nav = OWSNavigationController(rootViewController: privacySettingsVC)
                nav.modalPresentationStyle = .fullScreen
                vc.present(nav, animated: true, completion: nil)
            }
        })
    }
}
Binary file not shown.
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Airpods.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "AnswerCall.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "audio_off_fill.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Bluetooth.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "CallIncoming.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "CallMissed.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "CallOutgoing.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "check.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Path.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Headsets.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "minimize.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Phone.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "speaker.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "switch_camera_fill.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Tips.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "video_call_fill.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
Binary file not shown.
Some files were not shown because too many files have changed in this diff