Compiling, but video sending not working.

pull/1/head
Michael Kirk 7 years ago
parent 064035f3f4
commit 0cd1cb80cc

@ -883,13 +883,12 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver,
}
/// Flips between the front and back camera when the user taps the flip-camera button.
/// The button's `isSelected` state tracks the non-default (back) camera.
/// NOTE(review): the diff view had left both the old `useBackCamera` and new
/// `isUsingFrontCamera` call sites in place; resolved to the post-change version.
@objc func didPressFlipCamera(sender: UIButton) {
    // toggle value
    sender.isSelected = !sender.isSelected

    // Front camera is the default, so "selected" means the back camera is active.
    let isUsingFrontCamera = !sender.isSelected
    Logger.info("\(TAG) in \(#function) with isUsingFrontCamera: \(isUsingFrontCamera)")

    callUIAdapter.setCameraSource(call: call, isUsingFrontCamera: isUsingFrontCamera)
}
/**

@ -1278,20 +1278,14 @@ private class SignalCallData: NSObject {
self.setHasLocalVideo(hasLocalVideo: true)
}
/// Routes a camera-source change for the given call down to the active peer connection.
/// Must be called on the main thread. No-ops (with an assertion failure) if `call` is
/// not the current call or if there is no live `peerConnectionClient`.
/// NOTE(review): the diff view had both the old `useBackCamera` and new
/// `isUsingFrontCamera` signatures; resolved to the post-change version.
func setCameraSource(call: SignalCall, isUsingFrontCamera: Bool) {
    SwiftAssertIsOnMainThread(#function)

    guard call == self.call else {
        owsFail("\(logTag) in \(#function) for non-current call.")
        return
    }

    guard let peerConnectionClient = self.peerConnectionClient else {
        owsFail("\(logTag) in \(#function) peerConnectionClient was unexpectedly nil")
        return
    }

    peerConnectionClient.setCameraSource(isUsingFrontCamera: isUsingFrontCamera)
}
/**

@ -192,7 +192,7 @@ class PeerConnectionProxy: NSObject, RTCPeerConnectionDelegate, RTCDataChannelDe
* It is primarily a wrapper around `RTCPeerConnection`, which is responsible for sending and receiving our call data
* including audio, video, and some post-connected signaling (hangup, add video)
*/
class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelDelegate {
class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelDelegate, VideoCaptureSettingsDelegate {
enum Identifiers: String {
case mediaStream = "ARDAMS",
@ -232,6 +232,7 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
// Video
private var videoCaptureController: VideoCaptureController?
private var videoCaptureSession: AVCaptureSession?
private var videoSender: RTCRtpSender?
private var localVideoTrack: RTCVideoTrack?
@ -307,15 +308,21 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
let configuration = RTCDataChannelConfiguration()
// Insist upon an "ordered" TCP data channel for delivery reliability.
configuration.isOrdered = true
let dataChannel = peerConnection.dataChannel(forLabel: Identifiers.dataChannelSignaling.rawValue,
configuration: configuration)
guard let dataChannel = peerConnection.dataChannel(forLabel: Identifiers.dataChannelSignaling.rawValue,
configuration: configuration) else {
// TODO fail outgoing call?
owsFail("dataChannel was unexpectedly nil")
return
}
dataChannel.delegate = proxy
assert(self.dataChannel == nil)
self.dataChannel = dataChannel
}
// MARK: Video
// MARK: - Video
fileprivate func createVideoSender() {
SwiftAssertIsOnMainThread(#function)
@ -331,20 +338,18 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
return
}
// TODO: We could cap the maximum video size.
let cameraConstraints = RTCMediaConstraints(mandatoryConstraints: nil,
optionalConstraints: nil)
let videoSource = factory.videoSource()
// TODO: Revisit the cameraConstraints.
let videoSource = factory.avFoundationVideoSource(with: cameraConstraints)
// TODO - MJK I don't think anyone cares about videoSource, just the capturer. Remove it?
self.localVideoSource = videoSource
self.videoCaptureSession = videoSource.captureSession
videoSource.useBackCamera = false
let capturer = RTCCameraVideoCapturer(delegate: videoSource)
self.videoCaptureSession = capturer.captureSession
let localVideoTrack = factory.videoTrack(with: videoSource, trackId: Identifiers.videoTrack.rawValue)
self.localVideoTrack = localVideoTrack
self.videoCaptureController = VideoCaptureController(capturer: capturer, settingsDelegate: self)
// Disable by default until call is connected.
// FIXME - do we require mic permissions at this point?
// if so maybe it would be better to not even add the track until the call is connected
@ -356,24 +361,20 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
self.videoSender = videoSender
}
/// Switches the local video capture between the front and back camera.
/// Hops to the signaling queue; silently ignores the request if the client has
/// already been terminated (weak proxy resolves to nil).
/// Fixes vs. the diffed version:
///  - logs via `strongSelf.logTag` instead of `self.logTag`, so the escaping
///    closure does not strongly capture `self` and defeat the proxy pattern.
///  - drops the redundant `startCapture()` call: `switchCamera(isUsingFrontCamera:)`
///    already (re)starts capture on the newly selected device.
public func setCameraSource(isUsingFrontCamera: Bool) {
    SwiftAssertIsOnMainThread(#function)

    let proxyCopy = self.proxy
    PeerConnectionClient.signalingQueue.async {
        guard let strongSelf = proxyCopy.get() else { return }

        guard let captureController = strongSelf.videoCaptureController else {
            owsFail("\(strongSelf.logTag) in \(#function) captureController was unexpectedly nil")
            return
        }

        captureController.switchCamera(isUsingFrontCamera: isUsingFrontCamera)
    }
}
@ -416,7 +417,18 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
}
}
// MARK: VideoCaptureSettingsDelegate

// TODO(MJK): placeholder capture resolution — 400x400 is a stand-in; pick a real
// target (and possibly make it adaptive) before shipping.
var videoWidth: Int32 {
    return 400
}

var videoHeight: Int32 {
    return 400
}
// MARK: - Audio
fileprivate func createAudioSender() {
SwiftAssertIsOnMainThread(#function)
@ -1092,6 +1104,90 @@ class HardenedRTCSessionDescription {
}
}
/// Supplies the target capture dimensions used by `VideoCaptureController` when
/// selecting the closest-matching `AVCaptureDevice.Format`.
/// Class-bound so the controller can hold it weakly.
protocol VideoCaptureSettingsDelegate: class {
    /// Desired capture width in pixels.
    var videoWidth: Int32 { get }
    /// Desired capture height in pixels.
    var videoHeight: Int32 { get }
}
/// Owns an `RTCCameraVideoCapturer` and encapsulates device/format/frame-rate
/// selection for starting, stopping, and flipping local video capture.
class VideoCaptureController {

    let capturer: RTCCameraVideoCapturer
    // Weak: the delegate (PeerConnectionClient) owns this controller.
    weak var settingsDelegate: VideoCaptureSettingsDelegate?
    // Front camera is the default camera for calls.
    var isUsingFrontCamera: Bool = true

    public init(capturer: RTCCameraVideoCapturer, settingsDelegate: VideoCaptureSettingsDelegate) {
        self.capturer = capturer
        self.settingsDelegate = settingsDelegate
    }

    /// Starts (or restarts) capture on the currently selected camera, using the
    /// format closest to the delegate's target dimensions at that format's
    /// maximum supported frame rate.
    public func startCapture() {
        let position: AVCaptureDevice.Position = isUsingFrontCamera ? .front : .back
        guard let device: AVCaptureDevice = self.device(position: position) else {
            owsFail("unable to find captureDevice")
            return
        }

        // BUGFIX: this failure message previously said "captureDevice"
        // (copy-paste from the guard above), masking which lookup failed.
        guard let format: AVCaptureDevice.Format = self.format(device: device) else {
            owsFail("unable to find captureFormat")
            return
        }

        let fps = self.framesPerSecond(format: format)
        capturer.startCapture(with: device, format: format, fps: fps)
    }

    public func stopCapture() {
        self.capturer.stopCapture()
    }

    /// Records the requested camera and restarts capture on it.
    public func switchCamera(isUsingFrontCamera: Bool) {
        self.isUsingFrontCamera = isUsingFrontCamera
        self.startCapture()
    }

    /// Returns the capture device at `position`, falling back to any available
    /// device — certain hardware (e.g. 16GB iPod touch) has no back camera.
    private func device(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let captureDevices = RTCCameraVideoCapturer.captureDevices()
        guard let device = (captureDevices.first { $0.position == position }) else {
            Logger.debug("unable to find desired position: \(position)")
            return captureDevices.first
        }

        return device
    }

    /// Picks the supported format whose dimensions are closest (by summed
    /// width/height delta) to the delegate's target size. Returns nil only if
    /// the device reports no supported formats.
    private func format(device: AVCaptureDevice) -> AVCaptureDevice.Format? {
        let formats = RTCCameraVideoCapturer.supportedFormats(for: device)
        let targetWidth = settingsDelegate?.videoWidth ?? 0
        let targetHeight = settingsDelegate?.videoHeight ?? 0

        var selectedFormat: AVCaptureDevice.Format?
        var currentDiff: Int32 = Int32.max

        for format in formats {
            let dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            let diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height)
            if diff < currentDiff {
                selectedFormat = format
                currentDiff = diff
            }
        }

        assert(selectedFormat != nil)
        return selectedFormat
    }

    /// Highest frame rate advertised across the format's supported ranges.
    private func framesPerSecond(format: AVCaptureDevice.Format) -> Int {
        let maxFrameRate = format.videoSupportedFrameRateRanges.reduce(Float64(0)) {
            max($0, $1.maxFrameRate)
        }

        return Int(maxFrameRate)
    }
}
// MARK: - Pretty Print Objc enums.
fileprivate extension RTCSignalingState {

@ -250,10 +250,10 @@ extension CallUIAdaptee {
call.audioSource = audioSource
}
/// Default adaptee implementation: forwards camera-source changes to the CallService.
/// Must be called on the main thread.
/// NOTE(review): the diff view had both the old `useBackCamera` and new
/// `isUsingFrontCamera` signatures; resolved to the post-change version.
internal func setCameraSource(call: SignalCall, isUsingFrontCamera: Bool) {
    SwiftAssertIsOnMainThread(#function)

    callService.setCameraSource(call: call, isUsingFrontCamera: isUsingFrontCamera)
}
// CallKit handles ringing state on it's own. But for non-call kit we trigger ringing start/stop manually.

Loading…
Cancel
Save