center avatar in draggable video window and handle mute video events

pull/1969/head
Audric Ackermann 4 years ago
parent ecceaeaa8f
commit 678a5bcb3b

@@ -13,6 +13,8 @@ import {
   getSelectedConversationKey,
 } from '../../../state/selectors/conversations';
 import { openConversationWithMessages } from '../../../state/ducks/conversations';
+import { Avatar, AvatarSize } from '../../Avatar';
+import { getConversationController } from '../../../session/conversations';
 
 export const DraggableCallWindow = styled.div`
   position: absolute;
@@ -26,10 +28,11 @@ export const DraggableCallWindow = styled.div`
   border: var(--session-border);
 `;
 
-export const StyledVideoElement = styled.video`
+export const StyledVideoElement = styled.video<{ isRemoteVideoMuted: boolean }>`
   padding: 0 1rem;
   height: 100%;
   width: 100%;
+  opacity: ${props => (props.isRemoteVideoMuted ? 0 : 1)};
 `;
 
 const StyledDraggableVideoElement = styled(StyledVideoElement)`
@@ -40,6 +43,20 @@ const DraggableCallWindowInner = styled.div`
   cursor: pointer;
 `;
 
+const CenteredAvatarInDraggable = styled.div`
+  position: absolute;
+  width: 100%;
+  top: 0;
+  bottom: 0;
+  left: 0;
+  right: 50%;
+  min-height: 85px;
+  min-width: 85px;
+  display: flex;
+  justify-content: center;
+  align-items: center;
+`;
+
 // TODO:
 /**
  * Add mute input, deafen, end call, possibly add person to call
@@ -54,6 +71,7 @@ export const DraggableCallContainer = () => {
   const [positionY, setPositionY] = useState(window.innerHeight / 2);
   const [lastPositionX, setLastPositionX] = useState(0);
   const [lastPositionY, setLastPositionY] = useState(0);
+  const [isRemoteVideoMuted, setIsRemoteVideoMuted] = useState(true);
 
   const ongoingCallPubkey = ongoingCallProps?.id;
   const videoRefRemote = useRef<any>(undefined);
@@ -77,9 +95,16 @@ export const DraggableCallContainer = () => {
   useEffect(() => {
     if (ongoingCallPubkey !== selectedConversationKey) {
       CallManager.setVideoEventsListener(
-        (_localStream: MediaStream | null, remoteStream: MediaStream | null) => {
+        (
+          _localStream: MediaStream | null,
+          remoteStream: MediaStream | null,
+          _camerasList: any,
+          _audioList: any,
+          remoteVideoIsMuted: boolean
+        ) => {
           if (mountedState() && videoRefRemote?.current) {
             videoRefRemote.current.srcObject = remoteStream;
+            setIsRemoteVideoMuted(remoteVideoIsMuted);
           }
         }
       );
@@ -99,6 +124,13 @@ export const DraggableCallContainer = () => {
   if (!hasOngoingCall || !ongoingCallProps || ongoingCallPubkey === selectedConversationKey) {
     return null;
   }
+
+  const ongoingCallUsername = ongoingCallProps?.profileName || ongoingCallProps?.name;
+  const avatarPath = ongoingCallPubkey
+    ? getConversationController()
+        .get(ongoingCallPubkey)
+        .getAvatarPath()
+    : undefined;
 
   return (
     <Draggable
@@ -120,7 +152,21 @@
     >
       <DraggableCallWindow className="dragHandle">
         <DraggableCallWindowInner>
-          <StyledDraggableVideoElement ref={videoRefRemote} autoPlay={true} />
+          <StyledDraggableVideoElement
+            ref={videoRefRemote}
+            autoPlay={true}
+            isRemoteVideoMuted={isRemoteVideoMuted}
+          />
+          {isRemoteVideoMuted && (
+            <CenteredAvatarInDraggable>
+              <Avatar
+                size={AvatarSize.XL}
+                avatarPath={avatarPath}
+                name={ongoingCallUsername}
+                pubkey={ongoingCallPubkey}
+              />
+            </CenteredAvatarInDraggable>
+          )}
         </DraggableCallWindowInner>
       </DraggableCallWindow>
     </Draggable>
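A note on the pattern above: the remote <video> element stays mounted and is merely faded out (opacity: 0) while the far side reports its camera as off, so the attached srcObject is kept and playback resumes as soon as frames arrive again; the absolutely positioned wrapper then centers the contact's avatar over the hidden video. The hook below is a condensed, hypothetical sketch of the consumer side of the extended listener, not code from this commit; the import path and the null-unregister call are assumptions.

import { useEffect, useRef, useState } from 'react';
import { CallManager } from '../../../session/utils'; // assumed import path

// Mirrors what DraggableCallContainer does with the new five-argument callback:
// attach the remote stream and keep a local copy of the remote mute flag.
export const useRemoteCallVideo = () => {
  const [isRemoteVideoMuted, setIsRemoteVideoMuted] = useState(true);
  const videoRefRemote = useRef<HTMLVideoElement | null>(null);

  useEffect(() => {
    CallManager.setVideoEventsListener(
      (
        _localStream: MediaStream | null,
        remoteStream: MediaStream | null,
        _camerasList: any,
        _audioList: any,
        remoteVideoIsMuted: boolean
      ) => {
        if (videoRefRemote.current) {
          videoRefRemote.current.srcObject = remoteStream;
        }
        setIsRemoteVideoMuted(remoteVideoIsMuted);
      }
    );
    return () => {
      // Assumption: passing null unregisters the listener (the listener type is nullable).
      CallManager.setVideoEventsListener(null);
    };
  }, []);

  return { isRemoteVideoMuted, videoRefRemote };
};

The next file in the diff, the in-conversation call window, applies the same avatar-overlay idea and drops its old track-based mute detection.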

@@ -123,9 +123,8 @@ const AudioInputMenu = ({
   );
 };
 
-
-const CenteredAvatar = styled.div`
+const CenteredAvatarInConversation = styled.div`
   position: absolute;
   top: 0;
   bottom: 0;
   left: 0;
@@ -172,13 +171,12 @@ export const InConversationCallContainer = () => {
         localStream: MediaStream | null,
         remoteStream: MediaStream | null,
         camerasList: Array<InputItem>,
-        audioInputList: Array<InputItem>
+        audioInputList: Array<InputItem>,
+        isRemoteVideoStreamMuted: boolean
       ) => {
         if (mountedState() && videoRefRemote?.current && videoRefLocal?.current) {
           videoRefLocal.current.srcObject = localStream;
-          setIsRemoteVideoMuted(
-            Boolean(remoteStream?.getTracks().find(t => t.kind === 'video')?.muted)
-          );
+          setIsRemoteVideoMuted(isRemoteVideoStreamMuted);
           videoRefRemote.current.srcObject = remoteStream;
 
           setCurrentConnectedCameras(camerasList);
@@ -262,20 +260,29 @@
     <InConvoCallWindow>
       <RelativeCallWindow>
         <VideoContainer>
-          <StyledVideoElement ref={videoRefRemote} autoPlay={true} />
-          {isRemoteVideoMuted && ongoingCallPubkey && (
-            <CenteredAvatar>
+          <StyledVideoElement
+            ref={videoRefRemote}
+            autoPlay={true}
+            isRemoteVideoMuted={isRemoteVideoMuted}
+          />
+          {isRemoteVideoMuted && (
+            <CenteredAvatarInConversation>
              <Avatar
                size={AvatarSize.XL}
                avatarPath={avatarPath}
                name={ongoingCallUsername}
                pubkey={ongoingCallPubkey}
              />
-            </CenteredAvatar>
+            </CenteredAvatarInConversation>
           )}
         </VideoContainer>
         <VideoContainer>
-          <StyledVideoElement ref={videoRefLocal} autoPlay={true} muted={true} />
+          <StyledVideoElement
+            ref={videoRefLocal}
+            autoPlay={true}
+            muted={true}
+            isRemoteVideoMuted={false}
+          />
         </VideoContainer>
 
         <InConvoCallWindowControls>
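The substantive change in this file is inside the listener body: instead of inferring mute state from the receiving track (the removed Boolean(remoteStream?.getTracks().find(t => t.kind === 'video')?.muted) check), the component now trusts the flag that CallManager forwards. A receiver-side MediaStreamTrack.muted reflects whether media is currently arriving, so it can lag behind, or never follow, a remote user simply toggling their camera off; an explicit signal avoids that guesswork. For comparison only, a small hedged sketch of what the old, pull-based approach involves:

// What the removed code relied on: the receiver-side 'muted' flag, which tracks
// packet flow rather than the remote user's intent.
const mutedFromRemoteTracks = (stream: MediaStream | null): boolean =>
  Boolean(stream?.getTracks().find(t => t.kind === 'video')?.muted);

// Keeping that value fresh would also need per-track event handlers:
const watchTrackMute = (track: MediaStreamTrack, onChange: (muted: boolean) => void) => {
  track.onmute = () => onChange(true);
  track.onunmute = () => onChange(false);
};

The remaining hunks, in the call manager itself, add the sending and receiving halves of that explicit signal.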

@@ -17,6 +17,7 @@ import { CallMessage } from '../messages/outgoing/controlMessage/CallMessage';
 import { ed25519Str } from '../onions/onionPath';
 import { getMessageQueue } from '../sending';
 import { PubKey } from '../types';
+
 export type InputItem = { deviceId: string; label: string };
 
 // const VIDEO_WIDTH = 640;
@@ -27,14 +28,21 @@ type CallManagerListener =
       localStream: MediaStream | null,
       remoteStream: MediaStream | null,
       camerasList: Array<InputItem>,
-      audioInputsList: Array<InputItem>
+      audioInputsList: Array<InputItem>,
+      isRemoteVideoStreamMuted: boolean
     ) => void)
   | null;
 
 let videoEventsListener: CallManagerListener;
 
 function callVideoListener() {
   if (videoEventsListener) {
-    videoEventsListener(mediaDevices, remoteStream, camerasList, audioInputsList);
+    videoEventsListener(
+      mediaDevices,
+      remoteStream,
+      camerasList,
+      audioInputsList,
+      remoteVideoStreamIsMuted
+    );
   }
 }
@@ -52,6 +60,8 @@ let peerConnection: RTCPeerConnection | null;
 let dataChannel: RTCDataChannel | null;
 let remoteStream: MediaStream | null;
 let mediaDevices: MediaStream | null;
+let remoteVideoStreamIsMuted = true;
+
 export const INPUT_DISABLED_DEVICE_ID = 'off';
 
 let makingOffer = false;
@@ -108,6 +118,15 @@ async function updateInputLists() {
   }));
 }
 
+function sendVideoStatusViaDataChannel() {
+  const videoEnabledLocally =
+    selectedCameraId !== undefined && selectedCameraId !== INPUT_DISABLED_DEVICE_ID;
+  const stringToSend = JSON.stringify({
+    video: videoEnabledLocally,
+  });
+  dataChannel?.send(stringToSend);
+}
+
 export async function selectCameraByDeviceId(cameraDeviceId: string) {
   if (cameraDeviceId === INPUT_DISABLED_DEVICE_ID) {
     selectedCameraId = cameraDeviceId;
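Both branches of selectCameraByDeviceId now end with sendVideoStatusViaDataChannel(), so the peer hears about a camera toggle even though disabling only flips sender.track.enabled and triggers no renegotiation. A short usage sketch with the exported helpers, assuming the module's camera list has already been populated by updateInputLists:

// Turn the local camera off: the video sender track is disabled and {"video": false}
// goes out over the data channel, so the peer can swap in our avatar.
await selectCameraByDeviceId(INPUT_DISABLED_DEVICE_ID);

// Turn it back on with a real device: the track is replaced and {"video": true} is sent.
const videoInputs = (await navigator.mediaDevices.enumerateDevices()).filter(
  d => d.kind === 'videoinput'
);
if (videoInputs.length > 0) {
  await selectCameraByDeviceId(videoInputs[0].deviceId);
}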
@@ -118,6 +137,7 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
     if (sender?.track) {
       sender.track.enabled = false;
     }
+    sendVideoStatusViaDataChannel();
     return;
   }
   if (camerasList.some(m => m.deviceId === cameraDeviceId)) {
@@ -146,6 +166,7 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
         mediaDevices?.removeTrack(t);
       });
       mediaDevices?.addTrack(videoTrack);
+      sendVideoStatusViaDataChannel();
     } else {
       throw new Error('Failed to get sender for selectCameraByDeviceId ');
     }
@@ -205,6 +226,9 @@ async function handleNegotiationNeededEvent(_event: Event, recipient: string) {
       offerToReceiveAudio: true,
       offerToReceiveVideo: true,
     });
+    if (!offer) {
+      throw new Error('Could not create an offer');
+    }
     await peerConnection?.setLocalDescription(offer);
 
     if (offer && offer.sdp) {
@@ -394,10 +418,28 @@ function closeVideoCall() {
   mediaDevices = null;
   remoteStream = null;
   if (videoEventsListener) {
-    videoEventsListener(null, null, [], []);
+    videoEventsListener(null, null, [], [], true);
   }
 }
 
+function onDataChannelReceivedMessage(ev: MessageEvent<string>) {
+  try {
+    const parsed = JSON.parse(ev.data);
+
+    if (parsed.video !== undefined) {
+      remoteVideoStreamIsMuted = !Boolean(parsed.video);
+    }
+    callVideoListener();
+  } catch (e) {
+    window.log.warn('onDataChannelReceivedMessage Could not parse data in event', ev);
+  }
+}
+
+function onDataChannelOnOpen() {
+  window.log.info('onDataChannelOnOpen: sending video status');
+  sendVideoStatusViaDataChannel();
+}
+
 function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolean) {
   if (peerConnection) {
     return peerConnection;
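Taken together, the data-channel traffic becomes a single JSON shape, { video: boolean }, sent when the channel opens and whenever the local camera selection changes, and parsed leniently on receipt so unknown payloads are just logged. To make the wire format explicit, here is a standalone sketch of the same convention over a bare RTCDataChannel; it is an illustration of the protocol, not code from this repository:

// Sender side: announce whether our camera is currently enabled.
function announceVideoStatus(channel: RTCDataChannel, videoEnabled: boolean) {
  channel.send(JSON.stringify({ video: videoEnabled }));
}

// Receiver side: tolerate anything that is not the expected JSON shape.
function handleStatusMessage(ev: MessageEvent<string>, onRemoteVideo: (enabled: boolean) => void) {
  try {
    const parsed = JSON.parse(ev.data);
    if (typeof parsed.video === 'boolean') {
      onRemoteVideo(parsed.video);
    }
  } catch (e) {
    console.warn('unexpected datachannel payload', ev.data);
  }
}

// Wiring, given an existing RTCPeerConnection `pc`:
// const channel = pc.createDataChannel('session-datachannel');
// channel.onopen = () => announceVideoStatus(channel, true);
// channel.onmessage = ev => handleStatusMessage(ev, enabled => { /* update the mute flag */ });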
@@ -412,41 +454,21 @@ function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolean) {
   peerConnection.ondatachannel = e => {
     if (!createDataChannel) {
       dataChannel = e.channel;
-      console.warn('ondatachannel');
-      setInterval(() => {
-        console.warn('ondatachannel: sending yoooooo');
-        dataChannel?.send('yooooooooooooooo: ' + Date.now());
-      }, 1000);
-
-      dataChannel.onmessage = e => {
-        console.warn('ondatachannel: datachannel on message', e);
-      };
+      window.log.info('Got our datachannel setup');
+      onDataChannelOnOpen();
+
+      dataChannel.onmessage = onDataChannelReceivedMessage;
     }
   };
 
   if (createDataChannel) {
-    console.warn('createOrGetPeerConnection: createDataChannel');
+    // console.warn('createOrGetPeerConnection: createDataChannel');
     dataChannel = peerConnection.createDataChannel('session-datachannel');
-
-    dataChannel.onmessage = e => {
-      console.warn('createDataChannel: datachannel on message', e);
-    };
-
-    dataChannel.onopen = () => {
-      window.log.info('onopen of datachannel');
-      const videoEnabledLocally =
-        selectedCameraId !== undefined && selectedCameraId !== INPUT_DISABLED_DEVICE_ID;
-      dataChannel?.send(
-        JSON.stringify({
-          video: videoEnabledLocally,
-        })
-      );
-    };
-    dataChannel.onclose = () => {
-      window.log.info('onclose of datachannel');
-    };
+    dataChannel.onmessage = onDataChannelReceivedMessage;
+    dataChannel.onopen = onDataChannelOnOpen;
   }
 
   peerConnection.onsignalingstatechange = handleSignalingStateChangeEvent;
@@ -563,7 +585,7 @@ export function handleCallTypeEndCall(sender: string) {
   window.log.info('handling callMessage END_CALL');
 
   if (videoEventsListener) {
-    videoEventsListener(null, null, [], []);
+    videoEventsListener(null, null, [], [], true);
   }
   closeVideoCall();
   //
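One last convention worth noting: both closeVideoCall() and handleCallTypeEndCall() now invoke the listener as videoEventsListener(null, null, [], [], true), meaning no streams, no devices, and remote video treated as muted, which matches the initial value of remoteVideoStreamIsMuted. A listener therefore needs only one code path for live updates and teardown; a minimal hedged sketch (the element lookups are illustrative, not from this codebase):

const remoteVideoEl = document.querySelector<HTMLVideoElement>('#remote-video');
const localVideoEl = document.querySelector<HTMLVideoElement>('#local-video');

const onVideoEvents = (
  localStream: MediaStream | null,
  remoteStream: MediaStream | null,
  _camerasList: Array<{ deviceId: string; label: string }>,
  _audioInputsList: Array<{ deviceId: string; label: string }>,
  isRemoteVideoStreamMuted: boolean
) => {
  if (remoteVideoEl) {
    remoteVideoEl.srcObject = remoteStream; // null detaches the feed on end-call
    remoteVideoEl.style.opacity = isRemoteVideoStreamMuted ? '0' : '1';
  }
  if (localVideoEl) {
    localVideoEl.srcObject = localStream;
  }
};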
