// desktop/app/providers/CallProvider/CallProvider.tsx
import { Call } from "@/app/components/Call/Call";
import { useConsoleLogger } from "@/app/hooks/useConsoleLogger";
import { createContext, useEffect, useRef, useState } from "react";
import nacl from 'tweetnacl';
import { useSender } from "../ProtocolProvider/useSender";
import { PacketSignalPeer, SignalType } from "../ProtocolProvider/protocol/packets/packet.signal.peer";
import { usePacket } from "../ProtocolProvider/usePacket";
import { usePublicKey } from "../AccountProvider/usePublicKey";
import { PacketWebRTC, WebRTCSignalType } from "../ProtocolProvider/protocol/packets/packet.webrtc";
import { PacketIceServers } from "../ProtocolProvider/protocol/packets/packet.ice.servers";
import { modals } from "@mantine/modals";
import { Button, Flex, Text } from "@mantine/core";
import { useSound } from "@/app/hooks/useSound";
import useWindow from "@/app/hooks/useWindow";
import { attachReceiverE2EE, attachSenderE2EE } from "./audioE2EE";
import { useDeattachedSender } from "../DialogProvider/useDeattachedSender";
import { AttachmentType } from "../ProtocolProvider/protocol/packets/packet.message";
import { generateRandomKey } from "@/app/utils/utils";
/**
 * Public API of the call context: start/accept/end calls, toggle the
 * microphone and remote audio, and expose call state to the UI.
 */
export interface CallContextValue {
  /** Starts an outgoing call to the given peer (public key of the callee). */
  call: (callable: string) => void;
  /** Ends the current call and notifies the peer. */
  close: () => void;
  /** Public key of the peer of the current call, or "" when idle. */
  activeCall: string;
  /** Current stage of the call lifecycle. */
  callState: CallState;
  /** Whether the local microphone is muted. */
  muted: boolean;
  /** Whether remote audio playback is enabled. */
  sound: boolean;
  setMuted: (muted: boolean) => void;
  setSound: (sound: boolean) => void;
  /** Call duration in seconds; ticks only while the call is active. */
  duration: number;
  /** Shows or hides the call view overlay. */
  setShowCallView: (show: boolean) => void;
  /** Returns a representation of the session key for display in the UI. */
  getKeyCast: () => string;
  /** Accepts an incoming call. */
  accept: () => void;
}
/** Lifecycle stages of a call, from signaling through teardown. */
export enum CallState {
  /** Initial signaling: the CALL signal was sent, waiting for the peer. */
  CONNECTING,
  /** The parties are exchanging session public keys. */
  KEY_EXCHANGE,
  /**
   * Final signaling stage: both sides have exchanged keys and are now
   * establishing a secure channel for the call over WebRTC, preparing
   * for the active call.
   */
  WEB_RTC_EXCHANGE,
  /** The call is connected and audio is flowing. */
  ACTIVE,
  /** No call in progress (also the initial state). */
  ENDED,
  /** An incoming call is ringing and waiting to be accepted. */
  INCOMING
}
/** Which side of the call this client is. */
export enum CallRole {
  /**
   * The calling side that initiates the call.
   */
  CALLER,
  /**
   * The receiving side that answers the call and accepts it.
   */
  CALLEE
}
/** React context giving descendants access to call controls and state. */
export const CallContext = createContext<CallContextValue | null>(null);
/** Props for the CallProvider component. */
export interface CallProviderProps {
  children: React.ReactNode;
}
/**
 * Provides end-to-end encrypted voice calls: peer signaling, NaCl key
 * exchange, WebRTC negotiation through an SFU server, and the call UI
 * state exposed via CallContext.
 *
 * Caller flow:  call() -> CONNECTING -> KEY_EXCHANGE -> WEB_RTC_EXCHANGE
 *               (room creation, SDP/ICE exchange) -> ACTIVE -> ENDED.
 * Callee flow:  INCOMING -> accept() -> KEY_EXCHANGE -> WEB_RTC_EXCHANGE
 *               -> ACTIVE -> ENDED.
 */
export function CallProvider(props : CallProviderProps) {
  // Public key of the peer we are talking to ("" when idle).
  const [activeCall, setActiveCall] = useState<string>("");
  const [callState, setCallState] = useState<CallState>(CallState.ENDED);
  const [muted, setMutedState] = useState<boolean>(false);
  const [sound, setSoundState] = useState<boolean>(true);
  // Interval handle that ticks the duration counter once per second while ACTIVE.
  const durationIntervalRef = useRef<NodeJS.Timeout | null>(null);
  // Call duration in seconds.
  const [duration, setDuration] = useState<number>(0);
  // NOTE(review): callState is ENDED on mount, so this initializer is always false.
  const [showCallView, setShowCallView] = useState<boolean>(callState == CallState.INCOMING);
  const {info} = useConsoleLogger("CallProvider");
  // Our ephemeral NaCl key pair for this call session (the callee generates it in accept()).
  const [sessionKeys, setSessionKeys] = useState<nacl.BoxKeyPair | null>(null);
  const send = useSender();
  const publicKey = usePublicKey();
  const peerConnectionRef = useRef<RTCPeerConnection | null>(null);
  // Id of the SFU room of the current call.
  const roomIdRef = useRef<string>("");
  // Our role (CALLER/CALLEE) in the current call; null when idle.
  const roleRef = useRef<CallRole | null>(null);
  // Hex-encoded shared secret derived via nacl.box.before(); used for audio E2EE.
  const sharedSecretRef = useRef<string>("");
  const iceServersRef = useRef<RTCIceServer[]>([]);
  const remoteAudioRef = useRef<HTMLAudioElement | null>(null);
  // ICE candidates received before the remote description was set.
  const iceCandidatesBufferRef = useRef<RTCIceCandidate[]>([]);
  // Mirrors of muted/sound state readable from event callbacks without stale closures.
  const mutedRef = useRef<boolean>(false);
  const soundRef = useRef<boolean>(true);
  const {sendMessage} = useDeattachedSender();
  const {playSound, stopSound, stopLoopSound} = useSound();
  const {setWindowPriority} = useWindow();
  // When the call becomes active: stop ringing, play the "connected" cue and
  // start the per-second duration counter (cleared later in end()).
  useEffect(() => {
    if(callState == CallState.ACTIVE){
      stopLoopSound();
      stopSound();
      playSound("connected.mp3");
      setWindowPriority(false);
      durationIntervalRef.current = setInterval(() => {
        setDuration(prev => prev + 1);
      }, 1000);
    }
  }, [callState]);
  useEffect(() => {
    /**
     * We need ICE servers to be able to establish a connection across
     * different networks. Request them from the server.
     */
    let packet = new PacketIceServers();
    send(packet);
    return () => {
      // Unmount cleanup: stop audio and tear down the peer connection.
      // NOTE(review): durationIntervalRef is not cleared here, so the
      // interval leaks if the provider unmounts during an active call.
      stopSound();
      if (remoteAudioRef.current) {
        remoteAudioRef.current.pause();
        remoteAudioRef.current.srcObject = null;
      }
      peerConnectionRef.current?.close();
      peerConnectionRef.current = null;
    };
  }, []);
  // Packet 28: the server's list of TURN servers.
  usePacket(28, async (packet: PacketIceServers) => {
    let iceServers = packet.getIceServers();
    /**
     * ICE servers received; convert them to the client's RTCIceServer format.
     * (The original comment also mentioned adding udp and tcp variants, but
     * the loop emits one entry per server using its own transport.)
     */
    let formattedIceServers: RTCIceServer[] = [];
    for(let i = 0; i < iceServers.length; i++){
      let server = iceServers[i];
      formattedIceServers.push({
        urls: "turn:" + server.url + "?transport=" + server.transport,
        username: server.username,
        credential: server.credential
      });
    }
    iceServersRef.current = formattedIceServers;
    info("Received ICE servers from server, count: " + formattedIceServers.length);
  }, []);
  // Packet 27: WebRTC signaling (SDP offers/answers and ICE candidates) from the SFU.
  usePacket(27, async (packet: PacketWebRTC) => {
    if(callState != CallState.WEB_RTC_EXCHANGE && callState != CallState.ACTIVE){
      /**
       * No active call, or we are not in the WebRTC signal exchange stage; ignore.
       */
      return;
    }
    const signalType = packet.getSignalType();
    if(signalType == WebRTCSignalType.ANSWER){
      /**
       * The other side (the SFU server) sent us an SDP answer to our offer.
       */
      const sdp = JSON.parse(packet.getSdpOrCandidate());
      await peerConnectionRef.current?.setRemoteDescription(new RTCSessionDescription(sdp));
      if(iceCandidatesBufferRef.current.length > 0){
        /**
         * We have ICE candidates that were buffered before the remote
         * description was set; now we can add them to the PeerConnection.
         */
        for(let i = 0; i < iceCandidatesBufferRef.current.length; i++){
          await peerConnectionRef.current?.addIceCandidate(iceCandidatesBufferRef.current[i]);
        }
        iceCandidatesBufferRef.current = [];
      }
      info("Received WebRTC answer and set remote description");
      return;
    }
    if(signalType == WebRTCSignalType.ICE_CANDIDATE){
      /**
       * The other side sent us an ICE candidate for establishing the WebRTC connection.
       */
      const candidate = JSON.parse(packet.getSdpOrCandidate());
      console.info(candidate);
      if(peerConnectionRef.current?.remoteDescription == null){
        /**
         * The remote description is not set yet; buffer the candidate so it
         * can be added once the remote description has been set.
         */
        iceCandidatesBufferRef.current.push(new RTCIceCandidate(candidate));
        info("Received WebRTC ICE candidate but remote description is not set yet, buffering candidate");
        return;
      }
      await peerConnectionRef.current?.addIceCandidate(new RTCIceCandidate(candidate));
      info("Received WebRTC ICE candidate and added to peer connection");
      return;
    }
    if(signalType == WebRTCSignalType.OFFER && peerConnectionRef.current){
      /**
       * The SFU server sent us an offer (e.g. during renegotiation); we need
       * to accept it and reply with an answer (ANSWER).
       */
      const sdp = JSON.parse(packet.getSdpOrCandidate());
      await peerConnectionRef.current?.setRemoteDescription(new RTCSessionDescription(sdp));
      let answer = await peerConnectionRef.current?.createAnswer();
      await peerConnectionRef.current?.setLocalDescription(answer);
      let answerSignal = new PacketWebRTC();
      answerSignal.setSignalType(WebRTCSignalType.ANSWER);
      answerSignal.setSdpOrCandidate(JSON.stringify(answer));
      send(answerSignal);
      info("Received WebRTC offer, set remote description and sent answer");
      return;
    }
  }, [activeCall, sessionKeys, callState, roomIdRef]);
  // Packet 26: peer call signaling (ring, key exchange, room lifecycle, hang-up).
  usePacket(26, async (packet: PacketSignalPeer) => {
    const signalType = packet.getSignalType();
    if(signalType == SignalType.END_CALL_BECAUSE_BUSY) {
      openCallsModal("Line is busy, the user is currently on another call. Please try again later.");
      end();
    }
    if(signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED) {
      openCallsModal("The connection with the user was lost. The call has ended.")
      end();
    }
    if(activeCall){
      /**
       * We already have an active call; ignore all signals except those
       * belonging to the current call.
       */
      if(packet.getSrc() != activeCall && packet.getSrc() != publicKey){
        console.info("Received signal from " + packet.getSrc() + " but active call is with " + activeCall + ", ignoring");
        info("Received signal for another call, ignoring");
        return;
      }
    }
    if(signalType == SignalType.END_CALL){
      /**
       * The peer hung up.
       */
      end();
      return;
    }
    if(signalType == SignalType.CALL){
      /**
       * An incoming call is ringing.
       */
      setWindowPriority(true);
      playSound("ringtone.mp3", true);
      setActiveCall(packet.getSrc());
      setCallState(CallState.INCOMING);
      setShowCallView(true);
    }
    if(signalType == SignalType.KEY_EXCHANGE && roleRef.current == CallRole.CALLER){
      console.info("EXCHANGE SIGNAL RECEIVED, CALLER ROLE");
      /**
       * The other side generated session keys and sent us its public part;
       * now we can derive a shared secret session for encrypting the call.
       */
      const sharedPublic = packet.getSharedPublic();
      if(!sharedPublic){
        info("Received key exchange signal without shared public key");
        return;
      }
      const sessionKeys = generateSessionKeys();
      const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey);
      sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex');
      // NOTE(review): this logs the shared secret in plaintext — consider removing.
      info("Generated shared secret for call session: " + sharedSecretRef.current);
      /**
       * Send our public key to the other side so it can derive the same
       * shared secret session.
       */
      const signalPacket = new PacketSignalPeer();
      signalPacket.setSrc(publicKey);
      signalPacket.setDst(packet.getSrc());
      signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
      signalPacket.setSharedPublic(Buffer.from(sessionKeys.publicKey).toString('hex'));
      send(signalPacket);
      setCallState(CallState.WEB_RTC_EXCHANGE);
      /**
       * Create a room on the SFU server; the room is created by the caller.
       */
      let webRtcSignal = new PacketSignalPeer();
      webRtcSignal.setSignalType(SignalType.CREATE_ROOM);
      webRtcSignal.setSrc(publicKey);
      webRtcSignal.setDst(packet.getSrc());
      send(webRtcSignal);
    }
    if(signalType == SignalType.KEY_EXCHANGE && roleRef.current == CallRole.CALLEE){
      console.info("EXCHANGE SIGNAL RECEIVED, CALLEE ROLE");
      /**
       * We already sent our public key part to the other side; now we have
       * received its public part and can derive the shared secret session
       * for encrypting the call.
       */
      const sharedPublic = packet.getSharedPublic();
      if(!sharedPublic){
        info("Received key exchange signal without shared public key");
        return;
      }
      if(!sessionKeys){
        info("Received key exchange signal but session keys are not generated");
        return;
      }
      const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey);
      sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex');
      // NOTE(review): this logs the shared secret in plaintext — consider removing.
      info("Generated shared secret for call session: " + sharedSecretRef.current);
      setCallState(CallState.WEB_RTC_EXCHANGE);
    }
    if(signalType == SignalType.CREATE_ROOM) {
      /**
       * A room for exchanging WebRTC streams has been created.
       */
      roomIdRef.current = packet.getRoomId();
      info("WebRTC room created with id: " + packet.getRoomId());
      /**
       * Now send our SDP offer to the other side to establish the WebRTC connection.
       */
      peerConnectionRef.current = new RTCPeerConnection({
        iceServers: iceServersRef.current,
        // Non-standard Chromium option enabling insertable streams for E2EE frames.
        // @ts-ignore
        encodedInsertableStreams: true
      });
      /**
       * Subscribe to local ICE candidates and forward each one to the peer.
       */
      peerConnectionRef.current.onicecandidate = (event) => {
        if(event.candidate){
          let candidateSignal = new PacketWebRTC();
          candidateSignal.setSignalType(WebRTCSignalType.ICE_CANDIDATE);
          candidateSignal.setSdpOrCandidate(JSON.stringify(event.candidate));
          send(candidateSignal);
        }
      }
      /**
       * Once the connection is established the call can start; move to the
       * active call state.
       */
      peerConnectionRef.current.onconnectionstatechange = () => {
        console.info("Peer connection state changed: " + peerConnectionRef.current?.connectionState);
        if(peerConnectionRef.current?.connectionState == "connected"){
          setCallState(CallState.ACTIVE);
          info("WebRTC connection established, call is active");
        }
      }
      peerConnectionRef.current.ontrack = async (event) => {
        // Attach the E2EE decryptor to the incoming track before playback.
        try {
          await attachReceiverE2EE(event.receiver, Buffer.from(sharedSecretRef.current, "hex"));
        } catch (e) {
          console.error("attachReceiverE2EE failed:", e);
        }
        /**
         * A media track was received from the other side; play it.
         */
        if(remoteAudioRef.current && event.streams[0]){
          console.info(event.streams);
          remoteAudioRef.current.srcObject = event.streams[0];
          remoteAudioRef.current.muted = !soundRef.current;
          void remoteAudioRef.current.play().catch((e) => {
            console.error("Failed to play remote audio:", e);
          });
        }
      }
      /**
       * Request the microphone audio stream and add it to the PeerConnection
       * so the other side can receive and play it once the WebRTC connection
       * is established.
       */
      const localStream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const audioTrack = localStream.getAudioTracks()[0];
      const tx = peerConnectionRef.current.addTransceiver(audioTrack, {
        direction: "sendrecv",
        streams: [localStream]
      });
      // Encrypt outgoing audio frames with the session's shared secret.
      await attachSenderE2EE(tx.sender, Buffer.from(sharedSecretRef.current, "hex"));
      /**
       * Send our offer to the other side.
       */
      let offer = await peerConnectionRef.current.createOffer();
      await peerConnectionRef.current.setLocalDescription(offer);
      let offerSignal = new PacketWebRTC();
      offerSignal.setSignalType(WebRTCSignalType.OFFER);
      offerSignal.setSdpOrCandidate(JSON.stringify(offer));
      send(offerSignal);
      return;
    }
  }, [activeCall, sessionKeys, duration]);
  // Shows a dismissible modal with the given message (busy line, lost connection, ...).
  const openCallsModal = (text : string) => {
    modals.open({
      centered: true,
      children: (
        <>
          <Text size="sm">
            {text}
          </Text>
          <Flex align={'center'} justify={'flex-end'}>
            <Button color={'red'} variant={'subtle'} onClick={() => modals.closeAll()} mt="md">
              Close
            </Button>
          </Flex>
        </>
      ),
      withCloseButton: false
    });
  }
  // Generates a fresh NaCl box key pair for the call session, stores it in
  // state and also returns it directly (the state update is asynchronous).
  const generateSessionKeys = () => {
    const sessionKeys = nacl.box.keyPair();
    info("Generated keys for call session, len: " + sessionKeys.publicKey.length);
    setSessionKeys(sessionKeys);
    return sessionKeys;
  }
  // Starts an outgoing call to the given peer; rejected while another call is in progress.
  const call = (dialog: string) => {
    if(callState == CallState.ACTIVE
      || callState == CallState.CONNECTING
      || callState == CallState.KEY_EXCHANGE
      || callState == CallState.WEB_RTC_EXCHANGE){
      openCallsModal("You are already on a call, please end the current call before starting a new one.");
      return;
    }
    setWindowPriority(false);
    setActiveCall(dialog);
    setCallState(CallState.CONNECTING);
    setShowCallView(true);
    const signalPacket = new PacketSignalPeer();
    signalPacket.setSrc(publicKey);
    signalPacket.setDst(dialog);
    signalPacket.setSignalType(SignalType.CALL);
    send(signalPacket);
    roleRef.current = CallRole.CALLER;
    playSound("calling.mp3", true);
  }
  // Hangs up: notifies the peer with END_CALL, then tears down locally.
  const close = () => {
    const packetSignal = new PacketSignalPeer();
    packetSignal.setSrc(publicKey);
    packetSignal.setDst(activeCall);
    packetSignal.setSignalType(SignalType.END_CALL);
    send(packetSignal);
    end();
  }
  // Local teardown: stops sounds/audio, closes the peer connection and
  // resets all call state back to idle.
  const end = () => {
    stopLoopSound();
    stopSound();
    if (remoteAudioRef.current) {
      remoteAudioRef.current.pause();
      remoteAudioRef.current.srcObject = null;
    }
    generateCallAttachment();
    setDuration(0);
    durationIntervalRef.current && clearInterval(durationIntervalRef.current);
    setWindowPriority(false);
    playSound("end_call.mp3");
    peerConnectionRef.current?.close();
    peerConnectionRef.current = null;
    roomIdRef.current = "";
    mutedRef.current = false;
    soundRef.current = true;
    setActiveCall("");
    setCallState(CallState.ENDED);
    setShowCallView(false);
    setSessionKeys(null);
    setDuration(0);
    setMutedState(false);
    setSoundState(true);
    roleRef.current = null;
  }
  /**
   * Sends a message into the dialog with the peer containing information
   * about the call (e.g. its duration), rendered as a CALL attachment
   * in the dialog UI.
   */
  const generateCallAttachment = () => {
    if(roleRef.current != CallRole.CALLER){
      /**
       * Only the caller sends the call information attachment, so that it
       * can be shown in the dialog UI (for example the call duration).
       */
      return;
    }
    sendMessage(activeCall, "", [{
      id: generateRandomKey(16),
      preview: duration.toString(),
      type: AttachmentType.CALL,
      transport: {
        transport_server: "",
        transport_tag: ""
      },
      encoding: {
        encoded_for: "",
        encoder: ""
      },
      blob: ""
    }], true);
  }
  // Accepts an incoming call: generates session keys and starts the key exchange.
  const accept = () => {
    if(callState != CallState.INCOMING){
      /**
       * Nothing to accept.
       */
      return;
    }
    setWindowPriority(false);
    stopLoopSound();
    stopSound();
    /**
     * The call was accepted: generate session keys and send our public part
     * to the other side to establish the secure channel.
     */
    const keys = generateSessionKeys();
    const signalPacket = new PacketSignalPeer();
    signalPacket.setSrc(publicKey);
    signalPacket.setDst(activeCall);
    signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
    signalPacket.setSharedPublic(Buffer.from(keys.publicKey).toString('hex'));
    send(signalPacket);
    setCallState(CallState.KEY_EXCHANGE);
    roleRef.current = CallRole.CALLEE;
  }
  /**
   * Gets a cast of the key for display in the UI, so that the real key
   * is not shown.
   * NOTE(review): the implementation returns the full shared secret hex,
   * not a fingerprint/digest — confirm whether this is intentional.
   * @returns
   */
  const getKeyCast = () => {
    if(!sharedSecretRef.current){
      return "";
    }
    return sharedSecretRef.current;
  }
  // Mutes/unmutes the local microphone track(s) and plays a feedback sound.
  // NOTE(review): the sound names look inverted (muting plays "micro_enable.mp3");
  // confirm against the audio assets.
  const setMuted = (nextMuted: boolean) => {
    if (mutedRef.current === nextMuted) {
      return;
    }
    mutedRef.current = nextMuted;
    playSound(nextMuted ? "micro_enable.mp3" : "micro_disable.mp3");
    if(peerConnectionRef.current){
      peerConnectionRef.current.getSenders().forEach(sender => {
        if(sender.track?.kind == "audio"){
          sender.track.enabled = !nextMuted;
        }
      });
    }
    setMutedState(nextMuted);
  }
  // Enables/disables remote audio playback and plays a feedback sound.
  const setSound = (nextSound: boolean) => {
    if (soundRef.current === nextSound) {
      return;
    }
    soundRef.current = nextSound;
    playSound(nextSound ? "sound_enable.mp3" : "sound_disable.mp3");
    if(remoteAudioRef.current){
      remoteAudioRef.current.muted = !nextSound;
      if (nextSound) {
        void remoteAudioRef.current.play().catch((e) => {
          console.error("Failed to resume remote audio:", e);
        });
      }
    }
    setSoundState(nextSound);
  }
  const context = {
    call,
    close,
    activeCall,
    callState,
    muted,
    sound,
    setMuted,
    setSound,
    duration,
    setShowCallView,
    getKeyCast,
    accept
  };
  return (
    <CallContext.Provider value={context}>
      {props.children}
      {/* Hidden element that plays the remote peer's audio stream. */}
      <audio ref={remoteAudioRef} autoPlay playsInline style={{ display: 'none' }} />
      {showCallView && <Call context={context}></Call>}
    </CallContext.Provider>
  )
}