Files
desktop/app/providers/CallProvider/CallProvider.tsx
RoyceDa cf29cecfd6
All checks were successful
SP Builds / build (push) Successful in 5m58s
Поднятие версии
2026-04-04 18:59:52 +02:00

623 lines
26 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import { Call } from "@/app/components/Call/Call";
import { useConsoleLogger } from "@/app/hooks/useConsoleLogger";
import { createContext, useEffect, useRef, useState } from "react";
import nacl from 'tweetnacl';
import { useSender } from "../ProtocolProvider/useSender";
import { PacketSignalPeer, SignalType } from "../ProtocolProvider/protocol/packets/packet.signal.peer";
import { usePacket } from "../ProtocolProvider/usePacket";
import { usePublicKey } from "../AccountProvider/usePublicKey";
import { PacketWebRTC, WebRTCSignalType } from "../ProtocolProvider/protocol/packets/packet.webrtc";
import { PacketIceServers } from "../ProtocolProvider/protocol/packets/packet.ice.servers";
import { modals } from "@mantine/modals";
import { Button, Flex, Text } from "@mantine/core";
import { useSound } from "@/app/hooks/useSound";
import useWindow from "@/app/hooks/useWindow";
import { attachReceiverE2EE, attachSenderE2EE } from "./audioE2EE";
import { useDeattachedSender } from "../DialogProvider/useDeattachedSender";
import { AttachmentType } from "../ProtocolProvider/protocol/packets/packet.message";
import { generateRandomKey } from "@/app/utils/utils";
/**
 * Public API exposed by CallProvider to consumers of CallContext.
 */
export interface CallContextValue {
  /** Starts an outgoing call to the given callable (peer public key / dialog id). */
  call: (callable: string) => void;
  /** Ends the current call locally and notifies the other side. */
  close: () => void;
  /** Identifier of the current call's peer, or "" when idle. */
  activeCall: string;
  /** Current stage of the call lifecycle. */
  callState: CallState;
  /** True when the local microphone is muted. */
  muted: boolean;
  /** True when remote audio playback is enabled. */
  sound: boolean;
  /** Mutes/unmutes the local microphone. */
  setMuted: (muted: boolean) => void;
  /** Enables/disables remote audio playback. */
  setSound: (sound: boolean) => void;
  /** Call duration in seconds, counted while the call is ACTIVE. */
  duration: number;
  /** Shows or hides the call view overlay. */
  setShowCallView: (show: boolean) => void;
  /** Returns a representation of the session key for display in the UI. */
  getKeyCast: () => string;
  /** Accepts the currently incoming call. */
  accept: () => void;
}
/**
 * Stages of the call lifecycle, in rough chronological order.
 */
export enum CallState {
  /** Outgoing call signal sent; waiting for the other side to respond. */
  CONNECTING,
  /** Exchanging public keys to derive the shared E2EE secret. */
  KEY_EXCHANGE,
  /**
   * Final signaling stage, at which both sides have exchanged keys and are now
   * establishing the secure communication channel for the call over WebRTC,
   * preparing for the active call.
   */
  WEB_RTC_EXCHANGE,
  /** Media is flowing; the call is in progress. */
  ACTIVE,
  /** No call in progress (idle), or the call has finished. */
  ENDED,
  /** A call signal has been received and is waiting to be accepted. */
  INCOMING
}
/**
 * Which side of the call this client is.
 */
export enum CallRole {
  /**
   * The calling side, which initiates the call.
   */
  CALLER,
  /**
   * The receiving side, which answers the call and accepts it.
   */
  CALLEE
}
/** React context carrying the call API; null outside of a CallProvider. */
export const CallContext = createContext<CallContextValue | null>(null);
/** Props for CallProvider. */
export interface CallProviderProps {
  /** Subtree that gets access to CallContext. */
  children: React.ReactNode;
}
/**
 * Provides encrypted one-to-one audio calling to the application via CallContext.
 *
 * Responsibilities visible in this component:
 *  - call signaling with the server/peer via PacketSignalPeer (packet id 26),
 *  - WebRTC SDP/ICE exchange via PacketWebRTC (packet id 27),
 *  - fetching TURN servers via PacketIceServers (packet id 28),
 *  - NaCl key exchange (nacl.box) to derive a shared secret used by the
 *    sender/receiver E2EE transforms (attachSenderE2EE / attachReceiverE2EE),
 *  - call UI state: mute, speaker, duration counter, ringtones and modals.
 */
export function CallProvider(props : CallProviderProps) {
  // Peer identifier of the current call ("" when idle).
  const [activeCall, setActiveCall] = useState<string>("");
  const [callState, setCallState] = useState<CallState>(CallState.ENDED);
  const [muted, setMutedState] = useState<boolean>(false);
  const [sound, setSoundState] = useState<boolean>(true);
  // Ticks once per second while the call is active; cleared in end().
  const durationIntervalRef = useRef<NodeJS.Timeout | null>(null);
  const [duration, setDuration] = useState<number>(0);
  const [showCallView, setShowCallView] = useState<boolean>(callState == CallState.INCOMING);
  const {info} = useConsoleLogger("CallProvider");
  // Ephemeral NaCl box key pair for this call session (null when idle).
  const [sessionKeys, setSessionKeys] = useState<nacl.BoxKeyPair | null>(null);
  const send = useSender();
  const publicKey = usePublicKey();
  const peerConnectionRef = useRef<RTCPeerConnection | null>(null);
  const roomIdRef = useRef<string>("");
  const roleRef = useRef<CallRole | null>(null);
  // Hex-encoded shared secret derived via nacl.box.before().
  const sharedSecretRef = useRef<string>("");
  const iceServersRef = useRef<RTCIceServer[]>([]);
  const remoteAudioRef = useRef<HTMLAudioElement | null>(null);
  // ICE candidates received before the remote description was set.
  const iceCandidatesBufferRef = useRef<RTCIceCandidate[]>([]);
  // Ref mirrors of muted/sound state, readable from event handlers.
  const mutedRef = useRef<boolean>(false);
  const soundRef = useRef<boolean>(true);
  const {sendMessage} = useDeattachedSender();
  // NOTE(review): set to true on the first remote track but never reset in
  // end(); verify this cannot prematurely activate a subsequent call.
  const hasRemoteTrackRef = useRef<boolean>(false);
  /**
   * Used to join the call: session id and join token received from the server.
   */
  const callSessionIdRef = useRef<string>("");
  const callTokenRef = useRef<string>("");
  const {playSound, stopSound, stopLoopSound} = useSound();
  const {setWindowPriority} = useWindow();
  // When the call becomes active: stop ringtones, play the "connected" cue
  // and start the per-second duration counter.
  // NOTE(review): no effect cleanup here — the interval is only cleared in end().
  useEffect(() => {
    if(callState == CallState.ACTIVE){
      stopLoopSound();
      stopSound();
      playSound("connected.mp3");
      setWindowPriority(false);
      durationIntervalRef.current = setInterval(() => {
        setDuration(prev => prev + 1);
      }, 1000);
    }
  }, [callState]);
  useEffect(() => {
    /**
     * We need ICE servers to establish connections across different networks.
     * Request them from the server.
     */
    let packet = new PacketIceServers();
    send(packet);
    // On unmount: stop sounds, detach remote audio and close the peer connection.
    return () => {
      stopSound();
      if (remoteAudioRef.current) {
        remoteAudioRef.current.pause();
        remoteAudioRef.current.srcObject = null;
      }
      peerConnectionRef.current?.close();
      peerConnectionRef.current = null;
    };
  }, []);
  // Packet 28: list of TURN servers from the backend.
  usePacket(28, async (packet: PacketIceServers) => {
    let iceServers = packet.getIceServers();
    /**
     * ICE servers received — convert them to the client RTCIceServer format
     * (turn: URL with the transport appended as a query parameter).
     * NOTE(review): the original comment mentions adding both udp and tcp
     * variants, but only the transport reported by the server is used.
     */
    let formattedIceServers: RTCIceServer[] = [];
    for(let i = 0; i < iceServers.length; i++){
      let server = iceServers[i];
      formattedIceServers.push({
        urls: "turn:" + server.url + "?transport=" + server.transport,
        username: server.username,
        credential: server.credential
      });
    }
    iceServersRef.current = formattedIceServers;
    info("Received ICE servers from server, count: " + formattedIceServers.length);
  }, []);
  // Packet 27: WebRTC signaling (SDP answers/offers and ICE candidates).
  usePacket(27, async (packet: PacketWebRTC) => {
    if(callState != CallState.WEB_RTC_EXCHANGE && callState != CallState.ACTIVE){
      /**
       * No active call, or we are not at the WebRTC exchange stage — ignore.
       */
      return;
    }
    const signalType = packet.getSignalType();
    if(signalType == WebRTCSignalType.ANSWER){
      /**
       * The other side (the SFU server) sent us an SDP answer to our offer.
       */
      const sdp = JSON.parse(packet.getSdpOrCandidate());
      await peerConnectionRef.current?.setRemoteDescription(new RTCSessionDescription(sdp));
      if(iceCandidatesBufferRef.current.length > 0){
        /**
         * We have buffered ICE candidates that arrived before the remote
         * description was set; they can now be added to the PeerConnection.
         */
        for(let i = 0; i < iceCandidatesBufferRef.current.length; i++){
          await peerConnectionRef.current?.addIceCandidate(iceCandidatesBufferRef.current[i]);
        }
        iceCandidatesBufferRef.current = [];
      }
      info("Received WebRTC answer and set remote description");
      return;
    }
    if(signalType == WebRTCSignalType.ICE_CANDIDATE){
      /**
       * The other side sent us an ICE candidate for establishing the WebRTC connection.
       */
      const candidate = JSON.parse(packet.getSdpOrCandidate());
      if(peerConnectionRef.current?.remoteDescription == null){
        /**
         * The remote description is not set yet — buffer the candidate so it
         * can be added once the remote description has been installed.
         */
        iceCandidatesBufferRef.current.push(new RTCIceCandidate(candidate));
        info("Received WebRTC ICE candidate but remote description is not set yet, buffering candidate");
        return;
      }
      await peerConnectionRef.current?.addIceCandidate(new RTCIceCandidate(candidate));
      info("Received WebRTC ICE candidate and added to peer connection");
      return;
    }
    if(signalType == WebRTCSignalType.OFFER && peerConnectionRef.current){
      /**
       * The SFU server sent us an offer (e.g. during renegotiation); accept it
       * and reply with an ANSWER.
       */
      const sdp = JSON.parse(packet.getSdpOrCandidate());
      await peerConnectionRef.current?.setRemoteDescription(new RTCSessionDescription(sdp));
      let answer = await peerConnectionRef.current?.createAnswer();
      await peerConnectionRef.current?.setLocalDescription(answer);
      let answerSignal = new PacketWebRTC();
      answerSignal.setSignalType(WebRTCSignalType.ANSWER);
      answerSignal.setSdpOrCandidate(JSON.stringify(answer));
      send(answerSignal);
      info("Received WebRTC offer, set remote description and sent answer");
      return;
    }
  }, [activeCall, sessionKeys, callState, roomIdRef]);
  // Packet 26: peer call signaling (ring, accept, key exchange, hang-up...).
  usePacket(26, async (packet: PacketSignalPeer) => {
    const signalType = packet.getSignalType();
    if(signalType == SignalType.END_CALL_BECAUSE_BUSY) {
      openCallsModal("Line is busy, the user is currently on another call. Please try again later.");
      end();
    }
    if(signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED) {
      openCallsModal("The connection with the user was lost. The call has ended.")
      end();
    }
    if(signalType == SignalType.RINGING_TIMEOUT) {
      /**
       * The call signal was delivered to the other side, but it did not
       * answer within the allotted time.
       */
      openCallsModal("The user did not answer the call in time. Please try again later.");
      end();
      return;
    }
    if(signalType == SignalType.END_CALL){
      /**
       * The other side hung up.
       */
      end();
      return;
    }
    if(signalType == SignalType.CALL){
      /**
       * We have an incoming call.
       */
      if(callState != CallState.ENDED){
        /**
         * We already have an active call. (Original comment: "send the other
         * side a signal that the line is busy" — NOTE(review): no busy signal
         * is actually sent here, the incoming call is simply ignored; confirm
         * whether the server performs busy signaling on our behalf.)
         */
        return;
      }
      callSessionIdRef.current = packet.getCallId();
      callTokenRef.current = packet.getJoinToken();
      setWindowPriority(true);
      playSound("ringtone.mp3", true);
      setActiveCall(packet.getSrc());
      setCallState(CallState.INCOMING);
      setShowCallView(true);
    }
    if(signalType == SignalType.KEY_EXCHANGE){
      console.info("EXCHANGE SIGNAL RECEIVED, CALLEE ROLE");
      /**
       * The other side sent us its key; now we send ours back so both
       * parties can derive the shared secret.
       */
      const sharedPublic = packet.getSharedPublic();
      if(!sharedPublic){
        info("Received key exchange signal without shared public key");
        return;
      }
      if(!sessionKeys){
        info("Received key exchange signal but session keys are not generated");
        return;
      }
      const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey);
      sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex');
      // NOTE(review): this logs the E2EE shared secret in plain text —
      // consider removing for production builds.
      info("Generated shared secret for call session: " + sharedSecretRef.current);
      setCallState(CallState.WEB_RTC_EXCHANGE);
      if(roleRef.current == CallRole.CALLER){
        /**
         * The caller has already sent its key and the session is derived —
         * tell the server the call is active.
         */
        const activeSignal = new PacketSignalPeer();
        activeSignal.setSrc(publicKey);
        activeSignal.setDst(activeCall);
        activeSignal.setSignalType(SignalType.ACTIVE);
        send(activeSignal);
        return;
      }
      const signalPacket = new PacketSignalPeer();
      signalPacket.setSrc(publicKey);
      signalPacket.setDst(activeCall);
      signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
      signalPacket.setSharedPublic(Buffer.from(sessionKeys.publicKey).toString('hex'));
      send(signalPacket);
    }
    if(signalType == SignalType.ACCEPT){
      /**
       * The other side accepted our call and the SFU room was created;
       * generate our session keys and start the key exchange.
       */
      const keys = generateSessionKeys();
      const signalPacket = new PacketSignalPeer();
      signalPacket.setSrc(publicKey);
      signalPacket.setDst(activeCall);
      signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
      signalPacket.setSharedPublic(Buffer.from(keys.publicKey).toString('hex'));
      send(signalPacket);
    }
    if(signalType == SignalType.ACTIVE) {
      if(!sessionKeys){
        /**
         * The server may send this signal even if we accepted the call on
         * another device, so we verify the call was accepted on THIS device
         * by checking for the generated encryption keys.
         */
        stopLoopSound();
        stopSound();
        end();
        return;
      }
      /**
       * Send our SDP offer to the other side to establish the WebRTC connection.
       */
      peerConnectionRef.current = new RTCPeerConnection({
        iceServers: iceServersRef.current,
        // Non-standard (Chromium) option enabling insertable streams for E2EE.
        // @ts-ignore
        encodedInsertableStreams: true
      });
      /**
       * Forward our ICE candidates to the other side.
       */
      peerConnectionRef.current.onicecandidate = (event) => {
        if(event.candidate){
          let candidateSignal = new PacketWebRTC();
          candidateSignal.setSignalType(WebRTCSignalType.ICE_CANDIDATE);
          candidateSignal.setSdpOrCandidate(JSON.stringify(event.candidate));
          send(candidateSignal);
        }
      }
      /**
       * Once the connection is established the call can begin — move to the
       * active call state.
       */
      peerConnectionRef.current.onconnectionstatechange = () => {
        console.info("Peer connection state changed: " + peerConnectionRef.current?.connectionState);
        if(peerConnectionRef.current?.connectionState == "connected"){
          /**
           * The WebRTC connection is established and the call is active;
           * stop all other system sounds.
           */
          tryActivateCall();
          info("WebRTC connection established, call is active");
        }
      }
      peerConnectionRef.current.ontrack = async (event) => {
        // Install the E2EE decryption transform before playing the track.
        try {
          await attachReceiverE2EE(event.receiver, Buffer.from(sharedSecretRef.current, "hex"));
        } catch (e) {
          console.error("attachReceiverE2EE failed:", e);
        }
        /**
         * A media track has been received from the other side.
         */
        if(remoteAudioRef.current && event.streams[0]){
          hasRemoteTrackRef.current = true;
          tryActivateCall();
          remoteAudioRef.current.srcObject = event.streams[0];
          remoteAudioRef.current.muted = !soundRef.current;
          void remoteAudioRef.current.play().catch((e) => {
            console.error("Failed to play remote audio:", e);
          });
        }
      }
      /**
       * Request the microphone audio stream and add it to the PeerConnection
       * so the other side can receive and play it once we establish the
       * WebRTC connection.
       */
      const localStream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const audioTrack = localStream.getAudioTracks()[0];
      const tx = peerConnectionRef.current.addTransceiver(audioTrack, {
        direction: "sendrecv",
        streams: [localStream]
      });
      await attachSenderE2EE(tx.sender, Buffer.from(sharedSecretRef.current, "hex"));
      /**
       * Send our offer to the other side.
       */
      let offer = await peerConnectionRef.current.createOffer();
      await peerConnectionRef.current.setLocalDescription(offer);
      let offerSignal = new PacketWebRTC();
      offerSignal.setSignalType(WebRTCSignalType.OFFER);
      offerSignal.setSdpOrCandidate(JSON.stringify(offer));
      send(offerSignal);
      return;
    }
  }, [activeCall, sessionKeys, duration]);
  // Activates the call once BOTH a remote track has arrived and the peer
  // connection reports "connected".
  const tryActivateCall = () => {
    if(hasRemoteTrackRef.current && peerConnectionRef.current?.connectionState == "connected"){
      stopLoopSound();
      stopSound();
      setCallState(CallState.ACTIVE);
      info("Call is now active");
    }
  }
  // Shows a simple centered modal with the given text and a Close button.
  const openCallsModal = (text : string) => {
    modals.open({
      centered: true,
      children: (
        <>
          <Text size="sm">
            {text}
          </Text>
          <Flex align={'center'} justify={'flex-end'}>
            <Button style={{
              outline: 'none'
            }} color={'red'} variant={'subtle'} onClick={() => modals.closeAll()} mt="md">
              Close
            </Button>
          </Flex>
        </>
      ),
      withCloseButton: false
    });
  }
  // Generates a fresh NaCl box key pair for the call session and stores it in
  // state; also returns the pair directly because the state update is async.
  const generateSessionKeys = () => {
    const sessionKeys = nacl.box.keyPair();
    info("Generated keys for call session, len: " + sessionKeys.publicKey.length);
    setSessionKeys(sessionKeys);
    return sessionKeys;
  }
  // Starts an outgoing call to the given dialog (peer).
  // NOTE(review): CallState.INCOMING is not blocked here — confirm that
  // dialing out while a call is ringing in is intended.
  const call = (dialog: string) => {
    if(callState == CallState.ACTIVE
    || callState == CallState.CONNECTING
    || callState == CallState.KEY_EXCHANGE
    || callState == CallState.WEB_RTC_EXCHANGE){
      openCallsModal("You are already on a call, please end the current call before starting a new one.");
      return;
    }
    setWindowPriority(false);
    setActiveCall(dialog);
    setCallState(CallState.CONNECTING);
    setShowCallView(true);
    const signalPacket = new PacketSignalPeer();
    signalPacket.setSrc(publicKey);
    signalPacket.setDst(dialog);
    signalPacket.setSignalType(SignalType.CALL);
    send(signalPacket);
    roleRef.current = CallRole.CALLER;
    playSound("calling.mp3", true);
  }
  // Ends the call locally and notifies the other side with END_CALL.
  const close = () => {
    const packetSignal = new PacketSignalPeer();
    packetSignal.setSrc(publicKey);
    packetSignal.setDst(activeCall);
    packetSignal.setCallId(callSessionIdRef.current);
    packetSignal.setJoinToken(callTokenRef.current);
    packetSignal.setSignalType(SignalType.END_CALL);
    send(packetSignal);
    end();
  }
  // Tears down all local call state: audio, peer connection, timers, keys.
  const end = () => {
    if(callState == CallState.ACTIVE){
      /**
       * Only play the end-call sound if the call was actually active.
       */
      playSound("end_call.mp3");
    }
    if (remoteAudioRef.current) {
      remoteAudioRef.current.pause();
      remoteAudioRef.current.srcObject = null;
    }
    generateCallAttachment();
    setDuration(0);
    durationIntervalRef.current && clearInterval(durationIntervalRef.current);
    setWindowPriority(false);
    peerConnectionRef.current?.close();
    peerConnectionRef.current = null;
    roomIdRef.current = "";
    mutedRef.current = false;
    soundRef.current = true;
    setActiveCall("");
    setCallState(CallState.ENDED);
    setShowCallView(false);
    setSessionKeys(null);
    // NOTE(review): setDuration(0) is called a second time here; also
    // hasRemoteTrackRef, iceCandidatesBufferRef, callSessionIdRef and
    // callTokenRef are never reset in this teardown — verify intent.
    setDuration(0);
    setMutedState(false);
    setSoundState(true);
    stopLoopSound();
    stopSound();
    roleRef.current = null;
  }
  /**
   * Sends a message into the dialog with the peer containing information
   * about the call (as a CALL attachment).
   */
  const generateCallAttachment = () => {
    if(roleRef.current != CallRole.CALLER){
      /**
       * Only the caller sends the call info attachment, so it can be
       * rendered in the dialog UI (e.g. the call duration).
       */
      return;
    }
    sendMessage(activeCall, "", [{
      id: generateRandomKey(16),
      preview: duration.toString(),
      type: AttachmentType.CALL,
      transport: {
        transport_server: "",
        transport_tag: ""
      },
      blob: ""
    }], true);
  }
  // Accepts the currently incoming call.
  const accept = () => {
    if(callState != CallState.INCOMING){
      /**
       * Nothing to accept.
       */
      return;
    }
    setWindowPriority(false);
    stopLoopSound();
    stopSound();
    /**
     * The call is accepted; generate our key for the upcoming exchange.
     */
    generateSessionKeys();
    /**
     * Signal the other side that the call is accepted so it can start the
     * key exchange and connection setup.
     */
    const signalPacket = new PacketSignalPeer();
    signalPacket.setSrc(publicKey);
    signalPacket.setDst(activeCall);
    signalPacket.setCallId(callSessionIdRef.current);
    signalPacket.setJoinToken(callTokenRef.current);
    signalPacket.setSignalType(SignalType.ACCEPT);
    send(signalPacket);
    /**
     * Set the call state to the key exchange stage.
     */
    setCallState(CallState.KEY_EXCHANGE);
    roleRef.current = CallRole.CALLEE;
  }
  /**
   * Gets a fingerprint of the key for display in the UI, so the real key
   * is not shown.
   * NOTE(review): despite the stated intent, this currently returns the
   * full hex-encoded shared secret rather than a fingerprint — confirm
   * whether that is intended.
   * @returns
   */
  const getKeyCast = () => {
    if(!sharedSecretRef.current){
      return "";
    }
    return sharedSecretRef.current;
  }
  // Mutes/unmutes the local microphone by toggling the outgoing audio track.
  // NOTE(review): the sound asset names look inverted (muting plays
  // "micro_enable.mp3") — possibly intentional asset naming; verify.
  const setMuted = (nextMuted: boolean) => {
    if (mutedRef.current === nextMuted) {
      return;
    }
    mutedRef.current = nextMuted;
    playSound(nextMuted ? "micro_enable.mp3" : "micro_disable.mp3");
    if(peerConnectionRef.current){
      peerConnectionRef.current.getSenders().forEach(sender => {
        if(sender.track?.kind == "audio"){
          sender.track.enabled = !nextMuted;
        }
      });
    }
    setMutedState(nextMuted);
  }
  // Enables/disables remote audio playback (speaker toggle).
  const setSound = (nextSound: boolean) => {
    if (soundRef.current === nextSound) {
      return;
    }
    soundRef.current = nextSound;
    playSound(nextSound ? "sound_enable.mp3" : "sound_disable.mp3");
    if(remoteAudioRef.current){
      remoteAudioRef.current.muted = !nextSound;
      if (nextSound) {
        // Resume playback in case it was blocked while the element was muted.
        void remoteAudioRef.current.play().catch((e) => {
          console.error("Failed to resume remote audio:", e);
        });
      }
    }
    setSoundState(nextSound);
  }
  const context = {
    call,
    close,
    activeCall,
    callState,
    muted,
    sound,
    setMuted,
    setSound,
    duration,
    setShowCallView,
    getKeyCast,
    accept
  };
  return (
    <CallContext.Provider value={context}>
      {props.children}
      {/* Hidden element used to play the remote peer's audio stream. */}
      <audio ref={remoteAudioRef} autoPlay playsInline style={{ display: 'none' }} />
      {showCallView && <Call context={context}></Call>}
    </CallContext.Provider>
  )
}