Compare commits

...

36 Commits

Author SHA1 Message Date
RoyceDa
f4592d03b0 Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 3m41s
2026-04-10 18:56:13 +02:00
RoyceDa
6554483939 С binary на hex 2026-04-10 18:52:58 +02:00
RoyceDa
ba12db3c72 OPUS сборка 2026-04-10 17:54:48 +02:00
RoyceDa
b596d36543 Базовая версия голосовых сообщений и аудиоплеер. Кодирование OPUS 2026-04-10 17:20:44 +02:00
RoyceDa
93ef692eb5 Подготовка голосовых сообщений 2026-04-09 16:53:57 +02:00
RoyceDa
8fdfe9b786 Исправление failed to decompress data 2026-04-09 16:36:23 +02:00
RoyceDa
547ac89987 Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 4m4s
2026-04-08 23:05:03 +02:00
RoyceDa
130ad9c35a Исправление парсинга emoji и составных emoji (skin tones) 2026-04-08 22:58:49 +02:00
RoyceDa
adfc6add6f Исправление битых вложений в группах (на декодинг) 2026-04-08 21:55:59 +02:00
RoyceDa
cf29cecfd6 Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 5m58s
2026-04-04 18:59:52 +02:00
RoyceDa
779c265851 Поднятие версии, outline
All checks were successful
SP Builds / build (push) Successful in 4m25s
2026-04-04 18:34:18 +02:00
RoyceDa
8ac952071d Фикс протокола 2026-04-04 18:23:24 +02:00
RoyceDa
e1f5cb7eb8 Перевод звонка в активную стадию 2026-04-04 18:18:05 +02:00
RoyceDa
30f2c90015 Фикс асимметричного обмена 2026-04-04 18:07:24 +02:00
RoyceDa
a341aedd8d Фикс бесконечного обмена ключами 2026-04-04 18:04:11 +02:00
RoyceDa
a9164c7087 Фикс обмена ключами 2026-04-04 18:01:52 +02:00
RoyceDa
04dd23dd5c Фикс обмена ключами 2026-04-04 18:01:09 +02:00
RoyceDa
5979c31120 Таймаут вызова 2026-04-04 17:46:38 +02:00
RoyceDa
c8c85991c7 Исправление протокола для правильного END_CALL 2026-04-04 17:19:43 +02:00
RoyceDa
c052fdae41 Реализация нового протокола звонков 2026-04-04 16:48:26 +02:00
RoyceDa
3492a881cc Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 3m36s
2026-04-02 18:31:46 +02:00
RoyceDa
febeb58778 Фикс ICE кандидатов
Some checks failed
SP Builds / build (push) Has been cancelled
2026-04-02 17:57:38 +02:00
RoyceDa
93e4898bec Правильный deviceId 2026-04-02 17:38:34 +02:00
RoyceDa
de7a00f37a Обновление протокола звонков без авторизации с сохранением защиты 2026-04-02 17:26:38 +02:00
RoyceDa
7b3dd6c566 Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 3m38s
2026-04-01 16:31:31 +02:00
RoyceDa
70af076248 Исправлен цвет аватарки при ее отсутствии в профиле 2026-04-01 16:31:27 +02:00
RoyceDa
92c9dc03c9 Время звонка начинается тогда, когда начинается аудио-дорожка, а не тогда, когда установлено соединение с SFU 2026-04-01 16:28:41 +02:00
RoyceDa
7e8d086a74 Исправление встречных звонков 2026-04-01 14:39:45 +02:00
RoyceDa
0a0c810105 Защита от принятия звонка принятого на другом устройстве 2026-04-01 14:21:11 +02:00
RoyceDa
8fbfb4fa5c Поднятие версии
All checks were successful
SP Builds / build (push) Successful in 3m43s
2026-03-30 19:53:08 +02:00
RoyceDa
2b9e28ee4a Исправление системных звуков в звонке
Some checks failed
SP Builds / build (push) Has been cancelled
2026-03-30 19:41:06 +02:00
RoyceDa
d2a506119c Исправление невозможности выбора сообщений в диалоге 2026-03-30 19:37:17 +02:00
RoyceDa
269f66fdc5 Улучшение CI/CD 2026-03-29 17:03:30 +02:00
RoyceDa
5113d18d70 Фикс дергания при старте интерфейса
Some checks failed
Linux Kernel Build / build (arm64, arm64) (push) Failing after 2m40s
Linux Kernel Build / build (x64, x86_64) (push) Successful in 3m4s
MacOS Kernel Build / build (arm64) (push) Successful in 6m9s
SP Builds / build (push) Successful in 3m53s
Windows Kernel Build / build (push) Successful in 12m15s
MacOS Kernel Build / build (x64) (push) Successful in 6m56s
2026-03-29 16:09:28 +02:00
RoyceDa
cd2dee21ab Поднятие версии kernel 2026-03-29 16:07:02 +02:00
RoyceDa
1b14463dbb Поднятие версии kernel 2026-03-29 16:06:44 +02:00
23 changed files with 1471 additions and 192 deletions

View File

@@ -43,7 +43,7 @@ jobs:
mkdir -p dist/builds/linux/x64 mkdir -p dist/builds/linux/x64
mkdir -p dist/builds/linux/${{ matrix.out_dir }} mkdir -p dist/builds/linux/${{ matrix.out_dir }}
npx electron-vite build npx electron-vite build
npx electron-builder --linux --$ARCH npx electron-builder --linux --${{ matrix.arch }}
- name: Check if files exist - name: Check if files exist
run: | run: |

View File

@@ -22,6 +22,7 @@ import { DialogStateProvider } from './providers/DialogStateProvider.tsx/DialogS
import { DeviceConfirm } from './views/DeviceConfirm/DeviceConfirm'; import { DeviceConfirm } from './views/DeviceConfirm/DeviceConfirm';
import { SystemAccountProvider } from './providers/SystemAccountsProvider/SystemAccountsProvider'; import { SystemAccountProvider } from './providers/SystemAccountsProvider/SystemAccountsProvider';
import { DeviceProvider } from './providers/DeviceProvider/DeviceProvider'; import { DeviceProvider } from './providers/DeviceProvider/DeviceProvider';
import { PlayerProvider } from './providers/PlayerProvider/PlayerProvider';
window.Buffer = Buffer; window.Buffer = Buffer;
export default function App() { export default function App() {
@@ -58,6 +59,7 @@ export default function App() {
<Topbar></Topbar> <Topbar></Topbar>
<ContextMenuProvider> <ContextMenuProvider>
<ImageViwerProvider> <ImageViwerProvider>
<PlayerProvider>
<AvatarProvider> <AvatarProvider>
<Routes> <Routes>
<Route path="/" element={ <Route path="/" element={
@@ -71,6 +73,7 @@ export default function App() {
<Route path="/deviceconfirm" element={<DeviceConfirm />} /> <Route path="/deviceconfirm" element={<DeviceConfirm />} />
</Routes> </Routes>
</AvatarProvider> </AvatarProvider>
</PlayerProvider>
</ImageViwerProvider> </ImageViwerProvider>
</ContextMenuProvider> </ContextMenuProvider>
</Box> </Box>

View File

@@ -60,7 +60,7 @@ export function ActionAvatar(props : ActionAvatarProps) {
size={120} size={120}
radius={120} radius={120}
mx="auto" mx="auto"
bg={'#fff'} bg={avatars.length > 0 ? '#fff' : undefined}
name={props.title.trim() || props.publicKey} name={props.title.trim() || props.publicKey}
color={'initials'} color={'initials'}
src={avatars.length > 0 ? src={avatars.length > 0 ?

View File

@@ -1,7 +1,7 @@
import { useDialog } from "@/app/providers/DialogProvider/useDialog"; import { useDialog } from "@/app/providers/DialogProvider/useDialog";
import { useRosettaColors } from "@/app/hooks/useRosettaColors"; import { useRosettaColors } from "@/app/hooks/useRosettaColors";
import { Box, Divider, Flex, Menu, Popover, Text, Transition, useComputedColorScheme } from "@mantine/core"; import { Box, Divider, Flex, Menu, Popover, Text, Transition, useComputedColorScheme } from "@mantine/core";
import { IconBarrierBlock, IconCamera, IconDoorExit, IconFile, IconMoodSmile, IconPaperclip, IconSend } from "@tabler/icons-react"; import { IconBarrierBlock, IconCamera, IconDoorExit, IconFile, IconMicrophone, IconMoodSmile, IconPaperclip, IconSend, IconTrash } from "@tabler/icons-react";
import { useEffect, useRef, useState } from "react"; import { useEffect, useRef, useState } from "react";
import { useBlacklist } from "@/app/providers/BlacklistProvider/useBlacklist"; import { useBlacklist } from "@/app/providers/BlacklistProvider/useBlacklist";
import { filePrapareForNetworkTransfer, generateRandomKey, imagePrepareForNetworkTransfer } from "@/app/utils/utils"; import { filePrapareForNetworkTransfer, generateRandomKey, imagePrepareForNetworkTransfer } from "@/app/utils/utils";
@@ -25,7 +25,8 @@ import { AnimatedButton } from "../AnimatedButton/AnimatedButton";
import { useUserCacheFunc } from "@/app/providers/InformationProvider/useUserCacheFunc"; import { useUserCacheFunc } from "@/app/providers/InformationProvider/useUserCacheFunc";
import { MentionList, Mention } from "../MentionList/MentionList"; import { MentionList, Mention } from "../MentionList/MentionList";
import { useDrafts } from "@/app/providers/DialogProvider/useDrafts"; import { useDrafts } from "@/app/providers/DialogProvider/useDrafts";
import { useVoiceMessage } from "./useVoiceMessage";
import { VoiceRecorder } from "../VoiceRecorder/VoiceRecorder";
export function DialogInput() { export function DialogInput() {
const colors = useRosettaColors(); const colors = useRosettaColors();
@@ -47,6 +48,7 @@ export function DialogInput() {
const [mentionList, setMentionList] = useState<Mention[]>([]); const [mentionList, setMentionList] = useState<Mention[]>([]);
const mentionHandling = useRef<string>(""); const mentionHandling = useRef<string>("");
const {getDraft, saveDraft} = useDrafts(dialog); const {getDraft, saveDraft} = useDrafts(dialog);
const {start, stop, isRecording, duration, waves, getAudioBlob, interpolateCompressWaves} = useVoiceMessage();
const avatars = useAvatars( const avatars = useAvatars(
@@ -64,6 +66,15 @@ export function DialogInput() {
}] }]
], [], true); ], [], true);
const hasText = message.trim().length > 0;
const showSendIcon = hasText || attachments.length > 0 || isRecording;
const onMicroClick = () => {
if(!isRecording) {
start();
}
};
const fileDialog = useFileDialog({ const fileDialog = useFileDialog({
multiple: false, multiple: false,
//naccept: '*', //naccept: '*',
@@ -188,8 +199,28 @@ export function DialogInput() {
mentionHandling.current = username; mentionHandling.current = username;
} }
const send = () => { const send = async () => {
if(blocked || (message.trim() == "" && attachments.length <= 0)) { if(blocked || (message.trim() == "" && attachments.length <= 0 && !isRecording)){
return;
}
if(isRecording){
const audioBlob = getAudioBlob();
stop();
if(!audioBlob){
return;
}
sendMessage("", [
{
blob: Buffer.from(await audioBlob.arrayBuffer()).toString('hex'),
id: generateRandomKey(8),
type: AttachmentType.VOICE,
preview: duration + "::" + interpolateCompressWaves(35).join(","),
transport: {
transport_server: "",
transport_tag: ""
}
}
]);
return; return;
} }
sendMessage(message, attachments); sendMessage(message, attachments);
@@ -365,6 +396,12 @@ export function DialogInput() {
{!blocked && {!blocked &&
<Flex h={'100%'} p={'xs'} direction={'row'} bg={colors.boxColor}> <Flex h={'100%'} p={'xs'} direction={'row'} bg={colors.boxColor}>
<Flex w={25} mt={10} justify={'center'}> <Flex w={25} mt={10} justify={'center'}>
{isRecording && (
<IconTrash onClick={stop} style={{
cursor: 'pointer'
}} color={colors.error} stroke={1.5} size={25}></IconTrash>
)}
{!isRecording && (
<Menu width={150} withArrow> <Menu width={150} withArrow>
<Menu.Target> <Menu.Target>
<IconPaperclip stroke={1.5} style={{ <IconPaperclip stroke={1.5} style={{
@@ -385,12 +422,14 @@ export function DialogInput() {
} onClick={onClickCamera}>Avatar {hasGroup(dialog) && 'group'}</Menu.Item>} } onClick={onClickCamera}>Avatar {hasGroup(dialog) && 'group'}</Menu.Item>}
</Menu.Dropdown> </Menu.Dropdown>
</Menu> </Menu>
)}
</Flex> </Flex>
<Flex <Flex
w={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'} w={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'}
maw={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'} maw={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'}
align={'center'} align={'center'}
> >
{!isRecording && <>
<RichTextInput <RichTextInput
ref={editableDivRef} ref={editableDivRef}
style={{ style={{
@@ -407,19 +446,17 @@ export function DialogInput() {
}} }}
placeholder="Type message..." placeholder="Type message..."
autoFocus autoFocus
//ref={textareaRef}
//onPaste={onPaste}
//maxLength={2500}
//w={'100%'}
//h={'100%'}
onKeyDown={handleKeyDown} onKeyDown={handleKeyDown}
onChange={setMessage} onChange={setMessage}
onPaste={onPaste} onPaste={onPaste}
//dangerouslySetInnerHTML={{__html: message}}
></RichTextInput> ></RichTextInput>
</>}
{isRecording && <>
<VoiceRecorder duration={duration} waves={waves}></VoiceRecorder>
</>}
</Flex> </Flex>
<Flex mt={10} w={'calc(50px + var(--mantine-spacing-xs))'} gap={'xs'}> <Flex mt={10} w={'calc(50px + var(--mantine-spacing-xs))'} gap={'xs'}>
{!isRecording && <>
<Popover withArrow> <Popover withArrow>
<Popover.Target> <Popover.Target>
<IconMoodSmile color={colors.chevrons.active} stroke={1.5} size={25} style={{ <IconMoodSmile color={colors.chevrons.active} stroke={1.5} size={25} style={{
@@ -435,9 +472,42 @@ export function DialogInput() {
/> />
</Popover.Dropdown> </Popover.Dropdown>
</Popover> </Popover>
<IconSend stroke={1.5} color={message.trim() == "" && attachments.length <= 0 ? colors.chevrons.active : colors.brandColor} onClick={send} style={{ </>}
<Box pos="relative" ml={isRecording ? 35 : 0} w={25} h={25}>
<Transition mounted={showSendIcon} transition="pop" duration={180} timingFunction="ease">
{(styles) => (
<IconSend
stroke={1.5}
color={colors.brandColor}
onClick={send}
style={{
...styles,
position: 'absolute',
inset: 0,
cursor: 'pointer' cursor: 'pointer'
}} size={25}></IconSend> }}
size={25}
/>
)}
</Transition>
<Transition mounted={!showSendIcon} transition="pop" duration={180} timingFunction="ease">
{(styles) => (
<IconMicrophone
stroke={1.5}
color={colors.chevrons.active}
onClick={onMicroClick}
style={{
...styles,
position: 'absolute',
inset: 0,
cursor: 'pointer'
}}
size={25}
/>
)}
</Transition>
</Box>
</Flex> </Flex>
</Flex>} </Flex>}
{blocked && <Box mih={62} bg={colors.boxColor}> {blocked && <Box mih={62} bg={colors.boxColor}>

View File

@@ -0,0 +1,273 @@
import { useState, useRef, useCallback, useEffect } from "react";
/**
 * React hook that records a voice message from the microphone.
 *
 * While recording it:
 *  - captures audio with a MediaRecorder (Opus in WebM/Ogg when supported,
 *    32 kbit/s), flushing a chunk every 100 ms;
 *  - increments `duration` once per second (halted while paused);
 *  - samples the peak input level every 300 ms through an AnalyserNode into
 *    `waves` (values in 0..1; only the latest 120 samples are kept).
 *
 * Contract:
 *  - start(): requests mic access and begins a new recording; on failure it
 *    sets `error` and logs instead of throwing. Calling it while a recording
 *    is already active is a no-op (previously this leaked the old stream).
 *  - stop(): ends the recording and releases the stream/AudioContext;
 *    recorded chunks remain available to getAudioBlob() until the next start().
 *  - pause()/play(): suspend/resume both the recorder and the timers.
 *  - getAudioBlob(): everything recorded so far as a Blob (null when empty);
 *    valid both during recording and after stop().
 *  - interpolateCompressWaves(n): `waves` resampled to exactly n entries
 *    (bucketed peaks when shrinking, linear interpolation when stretching).
 */
export function useVoiceMessage(): {
  isRecording: boolean;
  isPaused: boolean;
  duration: number;
  waves: number[];
  start: () => Promise<void>;
  stop: () => void;
  pause: () => void;
  play: () => void;
  error: string | null;
  getAudioBlob: () => Blob | null;
  interpolateCompressWaves: (targetLength: number) => number[];
} {
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [duration, setDuration] = useState(0); // elapsed whole seconds
  const [waves, setWaves] = useState<number[]>([]);
  const [error, setError] = useState<string | null>(null);

  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  const streamRef = useRef<MediaStream | null>(null);
  const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const waveTimerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
  const waveDataRef = useRef<Uint8Array<ArrayBuffer> | null>(null);
  // Container type the recorder actually negotiated. Stored separately so
  // getAudioBlob() still reports the right MIME type after stop() has
  // cleared mediaRecorderRef (it used to silently fall back to webm/opus).
  const mimeTypeRef = useRef<string>("");

  const clearTimer = useCallback(() => {
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  const stopWaveLoop = useCallback(() => {
    if (waveTimerRef.current) {
      clearInterval(waveTimerRef.current);
      waveTimerRef.current = null;
    }
  }, []);

  // Second counter; guarded so pause/resume never stacks two intervals.
  const startTimer = useCallback(() => {
    if (timerRef.current) return;
    timerRef.current = setInterval(() => {
      setDuration((prev) => prev + 1);
    }, 1000);
  }, []);

  // Polls the analyser every 300 ms and appends the frame's peak (0..1).
  const startWaveLoop = useCallback(() => {
    stopWaveLoop();
    const analyser = analyserRef.current;
    if (!analyser) return;
    if (!waveDataRef.current || waveDataRef.current.length !== analyser.frequencyBinCount) {
      // Explicit ArrayBuffer keeps the Uint8Array<ArrayBuffer> ref type satisfied.
      waveDataRef.current = new Uint8Array(new ArrayBuffer(analyser.frequencyBinCount));
    }
    const MAX_WAVES = 120; // rolling window; older samples are dropped
    const tick = () => {
      if (!analyserRef.current || !waveDataRef.current) return;
      analyserRef.current.getByteFrequencyData(waveDataRef.current);
      let peak = 0;
      for (let i = 0; i < waveDataRef.current.length; i++) {
        const v = waveDataRef.current[i];
        if (v > peak) peak = v;
      }
      const bar = peak / 255;
      setWaves((prev) => {
        const next = [...prev, bar];
        return next.length > MAX_WAVES ? next.slice(next.length - MAX_WAVES) : next;
      });
    };
    tick();
    waveTimerRef.current = setInterval(tick, 300);
  }, [stopWaveLoop]);

  // Releases every audio resource: wave polling, analyser graph, context, mic.
  const cleanupAudio = useCallback(() => {
    stopWaveLoop();
    sourceRef.current?.disconnect();
    sourceRef.current = null;
    analyserRef.current = null;
    waveDataRef.current = null;
    if (audioContextRef.current) {
      audioContextRef.current.close();
      audioContextRef.current = null;
    }
    streamRef.current?.getTracks().forEach((track) => track.stop());
    streamRef.current = null;
  }, [stopWaveLoop]);

  const start = useCallback(async () => {
    // Re-entrancy guard: a second start() while recording would leak the
    // previous stream/AudioContext and orphan its MediaRecorder.
    if (mediaRecorderRef.current) return;
    try {
      setError(null);
      setDuration(0);
      setWaves([]);
      chunksRef.current = [];

      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      streamRef.current = stream;

      // The analyser only feeds the live waveform preview; the recording
      // path goes directly from the stream into MediaRecorder.
      const audioContext = new AudioContext();
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 2048;
      analyser.smoothingTimeConstant = 0;
      analyser.minDecibels = -100;
      analyser.maxDecibels = -10;
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);
      audioContextRef.current = audioContext;
      analyserRef.current = analyser;
      sourceRef.current = source;

      // Pick the best supported Opus container; "" lets the browser decide.
      const preferredTypes = [
        "audio/webm;codecs=opus",
        "audio/ogg;codecs=opus",
        "audio/webm",
      ];
      const mimeType = preferredTypes.find((t) => MediaRecorder.isTypeSupported(t)) ?? "";
      const mediaRecorder = new MediaRecorder(stream, {
        ...(mimeType ? { mimeType } : {}),
        audioBitsPerSecond: 32_000,
      });
      mediaRecorderRef.current = mediaRecorder;
      // Remember what the recorder actually settled on for getAudioBlob().
      mimeTypeRef.current = mediaRecorder.mimeType || mimeType;

      mediaRecorder.ondataavailable = (e) => {
        if (e.data.size > 0) chunksRef.current.push(e.data);
      };
      mediaRecorder.onstop = () => {
        cleanupAudio();
      };
      // 100 ms timeslice: chunks arrive continuously, so getAudioBlob() can
      // be called mid-recording without losing more than the last slice.
      mediaRecorder.start(100);

      setIsRecording(true);
      setIsPaused(false);
      startTimer();
      startWaveLoop();
    } catch (err) {
      setError("Could not start voice recording. Please check microphone permissions.");
      console.error("Voice recording error:", err);
    }
  }, [startTimer, startWaveLoop, cleanupAudio]);

  const stop = useCallback(() => {
    if (mediaRecorderRef.current && isRecording) {
      // onstop (installed in start()) performs the audio-graph cleanup.
      mediaRecorderRef.current.stop();
      mediaRecorderRef.current = null;
      setIsRecording(false);
      setIsPaused(false);
      clearTimer();
      stopWaveLoop();
    }
  }, [isRecording, clearTimer, stopWaveLoop]);

  const pause = useCallback(() => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state === "recording") {
      mediaRecorderRef.current.pause();
      setIsPaused(true);
      clearTimer();
      stopWaveLoop();
    }
  }, [clearTimer, stopWaveLoop]);

  const play = useCallback(() => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state === "paused") {
      mediaRecorderRef.current.resume();
      setIsPaused(false);
      startTimer();
      startWaveLoop();
    }
  }, [startTimer, startWaveLoop]);

  // Unmount safety net: stop timers, the recorder, and release the mic.
  useEffect(() => {
    return () => {
      clearTimer();
      stopWaveLoop();
      if (mediaRecorderRef.current && mediaRecorderRef.current.state !== "inactive") {
        mediaRecorderRef.current.stop();
      }
      cleanupAudio();
    };
  }, [clearTimer, stopWaveLoop, cleanupAudio]);

  const getAudioBlob = useCallback((): Blob | null => {
    if (chunksRef.current.length === 0) return null;
    // Prefer the live recorder's type, then the type captured at start();
    // the literal is only a last-resort default.
    const mimeType =
      mediaRecorderRef.current?.mimeType || mimeTypeRef.current || "audio/webm;codecs=opus";
    return new Blob(chunksRef.current, { type: mimeType });
  }, []);

  // Resamples `waves` to exactly targetLength entries: bucketed peaks when
  // shrinking, linear interpolation between neighbors when stretching.
  const interpolateCompressWaves = useCallback((targetLength: number) => {
    if (targetLength <= 0) return [];
    if (waves.length === 0) return Array(targetLength).fill(0);
    if (waves.length === targetLength) return waves;
    if (waves.length > targetLength) {
      const compressed: number[] = [];
      const bucketSize = waves.length / targetLength;
      for (let i = 0; i < targetLength; i++) {
        const start = Math.floor(i * bucketSize);
        const end = Math.max(start + 1, Math.floor((i + 1) * bucketSize));
        let max = 0;
        for (let j = start; j < end && j < waves.length; j++) {
          if (waves[j] > max) max = waves[j];
        }
        compressed.push(max);
      }
      return compressed;
    }
    if (targetLength === 1) return [waves[0]];
    const stretched: number[] = [];
    const lastSourceIndex = waves.length - 1;
    for (let i = 0; i < targetLength; i++) {
      const position = (i * lastSourceIndex) / (targetLength - 1);
      const left = Math.floor(position);
      const right = Math.min(Math.ceil(position), lastSourceIndex);
      if (left === right) {
        stretched.push(waves[left]);
        continue;
      }
      const t = position - left;
      const value = waves[left] * (1 - t) + waves[right] * t;
      stretched.push(value);
    }
    return stretched;
  }, [waves]);

  return {
    isRecording,
    isPaused,
    duration,
    waves,
    start,
    stop,
    pause,
    play,
    error,
    getAudioBlob,
    interpolateCompressWaves,
  };
}

View File

@@ -9,6 +9,7 @@ import { AttachmentError } from "../AttachmentError/AttachmentError";
import { MessageAvatar } from "./MessageAvatar"; import { MessageAvatar } from "./MessageAvatar";
import { MessageProps } from "../Messages/Message"; import { MessageProps } from "../Messages/Message";
import { MessageCall } from "./MessageCall"; import { MessageCall } from "./MessageCall";
import { MessageVoice } from "./MessageVoice";
export interface MessageAttachmentsProps { export interface MessageAttachmentsProps {
attachments: Attachment[]; attachments: Attachment[];
@@ -54,6 +55,8 @@ export function MessageAttachments(props: MessageAttachmentsProps) {
return <MessageAvatar {...attachProps} key={index}></MessageAvatar> return <MessageAvatar {...attachProps} key={index}></MessageAvatar>
case AttachmentType.CALL: case AttachmentType.CALL:
return <MessageCall {...attachProps} key={index}></MessageCall> return <MessageCall {...attachProps} key={index}></MessageCall>
case AttachmentType.VOICE:
return <MessageVoice {...attachProps} key={index}></MessageVoice>
default: default:
return <AttachmentError key={index}></AttachmentError>; return <AttachmentError key={index}></AttachmentError>;
} }

View File

@@ -13,9 +13,7 @@ export function MessageReplyMessages(props: AttachmentProps) {
('showAlertInReplyMessages', true); ('showAlertInReplyMessages', true);
const [bgInReplyMessages] = useSetting<string> const [bgInReplyMessages] = useSetting<string>
('bgInReplyMessages', ''); ('bgInReplyMessages', '');
const reply = JSON.parse(props.attachment.blob); const reply = JSON.parse(props.attachment.blob) as MessageReply[];
console.info("Mreply", reply);
const closeAlert = () => { const closeAlert = () => {
modals.openConfirmModal({ modals.openConfirmModal({

View File

@@ -0,0 +1,267 @@
import { DownloadStatus, useAttachment } from "@/app/providers/AttachmentProvider/useAttachment";
import { AttachmentProps } from "./MessageAttachments";
import { Avatar, Box, Flex, Text, useMantineTheme } from "@mantine/core";
import { AnimatedRoundedProgress } from "../AnimatedRoundedProgress/AnimatedRoundedProgress";
import { IconArrowDown, IconPlayerPauseFilled, IconPlayerPlayFilled, IconX } from "@tabler/icons-react";
import { DeliveredMessageState } from "@/app/providers/DialogProvider/DialogProvider";
import { useRosettaColors } from "@/app/hooks/useRosettaColors";
import { useMemo, useRef } from "react";
import { usePlayerContext } from "@/app/providers/PlayerProvider/usePlayerContext";
// Static waveform layout for a voice-message bubble: 40 bars, each 2px wide
// with a 2px gap; bar height is scaled between 4px and 24px from the
// normalized amplitude.
const WAVE_BARS = 40;
const BAR_WIDTH = 2;
const BAR_GAP = 2;
const MIN_BAR_HEIGHT = 4;
const MAX_BAR_HEIGHT = 24;
/**
 * Resamples a waveform amplitude array to exactly `targetLength` entries.
 *
 * Shrinking picks the peak of each bucket so short transients stay visible;
 * stretching linearly interpolates between neighboring samples. An empty
 * source yields all zeros; a non-positive target yields an empty array.
 */
function normalizeWaves(source: number[], targetLength: number): number[] {
  if (targetLength <= 0) return [];
  if (source.length === 0) return Array(targetLength).fill(0);
  if (source.length === targetLength) return source;

  if (source.length > targetLength) {
    // Downsample: peak-pick inside each (possibly fractional) bucket.
    const bucketSize = source.length / targetLength;
    return Array.from({ length: targetLength }, (_, bucket) => {
      const from = Math.floor(bucket * bucketSize);
      const to = Math.min(
        source.length,
        Math.max(from + 1, Math.floor((bucket + 1) * bucketSize))
      );
      let peakValue = 0;
      for (let j = from; j < to; j++) {
        if (source[j] > peakValue) peakValue = source[j];
      }
      return peakValue;
    });
  }

  // Upsample: linear interpolation across the source's index range.
  if (targetLength === 1) return [source[0]];
  const lastIndex = source.length - 1;
  return Array.from({ length: targetLength }, (_, i) => {
    const pos = (i * lastIndex) / (targetLength - 1);
    const lo = Math.floor(pos);
    const hi = Math.min(Math.ceil(pos), lastIndex);
    if (lo === hi) return source[lo];
    const frac = pos - lo;
    return source[lo] * (1 - frac) + source[hi] * frac;
  });
}
/**
 * Formats a second count as zero-padded "MM:SS".
 * Fractional input is floored; negative input is clamped to "00:00".
 */
function formatTime(seconds: number) {
  const total = Math.max(0, Math.floor(seconds));
  const pad = (n: number) => n.toString().padStart(2, "0");
  return `${pad(Math.floor(total / 60))}:${pad(total % 60)}`;
}
/**
 * Renders a voice-message attachment: a circular download/play button, a
 * clickable waveform showing playback progress, and a time label.
 *
 * The attachment's preview string encodes "<seconds>::<w1,w2,...>" (written by
 * the sending side); the blob is hex-encoded audio. Playback is delegated to
 * the shared player from usePlayerContext, keyed by a per-message id so only
 * one voice message plays at a time.
 */
export function MessageVoice(props: AttachmentProps) {
  const { downloadPercentage, downloadStatus, uploadedPercentage, download, getPreview } = useAttachment(
    props.attachment,
    props.parent,
  );
  const theme = useMantineTheme();
  const colors = useRosettaColors();
  // Preview format: "<durationSeconds>::<comma-separated amplitudes>".
  // Missing parts fall back to duration 0 and an empty waveform.
  const preview = getPreview() || "";
  const [durationPart = "0", wavesPart = ""] = preview.split("::");
  const previewDuration = Number.parseInt(durationPart, 10) || 0;
  // Parse amplitudes, dropping anything non-numeric or negative.
  const rawWaves = useMemo(
    () =>
      wavesPart
        .split(",")
        .map((s) => Number.parseFloat(s))
        .filter((n) => Number.isFinite(n) && n >= 0),
    [wavesPart]
  );
  // Resample to a fixed bar count so every bubble has the same width.
  const waves = useMemo(() => normalizeWaves(rawWaves, WAVE_BARS), [rawWaves]);
  // Peak used to scale bar heights; 1 avoids division by zero on silence.
  const peak = useMemo(() => {
    const max = Math.max(...waves, 0);
    return max > 0 ? max : 1;
  }, [waves]);
  const isUploading =
    props.delivered === DeliveredMessageState.WAITING &&
    uploadedPercentage > 0 &&
    uploadedPercentage < 100;
  const error = downloadStatus === DownloadStatus.ERROR;
  const waveformWidth = WAVE_BARS * BAR_WIDTH + (WAVE_BARS - 1) * BAR_GAP;
  const waveformRef = useRef<HTMLDivElement | null>(null);
  const {
    playAudio,
    pause,
    duration: currentDuration,
    playing,
    setDuration,
    totalDuration,
    currentMessageId,
  } = usePlayerContext();
  // NOTE(review): id falls back through parent.id -> attachment.messageId ->
  // attachment.id; the `as any` casts suggest these fields are not on the
  // declared types — confirm the actual shapes and type this properly.
  const messageId = String((props.parent as any)?.id ?? (props.attachment as any)?.messageId ?? props.attachment.id);
  const isCurrentTrack = currentMessageId === messageId;
  // Prefer the player's measured duration once this track is loaded; the
  // Math.max(..., 1) floor keeps the progress division well-defined.
  const fullDuration = Math.max(isCurrentTrack && totalDuration > 0 ? totalDuration : previewDuration, 1);
  const safeCurrent = isCurrentTrack ? currentDuration : 0;
  const playbackProgress = Math.max(0, Math.min(1, safeCurrent / fullDuration));
  // Attachment blob is hex-encoded audio; Buffer is available as a global
  // (window.Buffer is assigned at app startup).
  const createAudioBlob = () => new Blob([Buffer.from(props.attachment.blob, "hex")], { type: "audio/webm;codecs=opus" });
  // Hands the decoded blob to the shared player; an optional seek is deferred
  // one frame so the player has picked up the new track first.
  const ensureStarted = (seekToSec?: number) => {
    const blob = createAudioBlob();
    playAudio("Voice Message", "", blob, messageId);
    if (typeof seekToSec === "number") {
      requestAnimationFrame(() => setDuration(seekToSec));
    }
  };
  // Avatar button: download when not local yet, otherwise toggle play/pause.
  const handleMainAction = () => {
    if (error) return;
    if (downloadStatus !== DownloadStatus.DOWNLOADED) {
      download();
      return;
    }
    if (!isCurrentTrack) {
      ensureStarted();
      return;
    }
    if (playing) {
      pause();
      return;
    }
    // Paused on this track: restart from the last known position.
    ensureStarted(Math.max(0, safeCurrent));
  };
  // Click-to-seek on the waveform, proportional to the click's x offset.
  const handleSeek = (e: React.MouseEvent<HTMLDivElement>) => {
    if (error || downloadStatus !== DownloadStatus.DOWNLOADED) return;
    const rect = waveformRef.current?.getBoundingClientRect();
    if (!rect || rect.width <= 0) return;
    const x = e.clientX - rect.left;
    const progress = Math.max(0, Math.min(1, x / rect.width));
    const seekTo = progress * fullDuration;
    if (!isCurrentTrack) {
      ensureStarted(seekTo);
      return;
    }
    setDuration(seekTo);
  };
  // Remaining time ("-MM:SS") while this track plays, total length otherwise.
  const timeText =
    isCurrentTrack && safeCurrent > 0
      ? `-${formatTime(Math.max(0, fullDuration - safeCurrent))}`
      : formatTime(fullDuration);
  return (
    <Flex gap="sm" align="center">
      <Avatar
        bg={error ? colors.error : colors.brandColor}
        size={40}
        style={{ cursor: "pointer", position: "relative" }}
        onClick={handleMainAction}
      >
        {!error && (
          <>
            {downloadStatus === DownloadStatus.DOWNLOADING && (
              <div style={{ position: "absolute", top: 0, left: 0 }}>
                <AnimatedRoundedProgress size={40} value={Math.max(1, downloadPercentage)} />
              </div>
            )}
            {isUploading && (
              <div style={{ position: "absolute", top: 0, left: 0 }}>
                <AnimatedRoundedProgress color="#fff" size={40} value={uploadedPercentage} />
              </div>
            )}
            {downloadStatus !== DownloadStatus.DOWNLOADED && <IconArrowDown color="white" size={22} />}
            {downloadStatus === DownloadStatus.DOWNLOADED && !isUploading &&
              (isCurrentTrack && playing ? (
                <IconPlayerPauseFilled color="white" size={22} />
              ) : (
                <IconPlayerPlayFilled color="white" size={22} />
              ))}
          </>
        )}
        {(error || isUploading) && <IconX color="white" size={22} />}
      </Avatar>
      <Flex direction="column">
        <Box
          ref={waveformRef}
          w={waveformWidth}
          h={32}
          onClick={handleSeek}
          style={{ overflow: "hidden", cursor: "pointer" }}
        >
          <Flex h="100%" align="center" gap={BAR_GAP} wrap="nowrap">
            {waves.map((value, index) => {
              // Scale each bar's height by its amplitude relative to the peak.
              const normalized = Math.max(0, Math.min(1, value / peak));
              const height = Math.max(
                MIN_BAR_HEIGHT,
                Math.min(MAX_BAR_HEIGHT, MIN_BAR_HEIGHT + normalized * (MAX_BAR_HEIGHT - MIN_BAR_HEIGHT))
              );
              // How much of this bar the playhead has passed (0..1); the bar
              // under the playhead gets a partial-fill gradient.
              const passed = playbackProgress * waves.length - index;
              const fillPercent = Math.max(0, Math.min(1, passed));
              const inactiveColor = theme.colors.gray[4];
              const activeColor = colors.brandColor;
              let background = inactiveColor;
              if (fillPercent >= 1) {
                background = activeColor;
              } else if (fillPercent > 0) {
                background = `linear-gradient(90deg, ${activeColor} 0%, ${activeColor} ${fillPercent * 100}%, ${inactiveColor} ${fillPercent * 100}%, ${inactiveColor} 100%)`;
              }
              return (
                <Box
                  key={index}
                  w={BAR_WIDTH}
                  h={height}
                  style={{
                    flex: `0 0 ${BAR_WIDTH}px`,
                    borderRadius: 999,
                    background,
                  }}
                />
              );
            })}
          </Flex>
        </Box>
        <Text size="xs" c="dimmed">
          {timeText}
        </Text>
      </Flex>
    </Flex>
  );
}

View File

@@ -42,6 +42,20 @@ export function TextParser(props: TextParserProps) {
const theme = useMantineTheme(); const theme = useMantineTheme();
let entityCount = 0; let entityCount = 0;
const UNICODE_EMOJI_SEQUENCE_REGEX =
/(?:\p{Regional_Indicator}{2}|[0-9#*]\uFE0F?\u20E3|\p{Extended_Pictographic}(?:\uFE0F|\uFE0E)?(?:\p{Emoji_Modifier})?(?:\u200D\p{Extended_Pictographic}(?:\uFE0F|\uFE0E)?(?:\p{Emoji_Modifier})?)*)/u;
const UNICODE_EMOJI_SEQUENCE_REGEX_GLOBAL = new RegExp(
UNICODE_EMOJI_SEQUENCE_REGEX.source,
"gu"
);
const toUnified = (value: string): string =>
Array.from(value)
.map((ch) => ch.codePointAt(0)?.toString(16))
.filter(Boolean)
.join("-");
const formatRules : FormatRule[] = [ const formatRules : FormatRule[] = [
{ {
pattern: [ pattern: [
@@ -120,17 +134,20 @@ export function TextParser(props: TextParserProps) {
} }
}, },
{ {
// unicode emojis // unicode emojis (including composite sequences)
pattern: [/\p{Emoji_Presentation}/u], pattern: [UNICODE_EMOJI_SEQUENCE_REGEX],
render: (match: string) => { render: (match: string) => {
let textWithoutEmojis = props.text.replace(/\p{Emoji_Presentation}/gu, ''); const textWithoutEmojis = props.text.replace(UNICODE_EMOJI_SEQUENCE_REGEX_GLOBAL, "");
if(textWithoutEmojis.length <= (props.oversizeIfTextSmallerThan ?? 0)) { const unified = toUnified(match);
return <Emoji size={40} unified={match.codePointAt(0)?.toString(16) || ''}></Emoji>;
if (textWithoutEmojis.length <= (props.oversizeIfTextSmallerThan ?? 0)) {
return <Emoji size={40} unified={unified}></Emoji>;
} }
return <Emoji unified={match.codePointAt(0)?.toString(16) || ''}></Emoji>;
return <Emoji unified={unified}></Emoji>;
}, },
flush: (match: string) => { flush: (match: string) => {
return <Emoji unified={match.codePointAt(0)?.toString(16) || ''}></Emoji>; return <Emoji unified={toUnified(match)}></Emoji>;
} }
}, },
{ {

View File

@@ -0,0 +1,278 @@
import { Box, Flex, Text, useMantineTheme } from "@mantine/core";
import { useEffect, useRef, useState } from "react";
// Props fed by useVoiceMessage: elapsed recording seconds plus the live
// amplitude samples (0..1) to render as a scrolling waveform.
interface VoiceRecorderProps {
  duration: number;
  waves: number[];
}

// One rendered waveform bar; `entered` appears to flip on the animation
// frame after insertion to drive the bar's enter transition — confirm
// against the effect below.
type AnimatedBar = {
  id: number;
  value: number;
  entered: boolean;
};

// Layout constants for the scrolling live waveform: 50 visible bars, 3px
// wide with 2px gaps (STEP_PX per bar), inside a 45px-tall component.
const VISIBLE_BARS = 50;
const BAR_WIDTH = 3;
const BAR_GAP = 2;
const STEP_PX = BAR_WIDTH + BAR_GAP;
const COMPONENT_HEIGHT = 45;
const MAX_BAR_HEIGHT = 28;
const MIN_BAR_HEIGHT = 4;
/**
 * Animated waveform strip shown while a voice message is being recorded.
 *
 * Keeps a sliding window of the newest VISIBLE_BARS amplitude samples from
 * `props.waves`. Newly appended samples pop in with a scaleY transition; once
 * the window is full, the strip scrolls left smoothly between appends by
 * estimating the append cadence (EMA of inter-append intervals).
 */
export function VoiceRecorder(props: VoiceRecorderProps) {
  const theme = useMantineTheme();
  // Bars currently rendered (the newest samples, capped at VISIBLE_BARS).
  const [bars, setBars] = useState<AnimatedBar[]>([]);
  // Horizontal pixel offset (0 .. -STEP_PX) interpolating the scroll between appends.
  const [subShift, setSubShift] = useState(0);
  // Length of props.waves seen on the previous effect run.
  const prevLengthRef = useRef(0);
  // Snapshot of props.waves from the previous run (rolling-buffer change detection).
  const prevWavesRef = useRef<number[]>([]);
  // Monotonic id generator for bar keys.
  const idRef = useRef(0);
  // rAF handle for the one-frame "entered" flip of newly added bars.
  const enterFrameRef = useRef<number | null>(null);
  // rAF handle for the continuous scroll interpolation loop.
  const scrollFrameRef = useRef<number | null>(null);
  // performance.now() timestamp of the last append (null until first data arrives).
  const lastAppendAtRef = useRef<number | null>(null);
  // EMA estimate of the time between appends, in ms (seeded at 120).
  const appendIntervalRef = useRef(120);
  // Mirror of bars.length readable inside the rAF loop without re-subscribing.
  const barsLengthRef = useRef(0);

  // Formats elapsed seconds as "mm:ss".
  // NOTE(review): assumes `seconds` is an integer — a fractional value would
  // render like "00:5.5" because the remainder is not floored; confirm callers.
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60)
      .toString()
      .padStart(2, "0");
    const secs = (seconds % 60).toString().padStart(2, "0");
    return `${mins}:${secs}`;
  };

  // Keep the ref in sync so the scroll loop can read the current bar count.
  useEffect(() => {
    barsLengthRef.current = bars.length;
  }, [bars.length]);

  // React to changes in the incoming sample buffer.
  useEffect(() => {
    if (props.waves.length === 0) {
      // Recording cleared/stopped: reset all animation state.
      setBars([]);
      setSubShift(0);
      prevLengthRef.current = 0;
      prevWavesRef.current = [];
      lastAppendAtRef.current = null;
      return;
    }
    if (props.waves.length < prevLengthRef.current) {
      // Buffer shrank (e.g. a new recording started): rebuild the window
      // without enter animations (entered: true).
      const resetBars = props.waves.slice(-VISIBLE_BARS).map((value) => ({
        id: idRef.current++,
        value,
        entered: true,
      }));
      setBars(resetBars);
      setSubShift(0);
      prevLengthRef.current = props.waves.length;
      prevWavesRef.current = props.waves;
      lastAppendAtRef.current = performance.now();
      return;
    }
    const prevWaves = prevWavesRef.current;
    let appended: number[] = [];
    // Normal mode: the buffer length grew.
    if (props.waves.length > prevLengthRef.current) {
      appended = props.waves.slice(prevLengthRef.current);
    } else if (props.waves.length === prevLengthRef.current && props.waves.length > 0) {
      // Rolling buffer: same length, but the contents shifted.
      let changed = false;
      if (prevWaves.length !== props.waves.length) {
        changed = true;
      } else {
        for (let i = 0; i < props.waves.length; i++) {
          if (props.waves[i] !== prevWaves[i]) {
            changed = true;
            break;
          }
        }
      }
      if (changed) {
        // Treat the newest element as the single appended sample.
        appended = [props.waves[props.waves.length - 1]];
      }
    }
    if (appended.length > 0) {
      const now = performance.now();
      if (lastAppendAtRef.current != null) {
        // Update the EMA of the append cadence (70% history, 30% latest).
        const dt = now - lastAppendAtRef.current;
        const perBar = dt / appended.length;
        appendIntervalRef.current = appendIntervalRef.current * 0.7 + perBar * 0.3;
      }
      lastAppendAtRef.current = now;
      setSubShift(0);
      const newIds: number[] = [];
      setBars((prev) => {
        const next = [...prev];
        appended.forEach((value) => {
          const id = idRef.current++;
          newIds.push(id);
          next.push({
            id,
            value,
            entered: false, // flipped to true one frame later to trigger the CSS transition
          });
        });
        return next.slice(-VISIBLE_BARS);
      });
      if (enterFrameRef.current) {
        cancelAnimationFrame(enterFrameRef.current);
      }
      // One frame later, mark the new bars as entered so scaleY animates 0.18 -> 1.
      enterFrameRef.current = requestAnimationFrame(() => {
        setBars((prev) => {
          const ids = new Set(newIds);
          return prev.map((bar) => (ids.has(bar.id) ? { ...bar, entered: true } : bar));
        });
      });
    }
    prevLengthRef.current = props.waves.length;
    prevWavesRef.current = props.waves;
  }, [props.waves]);

  // Continuous scroll interpolation: between appends, slide the strip left by
  // up to one STEP_PX proportionally to elapsed time — but only once the
  // window is full (before that, bars simply fill in from the left).
  useEffect(() => {
    const tick = () => {
      const startedAt = lastAppendAtRef.current;
      if (startedAt != null) {
        const elapsed = performance.now() - startedAt;
        const interval = Math.max(16, appendIntervalRef.current); // clamp to ~1 frame minimum
        const progress = Math.min(1, elapsed / interval);
        const smoothShift = barsLengthRef.current >= VISIBLE_BARS ? -progress * STEP_PX : 0;
        setSubShift(smoothShift);
      } else {
        setSubShift(0);
      }
      scrollFrameRef.current = requestAnimationFrame(tick);
    };
    scrollFrameRef.current = requestAnimationFrame(tick);
    return () => {
      if (scrollFrameRef.current) {
        cancelAnimationFrame(scrollFrameRef.current);
      }
    };
  }, []);

  // Unmount safety net: cancel any pending animation frames.
  // NOTE(review): scrollFrameRef is already cancelled by the effect above; the
  // duplicate cancel here is harmless but redundant.
  useEffect(() => {
    return () => {
      if (enterFrameRef.current) {
        cancelAnimationFrame(enterFrameRef.current);
      }
      if (scrollFrameRef.current) {
        cancelAnimationFrame(scrollFrameRef.current);
      }
    };
  }, []);

  // Fixed pixel width of the waveform viewport.
  const waveformWidth = VISIBLE_BARS * BAR_WIDTH + (VISIBLE_BARS - 1) * BAR_GAP;

  return (
    <Flex
      direction="row"
      h={COMPONENT_HEIGHT}
      mih={COMPONENT_HEIGHT}
      mah={COMPONENT_HEIGHT}
      align="center"
      justify="center"
      gap="xs"
      px={6}
    >
      {/* Elapsed recording time, mm:ss */}
      <Text size="xs" c="dimmed" w={36}>
        {formatDuration(props.duration)}
      </Text>
      {/* Clipping viewport for the scrolling bar strip */}
      <Box
        w={waveformWidth}
        h={COMPONENT_HEIGHT}
        style={{
          overflow: "hidden",
        }}
      >
        <Flex
          h="100%"
          align="center"
          gap={BAR_GAP}
          wrap="nowrap"
          style={{ transform: `translateX(${subShift}px)` }}
        >
          {Array.from({ length: VISIBLE_BARS }).map((_, index) => {
            const bar = bars[index];
            if (!bar) {
              // Dim placeholder stub for slots not yet filled with samples.
              return (
                <Box
                  key={`empty-${index}`}
                  w={BAR_WIDTH}
                  h={MIN_BAR_HEIGHT}
                  style={{
                    flex: `0 0 ${BAR_WIDTH}px`,
                    borderRadius: 999,
                    background: theme.colors.gray[3],
                    opacity: 0.22,
                  }}
                />
              );
            }
            // Map the clamped amplitude onto the bar height range.
            const normalized = Math.max(0, Math.min(1, bar.value));
            const height = Math.max(
              MIN_BAR_HEIGHT,
              Math.min(MAX_BAR_HEIGHT, MIN_BAR_HEIGHT + normalized * (MAX_BAR_HEIGHT - MIN_BAR_HEIGHT))
            );
            // Highlight the newest bars with a brighter gradient and glow.
            const isLast = index === bars.length - 1;
            const isNearTail = index >= bars.length - 3;
            return (
              <Box
                key={bar.id}
                w={BAR_WIDTH}
                h={height}
                style={{
                  flex: `0 0 ${BAR_WIDTH}px`,
                  alignSelf: "center",
                  borderRadius: 999,
                  background: isLast
                    ? `linear-gradient(180deg, ${theme.colors.blue[3]} 0%, ${theme.colors.blue[5]} 100%)`
                    : `linear-gradient(180deg, ${theme.colors.blue[4]} 0%, ${theme.colors.blue[6]} 100%)`,
                  boxShadow: isLast
                    ? `0 0 10px ${theme.colors.blue[4]}55`
                    : isNearTail
                    ? `0 0 6px ${theme.colors.blue[4]}22`
                    : "none",
                  transform: bar.entered ? "scaleY(1)" : "scaleY(0.18)",
                  transformOrigin: "center center",
                  transition: [
                    "height 260ms cubic-bezier(0.2, 0.8, 0.2, 1)",
                    "transform 260ms cubic-bezier(0.2, 0.8, 0.2, 1)",
                    "opacity 220ms ease",
                    "box-shadow 220ms ease",
                  ].join(", "),
                  willChange: "height, transform, opacity",
                }}
              />
            );
          })}
        </Flex>
      </Box>
    </Flex>
  );
}

View File

@@ -84,6 +84,13 @@ export function CallProvider(props : CallProviderProps) {
const mutedRef = useRef<boolean>(false); const mutedRef = useRef<boolean>(false);
const soundRef = useRef<boolean>(true); const soundRef = useRef<boolean>(true);
const {sendMessage} = useDeattachedSender(); const {sendMessage} = useDeattachedSender();
const hasRemoteTrackRef = useRef<boolean>(false);
/**
* Используются для входа в звонок
*/
const callSessionIdRef = useRef<string>("");
const callTokenRef = useRef<string>("");
const {playSound, stopSound, stopLoopSound} = useSound(); const {playSound, stopSound, stopLoopSound} = useSound();
const {setWindowPriority} = useWindow(); const {setWindowPriority} = useWindow();
@@ -169,7 +176,6 @@ export function CallProvider(props : CallProviderProps) {
* Другая сторона отправила нам ICE кандидата для установления WebRTC соединения * Другая сторона отправила нам ICE кандидата для установления WebRTC соединения
*/ */
const candidate = JSON.parse(packet.getSdpOrCandidate()); const candidate = JSON.parse(packet.getSdpOrCandidate());
console.info(candidate);
if(peerConnectionRef.current?.remoteDescription == null){ if(peerConnectionRef.current?.remoteDescription == null){
/** /**
* Удаленное описание еще не установлено, буферизуем кандидата, чтобы добавить его после установки удаленного описания * Удаленное описание еще не установлено, буферизуем кандидата, чтобы добавить его после установки удаленного описания
@@ -210,16 +216,14 @@ export function CallProvider(props : CallProviderProps) {
openCallsModal("The connection with the user was lost. The call has ended.") openCallsModal("The connection with the user was lost. The call has ended.")
end(); end();
} }
if(activeCall){ if(signalType == SignalType.RINGING_TIMEOUT) {
/** /**
* У нас уже есть активный звонок, игнорируем все сигналы, кроме сигналов от текущего звонка * Другой стороне был отправлен сигнал звонка, но она не ответила на него в течение определенного времени
*/ */
if(packet.getSrc() != activeCall && packet.getSrc() != publicKey){ openCallsModal("The user did not answer the call in time. Please try again later.");
console.info("Received signal from " + packet.getSrc() + " but active call is with " + activeCall + ", ignoring"); end();
info("Received signal for another call, ignoring");
return; return;
} }
}
if(signalType == SignalType.END_CALL){ if(signalType == SignalType.END_CALL){
/** /**
* Сбросили звонок * Сбросили звонок
@@ -231,52 +235,24 @@ export function CallProvider(props : CallProviderProps) {
/** /**
* Нам поступает звонок * Нам поступает звонок
*/ */
if(callState != CallState.ENDED){
/**
* У нас уже есть активный звонок, отправляем сигнал другой стороне, что линия занята
*/
return;
}
callSessionIdRef.current = packet.getCallId();
callTokenRef.current = packet.getJoinToken();
setWindowPriority(true); setWindowPriority(true);
playSound("ringtone.mp3", true); playSound("ringtone.mp3", true);
setActiveCall(packet.getSrc()); setActiveCall(packet.getSrc());
setCallState(CallState.INCOMING); setCallState(CallState.INCOMING);
setShowCallView(true); setShowCallView(true);
} }
if(signalType == SignalType.KEY_EXCHANGE && roleRef.current == CallRole.CALLER){ if(signalType == SignalType.KEY_EXCHANGE){
console.info("EXCHANGE SIGNAL RECEIVED, CALLER ROLE");
/**
* Другая сторона сгенерировала ключи для сессии и отправила нам публичную часть,
* теперь мы можем создать общую секретную сессию для шифрования звонка
*/
const sharedPublic = packet.getSharedPublic();
if(!sharedPublic){
info("Received key exchange signal without shared public key");
return;
}
const sessionKeys = generateSessionKeys();
const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey);
sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex');
info("Generated shared secret for call session: " + sharedSecretRef.current);
/**
* Нам нужно отправить свой публичный ключ другой стороне, чтобы она тоже могла создать общую секретную сессию
*/
const signalPacket = new PacketSignalPeer();
signalPacket.setSrc(publicKey);
signalPacket.setDst(packet.getSrc());
signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
signalPacket.setSharedPublic(Buffer.from(sessionKeys.publicKey).toString('hex'));
send(signalPacket);
setCallState(CallState.WEB_RTC_EXCHANGE);
/**
* Создаем комнату на сервере SFU, комнату создает звонящий
*/
let webRtcSignal = new PacketSignalPeer();
webRtcSignal.setSignalType(SignalType.CREATE_ROOM);
webRtcSignal.setSrc(publicKey);
webRtcSignal.setDst(packet.getSrc());
send(webRtcSignal);
}
if(signalType == SignalType.KEY_EXCHANGE && roleRef.current == CallRole.CALLEE){
console.info("EXCHANGE SIGNAL RECEIVED, CALLEE ROLE"); console.info("EXCHANGE SIGNAL RECEIVED, CALLEE ROLE");
/** /**
* Мы отправили свою публичную часть ключа другой стороне, * Другая сторона отправила нам ключи, теперь отправляем ей свои для генерации общего секрета
* теперь мы получили ее публичную часть и можем создать общую
* секретную сессию для шифрования звонка
*/ */
const sharedPublic = packet.getSharedPublic(); const sharedPublic = packet.getSharedPublic();
if(!sharedPublic){ if(!sharedPublic){
@@ -287,17 +263,53 @@ export function CallProvider(props : CallProviderProps) {
info("Received key exchange signal but session keys are not generated"); info("Received key exchange signal but session keys are not generated");
return; return;
} }
const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey); const computedSharedSecret = nacl.box.before(Buffer.from(sharedPublic, 'hex'), sessionKeys.secretKey);
sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex'); sharedSecretRef.current = Buffer.from(computedSharedSecret).toString('hex');
info("Generated shared secret for call session: " + sharedSecretRef.current); info("Generated shared secret for call session: " + sharedSecretRef.current);
setCallState(CallState.WEB_RTC_EXCHANGE); setCallState(CallState.WEB_RTC_EXCHANGE);
}
if(signalType == SignalType.CREATE_ROOM) { if(roleRef.current == CallRole.CALLER){
/** /**
* Создана комната для обмена WebRTC потоками * Вызывающий уже отправил ключ, сессия сгенерирована, сообщаем серверу что звонок активен
*/ */
roomIdRef.current = packet.getRoomId(); const activeSignal = new PacketSignalPeer();
info("WebRTC room created with id: " + packet.getRoomId()); activeSignal.setSrc(publicKey);
activeSignal.setDst(activeCall);
activeSignal.setSignalType(SignalType.ACTIVE);
send(activeSignal);
return;
}
const signalPacket = new PacketSignalPeer();
signalPacket.setSrc(publicKey);
signalPacket.setDst(activeCall);
signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
signalPacket.setSharedPublic(Buffer.from(sessionKeys.publicKey).toString('hex'));
send(signalPacket);
}
if(signalType == SignalType.ACCEPT){
/**
* Другая сторона приняла наш звонок, комната на SFU создалась, нужно сгенерировать ключи
*/
const keys = generateSessionKeys();
const signalPacket = new PacketSignalPeer();
signalPacket.setSrc(publicKey);
signalPacket.setDst(activeCall);
signalPacket.setSignalType(SignalType.KEY_EXCHANGE);
signalPacket.setSharedPublic(Buffer.from(keys.publicKey).toString('hex'));
send(signalPacket);
}
if(signalType == SignalType.ACTIVE) {
if(!sessionKeys){
/**
* Сервер может отправить CREATE_ROOM сигнал, даже если мы приняли звонок на другом устройстве, по этому проверяем,
* на этом ли устройстве звонок принят посредством проверки наличия сгенерированных ключей шифрования
*/
stopLoopSound();
stopSound();
end();
return;
}
/** /**
* Нужно отправить свой SDP оффер другой стороне, чтобы установить WebRTC соединение * Нужно отправить свой SDP оффер другой стороне, чтобы установить WebRTC соединение
*/ */
@@ -323,7 +335,11 @@ export function CallProvider(props : CallProviderProps) {
peerConnectionRef.current.onconnectionstatechange = () => { peerConnectionRef.current.onconnectionstatechange = () => {
console.info("Peer connection state changed: " + peerConnectionRef.current?.connectionState); console.info("Peer connection state changed: " + peerConnectionRef.current?.connectionState);
if(peerConnectionRef.current?.connectionState == "connected"){ if(peerConnectionRef.current?.connectionState == "connected"){
setCallState(CallState.ACTIVE); /**
* WebRTC соединение установлено, звонок активен, останавливаем все остальные звуки
* системы
*/
tryActivateCall();
info("WebRTC connection established, call is active"); info("WebRTC connection established, call is active");
} }
} }
@@ -338,7 +354,8 @@ export function CallProvider(props : CallProviderProps) {
* При получении медиа-трека с другой стороны * При получении медиа-трека с другой стороны
*/ */
if(remoteAudioRef.current && event.streams[0]){ if(remoteAudioRef.current && event.streams[0]){
console.info(event.streams); hasRemoteTrackRef.current = true;
tryActivateCall();
remoteAudioRef.current.srcObject = event.streams[0]; remoteAudioRef.current.srcObject = event.streams[0];
remoteAudioRef.current.muted = !soundRef.current; remoteAudioRef.current.muted = !soundRef.current;
void remoteAudioRef.current.play().catch((e) => { void remoteAudioRef.current.play().catch((e) => {
@@ -375,6 +392,15 @@ export function CallProvider(props : CallProviderProps) {
} }
}, [activeCall, sessionKeys, duration]); }, [activeCall, sessionKeys, duration]);
const tryActivateCall = () => {
if(hasRemoteTrackRef.current && peerConnectionRef.current?.connectionState == "connected"){
stopLoopSound();
stopSound();
setCallState(CallState.ACTIVE);
info("Call is now active");
}
}
const openCallsModal = (text : string) => { const openCallsModal = (text : string) => {
modals.open({ modals.open({
centered: true, centered: true,
@@ -384,7 +410,9 @@ export function CallProvider(props : CallProviderProps) {
{text} {text}
</Text> </Text>
<Flex align={'center'} justify={'flex-end'}> <Flex align={'center'} justify={'flex-end'}>
<Button color={'red'} variant={'subtle'} onClick={() => modals.closeAll()} mt="md"> <Button style={{
outline: 'none'
}} color={'red'} variant={'subtle'} onClick={() => modals.closeAll()} mt="md">
Close Close
</Button> </Button>
</Flex> </Flex>
@@ -426,14 +454,20 @@ export function CallProvider(props : CallProviderProps) {
const packetSignal = new PacketSignalPeer(); const packetSignal = new PacketSignalPeer();
packetSignal.setSrc(publicKey); packetSignal.setSrc(publicKey);
packetSignal.setDst(activeCall); packetSignal.setDst(activeCall);
packetSignal.setCallId(callSessionIdRef.current);
packetSignal.setJoinToken(callTokenRef.current);
packetSignal.setSignalType(SignalType.END_CALL); packetSignal.setSignalType(SignalType.END_CALL);
send(packetSignal); send(packetSignal);
end(); end();
} }
const end = () => { const end = () => {
stopLoopSound(); if(callState == CallState.ACTIVE){
stopSound(); /**
* Только если звонок был активен воспроизводим звуки
*/
playSound("end_call.mp3");
}
if (remoteAudioRef.current) { if (remoteAudioRef.current) {
remoteAudioRef.current.pause(); remoteAudioRef.current.pause();
remoteAudioRef.current.srcObject = null; remoteAudioRef.current.srcObject = null;
@@ -442,7 +476,6 @@ export function CallProvider(props : CallProviderProps) {
setDuration(0); setDuration(0);
durationIntervalRef.current && clearInterval(durationIntervalRef.current); durationIntervalRef.current && clearInterval(durationIntervalRef.current);
setWindowPriority(false); setWindowPriority(false);
playSound("end_call.mp3");
peerConnectionRef.current?.close(); peerConnectionRef.current?.close();
peerConnectionRef.current = null; peerConnectionRef.current = null;
roomIdRef.current = ""; roomIdRef.current = "";
@@ -455,6 +488,8 @@ export function CallProvider(props : CallProviderProps) {
setDuration(0); setDuration(0);
setMutedState(false); setMutedState(false);
setSoundState(true); setSoundState(true);
stopLoopSound();
stopSound();
roleRef.current = null; roleRef.current = null;
} }
@@ -491,15 +526,22 @@ export function CallProvider(props : CallProviderProps) {
stopLoopSound(); stopLoopSound();
stopSound(); stopSound();
/** /**
* Звонок принят, генерируем ключи для сессии и отправляем их другой стороне для установления защищенного канала связи * Звонок принят, генерируем свой ключ для будущего обмена
*/
generateSessionKeys();
/**
* Отправляем сигнал что звонок принят другой стороне, чтобы она могла начать обмен ключами и установку соединения
*/ */
const keys = generateSessionKeys();
const signalPacket = new PacketSignalPeer(); const signalPacket = new PacketSignalPeer();
signalPacket.setSrc(publicKey); signalPacket.setSrc(publicKey);
signalPacket.setDst(activeCall); signalPacket.setDst(activeCall);
signalPacket.setSignalType(SignalType.KEY_EXCHANGE); signalPacket.setCallId(callSessionIdRef.current);
signalPacket.setSharedPublic(Buffer.from(keys.publicKey).toString('hex')); signalPacket.setJoinToken(callTokenRef.current);
signalPacket.setSignalType(SignalType.ACCEPT);
send(signalPacket); send(signalPacket);
/**
* Устанавливаем состояние звонка и стадию обмена ключами
*/
setCallState(CallState.KEY_EXCHANGE); setCallState(CallState.KEY_EXCHANGE);
roleRef.current = CallRole.CALLEE; roleRef.current = CallRole.CALLEE;
} }

View File

@@ -238,6 +238,10 @@ export function DialogProvider(props: DialogProviderProps) {
* Если это групповое сообщение, то получаем ключ группы * Если это групповое сообщение, то получаем ключ группы
*/ */
decryptKey = await getGroupKey(props.dialog); decryptKey = await getGroupKey(props.dialog);
/**
* Приводим к HEX так как этого требует формат расшифровки вложений в приложении
*/
decryptKey = Buffer.from(decryptKey).toString('hex');
} }
if(!message.from_me && !hasGroup(props.dialog)){ if(!message.from_me && !hasGroup(props.dialog)){
/** /**
@@ -569,7 +573,7 @@ export function DialogProvider(props: DialogProviderProps) {
content: content, content: content,
timestamp: timestamp, timestamp: timestamp,
readed: 0, readed: 0,
chacha_key: groupKey, chacha_key: Buffer.from(groupKey).toString('hex'),
from_me: 1, from_me: 1,
plain_message: decryptedContent, plain_message: decryptedContent,
delivered: DeliveredMessageState.DELIVERED, delivered: DeliveredMessageState.DELIVERED,
@@ -723,7 +727,7 @@ export function DialogProvider(props: DialogProviderProps) {
content: content, content: content,
timestamp: timestamp, timestamp: timestamp,
readed: idle ? 0 : 1, readed: idle ? 0 : 1,
chacha_key: groupKey, chacha_key: Buffer.from(groupKey).toString('hex'),
from_me: fromPublicKey == publicKey ? 1 : 0, from_me: fromPublicKey == publicKey ? 1 : 0,
plain_message: decryptedContent, plain_message: decryptedContent,
delivered: DeliveredMessageState.DELIVERED, delivered: DeliveredMessageState.DELIVERED,

View File

@@ -146,6 +146,7 @@ export function useDialog() : {
} }
console.info("Sending key for message ", key.toString('hex')); console.info("Sending key for message ", key.toString('hex'));
console.info(attachemnts);
let preparedToNetworkSendAttachements : Attachment[] = await prepareAttachmentsToSend(messageId, dialog, key.toString('hex'), attachemnts); let preparedToNetworkSendAttachements : Attachment[] = await prepareAttachmentsToSend(messageId, dialog, key.toString('hex'), attachemnts);
if(attachemnts.length <= 0 && message.trim() == ""){ if(attachemnts.length <= 0 && message.trim() == ""){
runQuery("UPDATE messages SET delivered = ? WHERE message_id = ?", [DeliveredMessageState.ERROR, messageId]); runQuery("UPDATE messages SET delivered = ? WHERE message_id = ?", [DeliveredMessageState.ERROR, messageId]);

View File

@@ -140,7 +140,7 @@ export function useDialogFiber() {
content: content, content: content,
timestamp: timestamp, timestamp: timestamp,
readed: idle ? 0 : 1, readed: idle ? 0 : 1,
chacha_key: groupKey, chacha_key: Buffer.from(groupKey).toString('hex'),
from_me: fromPublicKey == publicKey ? 1 : 0, from_me: fromPublicKey == publicKey ? 1 : 0,
plain_message: decryptedContent, plain_message: decryptedContent,
delivered: DeliveredMessageState.DELIVERED, delivered: DeliveredMessageState.DELIVERED,

View File

@@ -52,6 +52,7 @@ export function useReplyMessages() {
*/ */
return; return;
} }
replyMessages.messages.push(message);
const sortedByTime = replyMessages.messages.sort((a, b) => a.timestamp - b.timestamp); const sortedByTime = replyMessages.messages.sort((a, b) => a.timestamp - b.timestamp);
setReplyMessages({ setReplyMessages({
publicKey: dialog, publicKey: dialog,

View File

@@ -0,0 +1,300 @@
import { createContext, useEffect, useRef, useState } from "react";
/**
 * Contract exposed by PlayerProvider for playing voice-message audio.
 */
export interface PlayerContextValue {
  /**
   * Loads a new track into the shared audio element and starts playback.
   * `audio` may be a URL string or a Blob/File (an object URL is created and
   * revoked internally).
   * NOTE(review): `artist`/`title` are accepted but currently unused by the
   * provider — presumably reserved for future UI; confirm before relying on them.
   */
  playAudio: (
    artist: string,
    title: string,
    audio: string | Blob | File,
    messageId?: string | null
  ) => void;
  /** True while playback is in progress. */
  playing: boolean;
  /** Pauses playback, keeping the current position. */
  pause: () => void;
  /** Resumes playback from the current position. */
  resume: () => void;
  /** Stops playback, rewinds to 0 and clears currentMessageId. */
  stop: () => void;
  /** Seeks to the given absolute position, in seconds. */
  setDuration: (duration: number) => void;
  /** Current playback position, in seconds. */
  duration: number;
  /** Total length of the loaded track, in seconds (0 until metadata is known). */
  totalDuration: number;
  /** Message id of the currently loaded track, or null when none/finished. */
  currentMessageId: string | null;
  /** Message id of the most recently played track (kept after stop/end). */
  lastMessageId: string | null;
  /** Human-readable description of the last playback error, or null. */
  lastError: string | null;
}

/** Null outside a PlayerProvider tree (see usePlayerContext). */
export const PlayerContext = createContext<PlayerContextValue | null>(null);

interface PlayerProviderProps {
  children: React.ReactNode;
}
/**
 * Single-audio-element player used for voice messages.
 *
 * Owns one hidden <audio> element and exposes imperative controls plus
 * playback state through PlayerContext. Refs mirror the state values so media
 * event handlers and rAF callbacks can read them without re-subscribing; the
 * commit* helpers dedupe state updates (with small thresholds on time values)
 * to keep re-renders cheap during frequent `timeupdate` events.
 */
export function PlayerProvider(props: PlayerProviderProps) {
  const audioRef = useRef<HTMLAudioElement>(null);
  // Object URL created for Blob/File sources; revoked before replacing and on unmount.
  const objectUrlRef = useRef<string | null>(null);
  // rAF handle throttling timeupdate -> state commits to at most one per frame.
  const rafTimeUpdateRef = useRef<number | null>(null);
  // True between playAudio() and the first canplay; suppresses play/pause/time events.
  const isLoadingRef = useRef(false);
  // True between setDuration() and the matching "seeked" event.
  const isSeekingRef = useRef(false);
  const durationRef = useRef(0);
  const totalDurationRef = useRef(0);
  const isPlayingRef = useRef(false);

  const [isPlaying, setIsPlaying] = useState(false);
  const [duration, setDurationState] = useState(0);
  const [totalDuration, setTotalDuration] = useState(0);
  const [lastError, setLastError] = useState<string | null>(null);
  const [currentMessageId, setCurrentMessageId] = useState<string | null>(null);
  const [lastMessageId, setLastMessageId] = useState<string | null>(null);

  // Commit the playing flag only when it actually changes.
  const commitPlaying = (next: boolean) => {
    if (isPlayingRef.current === next) return;
    isPlayingRef.current = next;
    setIsPlaying(next);
  };
  // Commit the position, ignoring sub-frame (<33ms) jitter and invalid values.
  const commitDuration = (next: number) => {
    const safe = Number.isFinite(next) && next >= 0 ? next : 0;
    if (Math.abs(safe - durationRef.current) < 0.033) return;
    durationRef.current = safe;
    setDurationState(safe);
  };
  // Commit the total length, ignoring <50ms changes and non-positive/NaN values.
  const commitTotalDuration = (next: number) => {
    const safe = Number.isFinite(next) && next > 0 ? next : 0;
    if (Math.abs(safe - totalDurationRef.current) < 0.05) return;
    totalDurationRef.current = safe;
    setTotalDuration(safe);
  };

  // Maps a MediaError code to a human-readable message for lastError.
  const decodeMediaError = (err: MediaError | null) => {
    if (!err) return "Unknown media error";
    switch (err.code) {
      case MediaError.MEDIA_ERR_ABORTED:
        return "Playback aborted";
      case MediaError.MEDIA_ERR_NETWORK:
        return "Network error while loading audio";
      case MediaError.MEDIA_ERR_DECODE:
        return "Audio decode error";
      case MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED:
        return "Audio source is not supported";
      default:
        return `Unknown media error (${err.code})`;
    }
  };

  // Attach all <audio> element listeners once, detach on unmount.
  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;
    const onPlay = () => {
      // While loading, playAudio() has already set the playing flag optimistically.
      if (isLoadingRef.current) return;
      commitPlaying(true);
    };
    const onPause = () => {
      if (isLoadingRef.current) return;
      commitPlaying(false);
    };
    const onEnded = () => {
      // Track finished: reset the position and clear the current track marker.
      commitPlaying(false);
      durationRef.current = 0;
      setDurationState(0);
      setCurrentMessageId(null);
    };
    const onTimeUpdate = () => {
      if (isLoadingRef.current) return;
      if (isSeekingRef.current) return;
      if (rafTimeUpdateRef.current != null) return; // a commit is already scheduled this frame
      rafTimeUpdateRef.current = requestAnimationFrame(() => {
        rafTimeUpdateRef.current = null;
        if (!isLoadingRef.current && !isSeekingRef.current) {
          commitDuration(audio.currentTime || 0);
        }
      });
    };
    const onLoadedMetadata = () => commitTotalDuration(audio.duration);
    const onDurationChange = () => commitTotalDuration(audio.duration);
    const onSeeked = () => {
      if (isSeekingRef.current) {
        // A seek initiated via setDuration() has completed.
        isSeekingRef.current = false;
        if (!isLoadingRef.current) commitDuration(audio.currentTime || 0);
        return;
      }
      if (isLoadingRef.current) return;
      commitDuration(audio.currentTime || 0);
    };
    const onCanPlay = () => {
      // Loading finished; normal play/pause/time event handling resumes.
      if (isLoadingRef.current) isLoadingRef.current = false;
    };
    const onError = (_e: Event) => {
      const message = decodeMediaError(audio.error);
      setLastError(message);
      console.error("Audio playback error", {
        message,
        mediaError: audio.error,
        currentSrc: audio.currentSrc,
        readyState: audio.readyState,
        networkState: audio.networkState,
      });
    };
    audio.addEventListener("play", onPlay);
    audio.addEventListener("pause", onPause);
    audio.addEventListener("ended", onEnded);
    audio.addEventListener("timeupdate", onTimeUpdate);
    audio.addEventListener("loadedmetadata", onLoadedMetadata);
    audio.addEventListener("durationchange", onDurationChange);
    audio.addEventListener("seeked", onSeeked);
    audio.addEventListener("canplay", onCanPlay);
    audio.addEventListener("error", onError);
    return () => {
      audio.removeEventListener("play", onPlay);
      audio.removeEventListener("pause", onPause);
      audio.removeEventListener("ended", onEnded);
      audio.removeEventListener("timeupdate", onTimeUpdate);
      audio.removeEventListener("loadedmetadata", onLoadedMetadata);
      audio.removeEventListener("durationchange", onDurationChange);
      audio.removeEventListener("seeked", onSeeked);
      audio.removeEventListener("canplay", onCanPlay);
      audio.removeEventListener("error", onError);
      if (rafTimeUpdateRef.current != null) {
        cancelAnimationFrame(rafTimeUpdateRef.current);
        rafTimeUpdateRef.current = null;
      }
    };
  }, []);

  // Revoke any outstanding object URL on unmount to avoid leaking the blob.
  useEffect(() => {
    return () => {
      if (objectUrlRef.current) {
        URL.revokeObjectURL(objectUrlRef.current);
        objectUrlRef.current = null;
      }
    };
  }, []);

  /**
   * Loads a new source into the audio element and starts playback.
   * Blob/File sources are wrapped in an object URL; the previous URL is revoked.
   */
  const playAudio = (
    artist: string,
    title: string,
    audio: string | Blob | File,
    messageId?: string | null
  ) => {
    const el = audioRef.current;
    if (!el) return;
    // Silence unused-argument warnings under strict lint rules.
    void artist;
    void title;
    setLastError(null);
    if (objectUrlRef.current) {
      URL.revokeObjectURL(objectUrlRef.current);
      objectUrlRef.current = null;
    }
    const audioSrc = typeof audio === "string" ? audio : URL.createObjectURL(audio);
    if (typeof audio !== "string") {
      objectUrlRef.current = audioSrc;
    }
    isLoadingRef.current = true;
    isSeekingRef.current = false;
    el.src = audioSrc;
    durationRef.current = 0;
    const msgId = messageId ?? null;
    setCurrentMessageId(msgId);
    if (msgId) setLastMessageId(msgId);
    // Optimistically flip to "playing" so the UI responds before media events arrive.
    isPlayingRef.current = true;
    setIsPlaying(true);
    const prevDuration = durationRef.current;
    // Reset the displayed position next frame unless something updated it meanwhile.
    requestAnimationFrame(() => {
      if (durationRef.current === prevDuration) {
        setDurationState(0);
      }
    });
    void el.play().catch((err) => {
      isLoadingRef.current = false;
      commitPlaying(false);
      setLastError(err instanceof Error ? err.message : "play() failed");
    });
  };

  // Pause playback; the "pause" media event drives the state change.
  const pause = () => {
    const el = audioRef.current;
    if (!el) return;
    el.pause();
  };
  // Resume from the current position; reverts the flag if play() is rejected.
  const resume = () => {
    const el = audioRef.current;
    if (!el) return;
    commitPlaying(true);
    void el.play().catch((err) => {
      commitPlaying(false);
      setLastError(err instanceof Error ? err.message : "resume() failed");
    });
  };
  // Stop and rewind; the loading flag briefly suppresses pause/seek events.
  const stop = () => {
    const el = audioRef.current;
    if (!el) return;
    isLoadingRef.current = true;
    el.pause();
    el.currentTime = 0;
    isLoadingRef.current = false;
    durationRef.current = 0;
    setDurationState(0);
    commitPlaying(false);
    setCurrentMessageId(null);
  };
  // Seek to an absolute position (seconds); the "seeked" handler clears the flag.
  const setDuration = (sec: number) => {
    const el = audioRef.current;
    if (!el) return;
    isSeekingRef.current = true;
    el.currentTime = Math.max(0, sec);
    commitDuration(el.currentTime || 0);
  };

  return (
    <PlayerContext.Provider
      value={{
        playAudio,
        playing: isPlaying,
        pause,
        resume,
        stop,
        setDuration,
        duration,
        totalDuration,
        currentMessageId,
        lastMessageId,
        lastError,
      }}
    >
      {props.children}
      {/* Hidden shared audio element that performs all playback. */}
      <audio ref={audioRef} />
    </PlayerContext.Provider>
  );
}

View File

@@ -0,0 +1,10 @@
import { useContext } from "react";
import { PlayerContext, PlayerContextValue } from "./PlayerProvider";
/**
 * Accessor for the shared audio-player context.
 *
 * @returns The PlayerContextValue supplied by the nearest PlayerProvider.
 * @throws Error when called outside a PlayerProvider tree.
 */
export function usePlayerContext() : PlayerContextValue {
  const context = useContext(PlayerContext);
  if (!context) {
    // Bug fix: the message previously named "useAudioPlayer", which is not
    // this hook's name — name the actual API so the error points developers
    // at the right call site.
    throw new Error("usePlayerContext must be used within a PlayerProvider");
  }
  return context;
}

View File

@@ -6,7 +6,8 @@ export enum AttachmentType {
MESSAGES = 1, MESSAGES = 1,
FILE = 2, FILE = 2,
AVATAR = 3, AVATAR = 3,
CALL = 4 CALL = 4,
VOICE = 5
} }
/** /**

View File

@@ -6,9 +6,14 @@ export enum SignalType {
KEY_EXCHANGE = 1, KEY_EXCHANGE = 1,
ACTIVE_CALL = 2, ACTIVE_CALL = 2,
END_CALL = 3, END_CALL = 3,
CREATE_ROOM = 4, /**
* Переведен в стадию активного, значит комната на SFU уже создана и можно начинать обмен сигналами WebRTC
*/
ACTIVE = 4,
END_CALL_BECAUSE_PEER_DISCONNECTED = 5, END_CALL_BECAUSE_PEER_DISCONNECTED = 5,
END_CALL_BECAUSE_BUSY = 6 END_CALL_BECAUSE_BUSY = 6,
ACCEPT = 7,
RINGING_TIMEOUT = 8
} }
/** /**
@@ -28,12 +33,8 @@ export class PacketSignalPeer extends Packet {
private signalType: SignalType = SignalType.CALL; private signalType: SignalType = SignalType.CALL;
/** private callId: string = "";
* Используется если SignalType == CREATE_ROOM, private joinToken: string = "";
* для идентификации комнаты на SFU сервере, в которой будет происходить обмен сигналами
* WebRTC для установления P2P соединения между участниками звонка
*/
private roomId: string = "";
public getPacketId(): number { public getPacketId(): number {
@@ -42,7 +43,9 @@ export class PacketSignalPeer extends Packet {
public _receive(stream: Stream): void { public _receive(stream: Stream): void {
this.signalType = stream.readInt8(); this.signalType = stream.readInt8();
if(this.signalType == SignalType.END_CALL_BECAUSE_BUSY || this.signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED){ if(this.signalType == SignalType.END_CALL_BECAUSE_BUSY
|| this.signalType == SignalType.RINGING_TIMEOUT
|| this.signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED){
return; return;
} }
this.src = stream.readString(); this.src = stream.readString();
@@ -50,8 +53,9 @@ export class PacketSignalPeer extends Packet {
if(this.signalType == SignalType.KEY_EXCHANGE){ if(this.signalType == SignalType.KEY_EXCHANGE){
this.sharedPublic = stream.readString(); this.sharedPublic = stream.readString();
} }
if(this.signalType == SignalType.CREATE_ROOM){ if(this.signalType == SignalType.CALL || this.signalType == SignalType.ACCEPT || this.signalType == SignalType.END_CALL){
this.roomId = stream.readString(); this.callId = stream.readString();
this.joinToken = stream.readString();
} }
} }
@@ -59,7 +63,9 @@ export class PacketSignalPeer extends Packet {
const stream = new Stream(); const stream = new Stream();
stream.writeInt16(this.getPacketId()); stream.writeInt16(this.getPacketId());
stream.writeInt8(this.signalType); stream.writeInt8(this.signalType);
if(this.signalType == SignalType.END_CALL_BECAUSE_BUSY || this.signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED){ if(this.signalType == SignalType.END_CALL_BECAUSE_BUSY
|| this.signalType == SignalType.RINGING_TIMEOUT
|| this.signalType == SignalType.END_CALL_BECAUSE_PEER_DISCONNECTED){
return stream; return stream;
} }
stream.writeString(this.src); stream.writeString(this.src);
@@ -67,8 +73,9 @@ export class PacketSignalPeer extends Packet {
if(this.signalType == SignalType.KEY_EXCHANGE){ if(this.signalType == SignalType.KEY_EXCHANGE){
stream.writeString(this.sharedPublic); stream.writeString(this.sharedPublic);
} }
if(this.signalType == SignalType.CREATE_ROOM){ if(this.signalType == SignalType.CALL || this.signalType == SignalType.ACCEPT || this.signalType == SignalType.END_CALL){
stream.writeString(this.roomId); stream.writeString(this.callId);
stream.writeString(this.joinToken);
} }
return stream; return stream;
} }
@@ -105,12 +112,20 @@ export class PacketSignalPeer extends Packet {
this.src = src; this.src = src;
} }
public getRoomId(): string { public getCallId(): string {
return this.roomId; return this.callId;
} }
public setRoomId(roomId: string) { public setCallId(callId: string) {
this.roomId = roomId; this.callId = callId;
}
public getJoinToken(): string {
return this.joinToken;
}
public setJoinToken(joinToken: string) {
this.joinToken = joinToken;
} }
} }

View File

@@ -17,6 +17,8 @@ export const constructLastMessageTextByAttachments = (attachment: string) => {
return "$a=Avatar"; return "$a=Avatar";
case AttachmentType.CALL: case AttachmentType.CALL:
return "$a=Call"; return "$a=Call";
case AttachmentType.VOICE:
return "$a=Voice message";
default: default:
return "[Unsupported attachment]"; return "[Unsupported attachment]";
} }

View File

@@ -1,13 +1,7 @@
export const APP_VERSION = "1.1.5"; export const APP_VERSION = "1.2.2";
export const CORE_MIN_REQUIRED_VERSION = "1.5.4"; export const CORE_MIN_REQUIRED_VERSION = "1.5.5";
export const RELEASE_NOTICE = ` export const RELEASE_NOTICE = `
**Обновление v1.1.5** :emoji_1f631: **Обновление v1.2.2** :emoji_1f631:
- Улучшена передача данных по сети (сокращение сетевых расходов на 10%). - Поддержка записи и прослушивания голосовых сообщений
- Улучшена пересылка вложений, теперь можно пересылать любые вложения в том числе большие,
без перекодирования.
- Улучшен протокол общения с сервером по части передачи сообщений.
- Улучшена логика, почищен код.
- Исправлен баг интерфейса связанный с долгой загрузкой вложений.
- Исправлена возможность позвонить в системный аккаунт.
`; `;

View File

@@ -28,8 +28,8 @@ export function createPreloaderWindow() {
export function createAppWindow(preloaderWindow?: BrowserWindow): void { export function createAppWindow(preloaderWindow?: BrowserWindow): void {
const mainWindow = new BrowserWindow({ const mainWindow = new BrowserWindow({
width: 900, width: 385,
height: 670, height: 555,
minWidth: 385, minWidth: 385,
minHeight: 555, minHeight: 555,
show: false, show: false,

View File

@@ -1,6 +1,6 @@
{ {
"name": "Rosetta", "name": "Rosetta",
"version": "1.5.4", "version": "1.5.5",
"description": "Rosetta Messenger", "description": "Rosetta Messenger",
"main": "./out/main/main.js", "main": "./out/main/main.js",
"license": "MIT", "license": "MIT",