Базовая версия голосовых сообщений и аудиоплеер. Кодирование OPUS

This commit is contained in:
RoyceDa
2026-04-10 17:20:44 +02:00
parent 93ef692eb5
commit b596d36543
11 changed files with 1204 additions and 96 deletions

View File

@@ -22,6 +22,7 @@ import { DialogStateProvider } from './providers/DialogStateProvider.tsx/DialogS
import { DeviceConfirm } from './views/DeviceConfirm/DeviceConfirm';
import { SystemAccountProvider } from './providers/SystemAccountsProvider/SystemAccountsProvider';
import { DeviceProvider } from './providers/DeviceProvider/DeviceProvider';
import { PlayerProvider } from './providers/PlayerProvider/PlayerProvider';
window.Buffer = Buffer;
export default function App() {
@@ -58,6 +59,7 @@ export default function App() {
<Topbar></Topbar>
<ContextMenuProvider>
<ImageViwerProvider>
<PlayerProvider>
<AvatarProvider>
<Routes>
<Route path="/" element={
@@ -71,6 +73,7 @@ export default function App() {
<Route path="/deviceconfirm" element={<DeviceConfirm />} />
</Routes>
</AvatarProvider>
</PlayerProvider>
</ImageViwerProvider>
</ContextMenuProvider>
</Box>

View File

@@ -1,7 +1,7 @@
import { useDialog } from "@/app/providers/DialogProvider/useDialog";
import { useRosettaColors } from "@/app/hooks/useRosettaColors";
import { Box, Divider, Flex, Menu, Popover, Text, Transition, useComputedColorScheme } from "@mantine/core";
import { IconBarrierBlock, IconCamera, IconDoorExit, IconFile, IconMicrophone, IconMoodSmile, IconPaperclip, IconSend } from "@tabler/icons-react";
import { IconBarrierBlock, IconCamera, IconDoorExit, IconFile, IconMicrophone, IconMoodSmile, IconPaperclip, IconSend, IconTrash } from "@tabler/icons-react";
import { useEffect, useRef, useState } from "react";
import { useBlacklist } from "@/app/providers/BlacklistProvider/useBlacklist";
import { filePrapareForNetworkTransfer, generateRandomKey, imagePrepareForNetworkTransfer } from "@/app/utils/utils";
@@ -25,7 +25,8 @@ import { AnimatedButton } from "../AnimatedButton/AnimatedButton";
import { useUserCacheFunc } from "@/app/providers/InformationProvider/useUserCacheFunc";
import { MentionList, Mention } from "../MentionList/MentionList";
import { useDrafts } from "@/app/providers/DialogProvider/useDrafts";
import { useVoiceMessage } from "./useVoiceMessage";
import { VoiceRecorder } from "../VoiceRecorder/VoiceRecorder";
export function DialogInput() {
const colors = useRosettaColors();
@@ -47,6 +48,7 @@ export function DialogInput() {
const [mentionList, setMentionList] = useState<Mention[]>([]);
const mentionHandling = useRef<string>("");
const {getDraft, saveDraft} = useDrafts(dialog);
const {start, stop, isRecording, duration, waves, getAudioBlob, interpolateCompressWaves} = useVoiceMessage();
const avatars = useAvatars(
@@ -65,10 +67,12 @@ export function DialogInput() {
], [], true);
const hasText = message.trim().length > 0;
const showSendIcon = hasText || attachments.length > 0;
const showSendIcon = hasText || attachments.length > 0 || isRecording;
const onMicClick = () => {
console.info("Start voice record");
const onMicroClick = () => {
if(!isRecording) {
start();
}
};
const fileDialog = useFileDialog({
@@ -195,8 +199,28 @@ export function DialogInput() {
mentionHandling.current = username;
}
const send = () => {
if(blocked || (message.trim() == "" && attachments.length <= 0)) {
const send = async () => {
if(blocked || (message.trim() == "" && attachments.length <= 0 && !isRecording)){
return;
}
if(isRecording){
const audioBlob = getAudioBlob();
stop();
if(!audioBlob){
return;
}
sendMessage("", [
{
blob: Buffer.from(await audioBlob.arrayBuffer()).toString('binary'),
id: generateRandomKey(8),
type: AttachmentType.VOICE,
preview: duration + "::" + interpolateCompressWaves(35).join(","),
transport: {
transport_server: "",
transport_tag: ""
}
}
]);
return;
}
sendMessage(message, attachments);
@@ -372,6 +396,12 @@ export function DialogInput() {
{!blocked &&
<Flex h={'100%'} p={'xs'} direction={'row'} bg={colors.boxColor}>
<Flex w={25} mt={10} justify={'center'}>
{isRecording && (
<IconTrash onClick={stop} style={{
cursor: 'pointer'
}} color={colors.error} stroke={1.5} size={25}></IconTrash>
)}
{!isRecording && (
<Menu width={150} withArrow>
<Menu.Target>
<IconPaperclip stroke={1.5} style={{
@@ -392,12 +422,14 @@ export function DialogInput() {
} onClick={onClickCamera}>Avatar {hasGroup(dialog) && 'group'}</Menu.Item>}
</Menu.Dropdown>
</Menu>
)}
</Flex>
<Flex
w={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'}
maw={'calc(100% - (25px + 50px + var(--mantine-spacing-xs)))'}
align={'center'}
>
{!isRecording && <>
<RichTextInput
ref={editableDivRef}
style={{
@@ -414,19 +446,17 @@ export function DialogInput() {
}}
placeholder="Type message..."
autoFocus
//ref={textareaRef}
//onPaste={onPaste}
//maxLength={2500}
//w={'100%'}
//h={'100%'}
onKeyDown={handleKeyDown}
onChange={setMessage}
onPaste={onPaste}
//dangerouslySetInnerHTML={{__html: message}}
></RichTextInput>
</>}
{isRecording && <>
<VoiceRecorder duration={duration} waves={waves}></VoiceRecorder>
</>}
</Flex>
<Flex mt={10} w={'calc(50px + var(--mantine-spacing-xs))'} gap={'xs'}>
{!isRecording && <>
<Popover withArrow>
<Popover.Target>
<IconMoodSmile color={colors.chevrons.active} stroke={1.5} size={25} style={{
@@ -442,7 +472,8 @@ export function DialogInput() {
/>
</Popover.Dropdown>
</Popover>
<Box pos="relative" w={25} h={25}>
</>}
<Box pos="relative" ml={isRecording ? 35 : 0} w={25} h={25}>
<Transition mounted={showSendIcon} transition="pop" duration={180} timingFunction="ease">
{(styles) => (
<IconSend
@@ -465,7 +496,7 @@ export function DialogInput() {
<IconMicrophone
stroke={1.5}
color={colors.chevrons.active}
onClick={onMicClick}
onClick={onMicroClick}
style={{
...styles,
position: 'absolute',

View File

@@ -0,0 +1,273 @@
import { useState, useRef, useCallback, useEffect } from "react";
/**
 * Voice-message recording hook.
 *
 * Captures microphone audio with MediaRecorder (preferring an Opus
 * container), counts elapsed whole seconds, and samples a rolling
 * peak-amplitude waveform (one bar every 300 ms, capped at 120 bars)
 * for the live recording UI.
 *
 * Contract:
 * - start(): request the microphone and begin recording (resets state).
 * - stop(): finish recording; already-recorded chunks stay readable.
 * - pause() / play(): suspend / resume an active recording.
 * - getAudioBlob(): Blob of everything recorded so far, or null.
 * - interpolateCompressWaves(n): waveform resampled to exactly n bars.
 */
export function useVoiceMessage(): {
  isRecording: boolean;
  isPaused: boolean;
  duration: number;
  waves: number[];
  start: () => Promise<void>;
  stop: () => void;
  pause: () => void;
  play: () => void;
  error: string | null;
  getAudioBlob: () => Blob | null;
  interpolateCompressWaves: (targetLength: number) => number[];
} {
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  const [duration, setDuration] = useState(0);
  const [waves, setWaves] = useState<number[]>([]);
  const [error, setError] = useState<string | null>(null);

  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  const streamRef = useRef<MediaStream | null>(null);
  const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const waveTimerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
  const waveDataRef = useRef<Uint8Array<ArrayBuffer> | null>(null);
  // Remembers the recorder's real mime type so getAudioBlob() stays
  // accurate even after stop() has released the recorder ref.
  const lastMimeTypeRef = useRef<string>("audio/webm;codecs=opus");

  /** Stop the 1-second duration counter. */
  const clearTimer = useCallback(() => {
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  /** Stop the 300 ms waveform sampling loop. */
  const stopWaveLoop = useCallback(() => {
    if (waveTimerRef.current) {
      clearInterval(waveTimerRef.current);
      waveTimerRef.current = null;
    }
  }, []);

  /** Start (idempotently) the 1-second duration counter. */
  const startTimer = useCallback(() => {
    if (timerRef.current) return;
    timerRef.current = setInterval(() => {
      setDuration((prev) => prev + 1);
    }, 1000);
  }, []);

  /** Begin sampling the analyser: one peak value per 300 ms tick. */
  const startWaveLoop = useCallback(() => {
    stopWaveLoop();
    const analyser = analyserRef.current;
    if (!analyser) return;
    if (!waveDataRef.current || waveDataRef.current.length !== analyser.frequencyBinCount) {
      waveDataRef.current = new Uint8Array(new ArrayBuffer(analyser.frequencyBinCount));
    }
    // Rolling buffer size for the live waveform display.
    const MAX_WAVES = 120;
    const tick = () => {
      if (!analyserRef.current || !waveDataRef.current) return;
      analyserRef.current.getByteFrequencyData(waveDataRef.current);
      // Take the loudest frequency bin as this tick's bar height (0..1).
      let peak = 0;
      for (let i = 0; i < waveDataRef.current.length; i++) {
        const v = waveDataRef.current[i];
        if (v > peak) peak = v;
      }
      const bar = peak / 255;
      setWaves((prev) => {
        const next = [...prev, bar];
        return next.length > MAX_WAVES ? next.slice(next.length - MAX_WAVES) : next;
      });
    };
    tick();
    waveTimerRef.current = setInterval(tick, 300);
  }, [stopWaveLoop]);

  /** Release the analyser graph, AudioContext and microphone tracks. */
  const cleanupAudio = useCallback(() => {
    stopWaveLoop();
    sourceRef.current?.disconnect();
    sourceRef.current = null;
    analyserRef.current = null;
    waveDataRef.current = null;
    if (audioContextRef.current) {
      audioContextRef.current.close();
      audioContextRef.current = null;
    }
    streamRef.current?.getTracks().forEach((track) => track.stop());
    streamRef.current = null;
  }, [stopWaveLoop]);

  /** Request the microphone and start a new recording session. */
  const start = useCallback(async () => {
    try {
      setError(null);
      setDuration(0);
      setWaves([]);
      chunksRef.current = [];
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      streamRef.current = stream;
      const audioContext = new AudioContext();
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 2048;
      analyser.smoothingTimeConstant = 0;
      analyser.minDecibels = -100;
      analyser.maxDecibels = -10;
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);
      audioContextRef.current = audioContext;
      analyserRef.current = analyser;
      sourceRef.current = source;
      // Pick the best supported codec (Opus first).
      const preferredTypes = [
        "audio/webm;codecs=opus",
        "audio/ogg;codecs=opus",
        "audio/webm",
      ];
      const mimeType = preferredTypes.find((t) => MediaRecorder.isTypeSupported(t)) ?? "";
      const mediaRecorder = new MediaRecorder(stream, {
        ...(mimeType ? { mimeType } : {}),
        audioBitsPerSecond: 32_000,
      });
      mediaRecorderRef.current = mediaRecorder;
      lastMimeTypeRef.current = mediaRecorder.mimeType || mimeType || lastMimeTypeRef.current;
      mediaRecorder.ondataavailable = (e) => {
        if (e.data.size > 0) chunksRef.current.push(e.data);
      };
      mediaRecorder.onstop = () => {
        cleanupAudio();
      };
      // Collect a chunk every 100 ms so little is lost on abrupt stops.
      mediaRecorder.start(100);
      setIsRecording(true);
      setIsPaused(false);
      startTimer();
      startWaveLoop();
    } catch (err) {
      // Release any partially-acquired resources (mic stream, AudioContext):
      // without this the browser's recording indicator can stay on after a
      // failure between getUserMedia and MediaRecorder construction.
      cleanupAudio();
      setError("Could not start voice recording. Please check microphone permissions.");
      console.error("Voice recording error:", err);
    }
  }, [startTimer, startWaveLoop, cleanupAudio]);

  /** Finish recording. Chunks remain available via getAudioBlob(). */
  const stop = useCallback(() => {
    if (mediaRecorderRef.current && isRecording) {
      // onstop fires asynchronously and performs cleanupAudio().
      mediaRecorderRef.current.stop();
      mediaRecorderRef.current = null;
      setIsRecording(false);
      setIsPaused(false);
      clearTimer();
      stopWaveLoop();
    }
  }, [isRecording, clearTimer, stopWaveLoop]);

  /** Pause an active recording (duration and waveform pause too). */
  const pause = useCallback(() => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state === "recording") {
      mediaRecorderRef.current.pause();
      setIsPaused(true);
      clearTimer();
      stopWaveLoop();
    }
  }, [clearTimer, stopWaveLoop]);

  /** Resume a paused recording. */
  const play = useCallback(() => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state === "paused") {
      mediaRecorderRef.current.resume();
      setIsPaused(false);
      startTimer();
      startWaveLoop();
    }
  }, [startTimer, startWaveLoop]);

  // Tear everything down if the owning component unmounts mid-recording.
  useEffect(() => {
    return () => {
      clearTimer();
      stopWaveLoop();
      if (mediaRecorderRef.current && mediaRecorderRef.current?.state !== "inactive") {
        mediaRecorderRef.current.stop();
      }
      cleanupAudio();
    };
  }, [clearTimer, stopWaveLoop, cleanupAudio]);

  /** Everything recorded so far as a single Blob, or null if empty. */
  const getAudioBlob = useCallback((): Blob | null => {
    if (chunksRef.current.length === 0) return null;
    // stop() nulls the recorder ref, so fall back to the remembered type
    // instead of a hard-coded guess.
    const mimeType = mediaRecorderRef.current?.mimeType ?? lastMimeTypeRef.current;
    return new Blob(chunksRef.current, { type: mimeType });
  }, []);

  /**
   * Resample the captured waveform to exactly targetLength bars:
   * bucket-max when shrinking, linear interpolation when stretching.
   */
  const interpolateCompressWaves = useCallback((targetLength: number) => {
    if (targetLength <= 0) return [];
    if (waves.length === 0) return Array(targetLength).fill(0);
    if (waves.length === targetLength) return waves;
    if (waves.length > targetLength) {
      const compressed: number[] = [];
      const bucketSize = waves.length / targetLength;
      for (let i = 0; i < targetLength; i++) {
        const start = Math.floor(i * bucketSize);
        const end = Math.max(start + 1, Math.floor((i + 1) * bucketSize));
        let max = 0;
        for (let j = start; j < end && j < waves.length; j++) {
          if (waves[j] > max) max = waves[j];
        }
        compressed.push(max);
      }
      return compressed;
    }
    if (targetLength === 1) return [waves[0]];
    const stretched: number[] = [];
    const lastSourceIndex = waves.length - 1;
    for (let i = 0; i < targetLength; i++) {
      const position = (i * lastSourceIndex) / (targetLength - 1);
      const left = Math.floor(position);
      const right = Math.min(Math.ceil(position), lastSourceIndex);
      if (left === right) {
        stretched.push(waves[left]);
        continue;
      }
      const t = position - left;
      const value = waves[left] * (1 - t) + waves[right] * t;
      stretched.push(value);
    }
    return stretched;
  }, [waves]);

  return {
    isRecording,
    isPaused,
    duration,
    waves,
    start,
    stop,
    pause,
    play,
    error,
    getAudioBlob,
    interpolateCompressWaves,
  };
}

View File

@@ -9,6 +9,7 @@ import { AttachmentError } from "../AttachmentError/AttachmentError";
import { MessageAvatar } from "./MessageAvatar";
import { MessageProps } from "../Messages/Message";
import { MessageCall } from "./MessageCall";
import { MessageVoice } from "./MessageVoice";
export interface MessageAttachmentsProps {
attachments: Attachment[];
@@ -54,6 +55,8 @@ export function MessageAttachments(props: MessageAttachmentsProps) {
return <MessageAvatar {...attachProps} key={index}></MessageAvatar>
case AttachmentType.CALL:
return <MessageCall {...attachProps} key={index}></MessageCall>
case AttachmentType.VOICE:
return <MessageVoice {...attachProps} key={index}></MessageVoice>
default:
return <AttachmentError key={index}></AttachmentError>;
}

View File

@@ -0,0 +1,270 @@
import { DownloadStatus, useAttachment } from "@/app/providers/AttachmentProvider/useAttachment";
import { AttachmentProps } from "./MessageAttachments";
import { Avatar, Box, Flex, Text, useMantineTheme } from "@mantine/core";
import { AnimatedRoundedProgress } from "../AnimatedRoundedProgress/AnimatedRoundedProgress";
import { IconArrowDown, IconPlayerPauseFilled, IconPlayerPlayFilled, IconX } from "@tabler/icons-react";
import { DeliveredMessageState } from "@/app/providers/DialogProvider/DialogProvider";
import { useRosettaColors } from "@/app/hooks/useRosettaColors";
import { useMemo, useRef } from "react";
import { usePlayerContext } from "@/app/providers/PlayerProvider/usePlayerContext";
// Waveform layout constants for a voice-message bubble.
const WAVE_BARS = 40;      // fixed number of bars, so every bubble is the same width
const BAR_WIDTH = 2;       // px per bar
const BAR_GAP = 2;         // px between bars
const MIN_BAR_HEIGHT = 4;  // px floor so silent stretches remain visible
const MAX_BAR_HEIGHT = 24; // px ceiling for the loudest sample
/**
 * Resample a waveform preview to exactly `targetLength` bars.
 *
 * Shrinking keeps each bucket's peak; stretching interpolates linearly
 * between neighbouring samples. An empty source yields all-zero bars.
 */
function normalizeWaves(source: number[], targetLength: number): number[] {
  if (targetLength <= 0) return [];
  if (source.length === 0) return Array(targetLength).fill(0);
  if (source.length === targetLength) return source;

  if (source.length > targetLength) {
    // Downsample: partition into buckets and keep each bucket's peak.
    const bucketSize = source.length / targetLength;
    return Array.from({ length: targetLength }, (_, i) => {
      const from = Math.floor(i * bucketSize);
      const to = Math.max(from + 1, Math.floor((i + 1) * bucketSize));
      let peakValue = 0;
      for (let j = from; j < to && j < source.length; j++) {
        if (source[j] > peakValue) peakValue = source[j];
      }
      return peakValue;
    });
  }

  // Upsample: linear interpolation across the source range.
  if (targetLength === 1) return [source[0]];
  const lastIndex = source.length - 1;
  return Array.from({ length: targetLength }, (_, i) => {
    const pos = (i * lastIndex) / (targetLength - 1);
    const lo = Math.floor(pos);
    const hi = Math.min(Math.ceil(pos), lastIndex);
    if (lo === hi) return source[lo];
    const frac = pos - lo;
    return source[lo] * (1 - frac) + source[hi] * frac;
  });
}
/** Format a second count as zero-padded "mm:ss", clamping negatives to 0. */
function formatTime(seconds: number) {
  const total = Math.max(0, Math.floor(seconds));
  const minutes = String(Math.floor(total / 60)).padStart(2, "0");
  const secs = String(total % 60).padStart(2, "0");
  return `${minutes}:${secs}`;
}
/**
 * Chat-bubble renderer for a voice-message attachment.
 *
 * Shows a circular download/play button, a clickable waveform (seek) and a
 * time label. Playback is delegated to the global PlayerProvider, so at most
 * one voice message plays at a time; this component only mirrors whether it
 * is the currently active track.
 */
export function MessageVoice(props: AttachmentProps) {
  const { downloadPercentage, downloadStatus, uploadedPercentage, download, getPreview } = useAttachment(
    props.attachment,
    props.parent,
  );
  const theme = useMantineTheme();
  const colors = useRosettaColors();

  // Preview payload format: "<seconds>::<comma-separated amplitudes>"
  // (built by the sender in DialogInput's voice branch).
  const preview = getPreview() || "";
  const [durationPart = "0", wavesPart = ""] = preview.split("::");
  const previewDuration = Number.parseInt(durationPart, 10) || 0;
  // Parse amplitudes defensively: drop NaN/negative entries.
  const rawWaves = useMemo(
    () =>
      wavesPart
        .split(",")
        .map((s) => Number.parseFloat(s))
        .filter((n) => Number.isFinite(n) && n >= 0),
    [wavesPart]
  );
  // Resample to a fixed bar count so every bubble has the same width.
  const waves = useMemo(() => normalizeWaves(rawWaves, WAVE_BARS), [rawWaves]);
  // Tallest bar; fall back to 1 so the normalisation below never divides by 0.
  const peak = useMemo(() => {
    const max = Math.max(...waves, 0);
    return max > 0 ? max : 1;
  }, [waves]);

  const isUploading =
    props.delivered === DeliveredMessageState.WAITING &&
    uploadedPercentage > 0 &&
    uploadedPercentage < 100;
  const error = downloadStatus === DownloadStatus.ERROR;
  const waveformWidth = WAVE_BARS * BAR_WIDTH + (WAVE_BARS - 1) * BAR_GAP;
  const waveformRef = useRef<HTMLDivElement | null>(null);

  const {
    playAudio,
    pause,
    duration: currentDuration,
    playing,
    setDuration,
    totalDuration,
    currentMessageId,
  } = usePlayerContext();

  // Important: the "active" state comes from the global player, not local state.
  const messageId = String((props.parent as any)?.id ?? (props.attachment as any)?.messageId ?? props.attachment.id);
  const isCurrentTrack = currentMessageId === messageId;
  // Prefer the player's real decoded duration once known; clamp to >= 1s so
  // the progress division below is always defined.
  const fullDuration = Math.max(isCurrentTrack && totalDuration > 0 ? totalDuration : previewDuration, 1);
  const safeCurrent = isCurrentTrack ? currentDuration : 0;
  const playbackProgress = Math.max(0, Math.min(1, safeCurrent / fullDuration));

  // The attachment blob is stored as a "binary" string; rebuild the audio Blob.
  // NOTE(review): container is assumed to be webm — confirm against the recorder.
  const createAudioBlob = () => new Blob([Buffer.from(props.attachment.blob, "binary")], { type: "audio/webm" });

  // Hand the blob to the global player; optionally seek right after starting.
  const ensureStarted = (seekToSec?: number) => {
    const blob = createAudioBlob();
    playAudio("Voice Message", "", blob, messageId);
    if (typeof seekToSec === "number") {
      // Defer one frame so the new source is attached before seeking.
      requestAnimationFrame(() => setDuration(seekToSec));
    }
  };

  // Avatar button: download first, otherwise play / pause / resume.
  const handleMainAction = () => {
    if (error) return;
    if (downloadStatus !== DownloadStatus.DOWNLOADED) {
      download();
      return;
    }
    if (!isCurrentTrack) {
      ensureStarted();
      return;
    }
    if (playing) {
      pause();
      return;
    }
    // Paused on this track: restart from the current position.
    ensureStarted(Math.max(0, safeCurrent));
  };

  // Click-to-seek on the waveform strip.
  const handleSeek = (e: React.MouseEvent<HTMLDivElement>) => {
    if (error || downloadStatus !== DownloadStatus.DOWNLOADED) return;
    const rect = waveformRef.current?.getBoundingClientRect();
    if (!rect || rect.width <= 0) return;
    const x = e.clientX - rect.left;
    const progress = Math.max(0, Math.min(1, x / rect.width));
    const seekTo = progress * fullDuration;
    if (!isCurrentTrack) {
      ensureStarted(seekTo);
      return;
    }
    setDuration(seekTo);
  };

  // Remaining time while this track is playing, total duration otherwise.
  const timeText =
    isCurrentTrack && safeCurrent > 0
      ? `-${formatTime(Math.max(0, fullDuration - safeCurrent))}`
      : formatTime(fullDuration);

  return (
    <Flex gap="sm" align="center">
      <Avatar
        bg={error ? colors.error : colors.brandColor}
        size={40}
        style={{ cursor: "pointer", position: "relative" }}
        onClick={handleMainAction}
      >
        {!error && (
          <>
            {downloadStatus === DownloadStatus.DOWNLOADING &&
              downloadPercentage > 0 &&
              downloadPercentage < 100 && (
                <div style={{ position: "absolute", top: 0, left: 0 }}>
                  <AnimatedRoundedProgress size={40} value={downloadPercentage} />
                </div>
              )}
            {isUploading && (
              <div style={{ position: "absolute", top: 0, left: 0 }}>
                <AnimatedRoundedProgress color="#fff" size={40} value={uploadedPercentage} />
              </div>
            )}
            {downloadStatus !== DownloadStatus.DOWNLOADED && <IconArrowDown color="white" size={22} />}
            {downloadStatus === DownloadStatus.DOWNLOADED &&
              (isCurrentTrack && playing ? (
                <IconPlayerPauseFilled color="white" size={22} />
              ) : (
                <IconPlayerPlayFilled color="white" size={22} />
              ))}
          </>
        )}
        {(error || isUploading) && <IconX color="white" size={22} />}
      </Avatar>
      <Flex direction="column">
        <Box
          ref={waveformRef}
          w={waveformWidth}
          h={32}
          onClick={handleSeek}
          style={{ overflow: "hidden", cursor: "pointer" }}
        >
          <Flex h="100%" align="center" gap={BAR_GAP} wrap="nowrap">
            {waves.map((value, index) => {
              const normalized = Math.max(0, Math.min(1, value / peak));
              const height = Math.max(
                MIN_BAR_HEIGHT,
                Math.min(MAX_BAR_HEIGHT, MIN_BAR_HEIGHT + normalized * (MAX_BAR_HEIGHT - MIN_BAR_HEIGHT))
              );
              // How much of this bar the playhead has passed (0..1); a
              // partially-passed bar gets a two-colour gradient fill.
              const passed = playbackProgress * waves.length - index;
              const fillPercent = Math.max(0, Math.min(1, passed));
              const inactiveColor = theme.colors.gray[4];
              const activeColor = colors.brandColor;
              let background = inactiveColor;
              if (fillPercent >= 1) {
                background = activeColor;
              } else if (fillPercent > 0) {
                background = `linear-gradient(90deg, ${activeColor} 0%, ${activeColor} ${fillPercent * 100}%, ${inactiveColor} ${fillPercent * 100}%, ${inactiveColor} 100%)`;
              }
              return (
                <Box
                  key={index}
                  w={BAR_WIDTH}
                  h={height}
                  style={{
                    flex: `0 0 ${BAR_WIDTH}px`,
                    borderRadius: 999,
                    background,
                  }}
                />
              );
            })}
          </Flex>
        </Box>
        <Text size="xs" c="dimmed">
          {timeText}
        </Text>
      </Flex>
    </Flex>
  );
}

View File

@@ -0,0 +1,278 @@
import { Box, Flex, Text, useMantineTheme } from "@mantine/core";
import { useEffect, useRef, useState } from "react";
/** Props for the live voice-recording waveform visualiser. */
interface VoiceRecorderProps {
  // Elapsed recording time in whole seconds.
  duration: number;
  // Rolling amplitude samples, each expected in [0, 1].
  waves: number[];
}

/** A single on-screen waveform bar plus its enter-animation state. */
type AnimatedBar = {
  id: number;       // stable key for React reconciliation
  value: number;    // amplitude in [0, 1]
  entered: boolean; // false until the frame after mount (drives scaleY pop-in)
};

const VISIBLE_BARS = 50;
const BAR_WIDTH = 3;
const BAR_GAP = 2;
const STEP_PX = BAR_WIDTH + BAR_GAP; // horizontal distance one bar occupies
const COMPONENT_HEIGHT = 45;
const MAX_BAR_HEIGHT = 28;
const MIN_BAR_HEIGHT = 4;

/**
 * Animated waveform shown in the input area while a voice message is being
 * recorded. New bars pop in on the right; once the strip is full the whole
 * row scrolls left smoothly between incoming samples.
 */
export function VoiceRecorder(props: VoiceRecorderProps) {
  const theme = useMantineTheme();
  // Bars currently on screen (at most VISIBLE_BARS).
  const [bars, setBars] = useState<AnimatedBar[]>([]);
  // Sub-pixel X offset for the smooth scroll between appends.
  const [subShift, setSubShift] = useState(0);
  const prevLengthRef = useRef(0);
  const prevWavesRef = useRef<number[]>([]);
  // Monotonic id source for bar keys.
  const idRef = useRef(0);
  const enterFrameRef = useRef<number | null>(null);
  const scrollFrameRef = useRef<number | null>(null);
  // Timestamp of the last append; drives the scroll progress.
  const lastAppendAtRef = useRef<number | null>(null);
  // Smoothed estimate of milliseconds between appended bars.
  const appendIntervalRef = useRef(120);
  const barsLengthRef = useRef(0);

  // mm:ss formatting of the elapsed recording time.
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60)
      .toString()
      .padStart(2, "0");
    const secs = (seconds % 60).toString().padStart(2, "0");
    return `${mins}:${secs}`;
  };

  // Mirror bars.length into a ref so the rAF loop can read it without
  // re-subscribing the effect.
  useEffect(() => {
    barsLengthRef.current = bars.length;
  }, [bars.length]);

  // React to changes in props.waves: handle resets, detect appended samples
  // (growing list or rolling buffer), and animate the new bars in.
  useEffect(() => {
    if (props.waves.length === 0) {
      // Recording was reset — clear the strip.
      setBars([]);
      setSubShift(0);
      prevLengthRef.current = 0;
      prevWavesRef.current = [];
      lastAppendAtRef.current = null;
      return;
    }
    if (props.waves.length < prevLengthRef.current) {
      // Source shrank (a new recording started): rebuild from the tail,
      // already "entered" so there is no pop-in for existing data.
      const resetBars = props.waves.slice(-VISIBLE_BARS).map((value) => ({
        id: idRef.current++,
        value,
        entered: true,
      }));
      setBars(resetBars);
      setSubShift(0);
      prevLengthRef.current = props.waves.length;
      prevWavesRef.current = props.waves;
      lastAppendAtRef.current = performance.now();
      return;
    }
    const prevWaves = prevWavesRef.current;
    let appended: number[] = [];
    // Normal mode: the list grew.
    if (props.waves.length > prevLengthRef.current) {
      appended = props.waves.slice(prevLengthRef.current);
    } else if (props.waves.length === prevLengthRef.current && props.waves.length > 0) {
      // Rolling buffer: same length, but the data shifted by one sample.
      let changed = false;
      if (prevWaves.length !== props.waves.length) {
        changed = true;
      } else {
        for (let i = 0; i < props.waves.length; i++) {
          if (props.waves[i] !== prevWaves[i]) {
            changed = true;
            break;
          }
        }
      }
      if (changed) {
        appended = [props.waves[props.waves.length - 1]];
      }
    }
    if (appended.length > 0) {
      const now = performance.now();
      if (lastAppendAtRef.current != null) {
        // Exponential moving average (70/30) of ms-per-bar so the scroll
        // speed tracks the real sampling cadence.
        const dt = now - lastAppendAtRef.current;
        const perBar = dt / appended.length;
        appendIntervalRef.current = appendIntervalRef.current * 0.7 + perBar * 0.3;
      }
      lastAppendAtRef.current = now;
      setSubShift(0);
      const newIds: number[] = [];
      setBars((prev) => {
        const next = [...prev];
        appended.forEach((value) => {
          const id = idRef.current++;
          newIds.push(id);
          next.push({
            id,
            value,
            entered: false,
          });
        });
        return next.slice(-VISIBLE_BARS);
      });
      if (enterFrameRef.current) {
        cancelAnimationFrame(enterFrameRef.current);
      }
      // Flip the new bars to "entered" on the next frame so their scaleY
      // CSS transition actually runs.
      enterFrameRef.current = requestAnimationFrame(() => {
        setBars((prev) => {
          const ids = new Set(newIds);
          return prev.map((bar) => (ids.has(bar.id) ? { ...bar, entered: true } : bar));
        });
      });
    }
    prevLengthRef.current = props.waves.length;
    prevWavesRef.current = props.waves;
  }, [props.waves]);

  // Continuous rAF loop: slide the row left in proportion to the time
  // elapsed since the last append (only once the strip is full).
  useEffect(() => {
    const tick = () => {
      const startedAt = lastAppendAtRef.current;
      if (startedAt != null) {
        const elapsed = performance.now() - startedAt;
        const interval = Math.max(16, appendIntervalRef.current);
        const progress = Math.min(1, elapsed / interval);
        const smoothShift = barsLengthRef.current >= VISIBLE_BARS ? -progress * STEP_PX : 0;
        setSubShift(smoothShift);
      } else {
        setSubShift(0);
      }
      scrollFrameRef.current = requestAnimationFrame(tick);
    };
    scrollFrameRef.current = requestAnimationFrame(tick);
    return () => {
      if (scrollFrameRef.current) {
        cancelAnimationFrame(scrollFrameRef.current);
      }
    };
  }, []);

  // Cancel any pending animation frames on unmount.
  useEffect(() => {
    return () => {
      if (enterFrameRef.current) {
        cancelAnimationFrame(enterFrameRef.current);
      }
      if (scrollFrameRef.current) {
        cancelAnimationFrame(scrollFrameRef.current);
      }
    };
  }, []);

  const waveformWidth = VISIBLE_BARS * BAR_WIDTH + (VISIBLE_BARS - 1) * BAR_GAP;
  return (
    <Flex
      direction="row"
      h={COMPONENT_HEIGHT}
      mih={COMPONENT_HEIGHT}
      mah={COMPONENT_HEIGHT}
      align="center"
      justify="center"
      gap="xs"
      px={6}
    >
      <Text size="xs" c="dimmed" w={36}>
        {formatDuration(props.duration)}
      </Text>
      <Box
        w={waveformWidth}
        h={COMPONENT_HEIGHT}
        style={{
          overflow: "hidden",
        }}
      >
        <Flex
          h="100%"
          align="center"
          gap={BAR_GAP}
          wrap="nowrap"
          style={{ transform: `translateX(${subShift}px)` }}
        >
          {Array.from({ length: VISIBLE_BARS }).map((_, index) => {
            const bar = bars[index];
            if (!bar) {
              return (
                <Box
                  key={`empty-${index}`}
                  w={BAR_WIDTH}
                  h={MIN_BAR_HEIGHT}
                  style={{
                    flex: `0 0 ${BAR_WIDTH}px`,
                    borderRadius: 999,
                    background: theme.colors.gray[3],
                    opacity: 0.22,
                  }}
                />
              );
            }
            const normalized = Math.max(0, Math.min(1, bar.value));
            const height = Math.max(
              MIN_BAR_HEIGHT,
              Math.min(MAX_BAR_HEIGHT, MIN_BAR_HEIGHT + normalized * (MAX_BAR_HEIGHT - MIN_BAR_HEIGHT))
            );
            const isLast = index === bars.length - 1;
            const isNearTail = index >= bars.length - 3;
            return (
              <Box
                key={bar.id}
                w={BAR_WIDTH}
                h={height}
                style={{
                  flex: `0 0 ${BAR_WIDTH}px`,
                  alignSelf: "center",
                  borderRadius: 999,
                  background: isLast
                    ? `linear-gradient(180deg, ${theme.colors.blue[3]} 0%, ${theme.colors.blue[5]} 100%)`
                    : `linear-gradient(180deg, ${theme.colors.blue[4]} 0%, ${theme.colors.blue[6]} 100%)`,
                  boxShadow: isLast
                    ? `0 0 10px ${theme.colors.blue[4]}55`
                    : isNearTail
                    ? `0 0 6px ${theme.colors.blue[4]}22`
                    : "none",
                  transform: bar.entered ? "scaleY(1)" : "scaleY(0.18)",
                  transformOrigin: "center center",
                  transition: [
                    "height 260ms cubic-bezier(0.2, 0.8, 0.2, 1)",
                    "transform 260ms cubic-bezier(0.2, 0.8, 0.2, 1)",
                    "opacity 220ms ease",
                    "box-shadow 220ms ease",
                  ].join(", "),
                  willChange: "height, transform, opacity",
                }}
              />
            );
          })}
        </Flex>
      </Box>
    </Flex>
  );
}

View File

@@ -146,6 +146,7 @@ export function useDialog() : {
}
console.info("Sending key for message ", key.toString('hex'));
console.info(attachemnts);
let preparedToNetworkSendAttachements : Attachment[] = await prepareAttachmentsToSend(messageId, dialog, key.toString('hex'), attachemnts);
if(attachemnts.length <= 0 && message.trim() == ""){
runQuery("UPDATE messages SET delivered = ? WHERE message_id = ?", [DeliveredMessageState.ERROR, messageId]);

View File

@@ -1,25 +1,261 @@
import { createContext, useRef } from "react";
import { createContext, useEffect, useRef, useState } from "react";
export interface PlayerContextValue {
playAudio: (
artist: string,
title: string,
audio: string | Blob | File,
messageId?: string | null
) => void;
playing: boolean;
pause: () => void;
resume: () => void;
stop: () => void;
setDuration: (duration: number) => void;
duration: number;
totalDuration: number;
currentMessageId: string | null;
lastMessageId: string | null;
}
export const PlayerContext = createContext<PlayerContextValue | null>(null);
const PlayerContext = createContext(null);
/**
* Провайдер для Audio/Video плеера
*/
interface PlayerProviderProps {
children: React.ReactNode;
}
export function PlayerProvider(props : PlayerProviderProps) {
export function PlayerProvider(props: PlayerProviderProps) {
const audioRef = useRef<HTMLAudioElement>(null);
const playVoice = () => {
const objectUrlRef = useRef<string | null>(null);
const rafTimeUpdateRef = useRef<number | null>(null);
const isLoadingRef = useRef(false);
const isSeekingRef = useRef(false);
const durationRef = useRef(0);
const totalDurationRef = useRef(0);
const isPlayingRef = useRef(false);
const [isPlaying, setIsPlaying] = useState(false);
const [duration, setDurationState] = useState(0);
const [totalDuration, setTotalDuration] = useState(0);
const [currentMessageId, setCurrentMessageId] = useState<string | null>(null);
const [lastMessageId, setLastMessageId] = useState<string | null>(null);
const commitPlaying = (next: boolean) => {
if (isPlayingRef.current === next) return;
isPlayingRef.current = next;
setIsPlaying(next);
};
const commitDuration = (next: number) => {
const safe = Number.isFinite(next) && next >= 0 ? next : 0;
if (Math.abs(safe - durationRef.current) < 0.033) return;
durationRef.current = safe;
setDurationState(safe);
};
const commitTotalDuration = (next: number) => {
const safe = Number.isFinite(next) && next > 0 ? next : 0;
if (Math.abs(safe - totalDurationRef.current) < 0.05) return;
totalDurationRef.current = safe;
setTotalDuration(safe);
};
useEffect(() => {
const audio = audioRef.current;
if (!audio) return;
const onPlay = () => {
if (isLoadingRef.current) return;
commitPlaying(true);
};
const onPause = () => {
if (isLoadingRef.current) return;
commitPlaying(false);
};
const onEnded = () => {
commitPlaying(false);
durationRef.current = 0;
setDurationState(0);
setCurrentMessageId(null);
};
const onTimeUpdate = () => {
if (isLoadingRef.current) return;
if (isSeekingRef.current) return;
if (rafTimeUpdateRef.current != null) return;
rafTimeUpdateRef.current = requestAnimationFrame(() => {
rafTimeUpdateRef.current = null;
if (!isLoadingRef.current && !isSeekingRef.current) {
commitDuration(audio.currentTime || 0);
}
});
};
const onLoadedMetadata = () => commitTotalDuration(audio.duration);
const onDurationChange = () => commitTotalDuration(audio.duration);
const onSeeked = () => {
if (isSeekingRef.current) {
isSeekingRef.current = false;
if (!isLoadingRef.current) {
commitDuration(audio.currentTime || 0);
}
return;
}
if (isLoadingRef.current) return;
commitDuration(audio.currentTime || 0);
};
const onCanPlay = () => {
if (isLoadingRef.current) {
isLoadingRef.current = false;
}
};
audio.addEventListener("play", onPlay);
audio.addEventListener("pause", onPause);
audio.addEventListener("ended", onEnded);
audio.addEventListener("timeupdate", onTimeUpdate);
audio.addEventListener("loadedmetadata", onLoadedMetadata);
audio.addEventListener("durationchange", onDurationChange);
audio.addEventListener("seeked", onSeeked);
audio.addEventListener("canplay", onCanPlay);
return () => {
audio.removeEventListener("play", onPlay);
audio.removeEventListener("pause", onPause);
audio.removeEventListener("ended", onEnded);
audio.removeEventListener("timeupdate", onTimeUpdate);
audio.removeEventListener("loadedmetadata", onLoadedMetadata);
audio.removeEventListener("durationchange", onDurationChange);
audio.removeEventListener("seeked", onSeeked);
audio.removeEventListener("canplay", onCanPlay);
if (rafTimeUpdateRef.current != null) {
cancelAnimationFrame(rafTimeUpdateRef.current);
rafTimeUpdateRef.current = null;
}
};
}, []);
useEffect(() => {
return () => {
if (objectUrlRef.current) {
URL.revokeObjectURL(objectUrlRef.current);
objectUrlRef.current = null;
}
};
}, []);
const playAudio = (
artist: string,
title: string,
audio: string | Blob | File,
messageId?: string | null
) => {
const el = audioRef.current;
if (!el) return;
if (objectUrlRef.current) {
URL.revokeObjectURL(objectUrlRef.current);
objectUrlRef.current = null;
}
const audioSrc = typeof audio === "string" ? audio : URL.createObjectURL(audio);
if (typeof audio !== "string") {
objectUrlRef.current = audioSrc;
}
isLoadingRef.current = true;
isSeekingRef.current = false;
el.src = audioSrc;
durationRef.current = 0;
const msgId = messageId ?? null;
setCurrentMessageId(msgId);
if (msgId) setLastMessageId(msgId);
isPlayingRef.current = true;
setIsPlaying(true);
const prevDuration = durationRef.current;
requestAnimationFrame(() => {
if (durationRef.current === prevDuration) {
setDurationState(0);
}
});
void el.play().catch(() => {
isLoadingRef.current = false;
commitPlaying(false);
});
};
const pause = () => {
const el = audioRef.current;
if (!el) return;
el.pause();
};
const resume = () => {
const el = audioRef.current;
if (!el) return;
commitPlaying(true);
void el.play().catch(() => {
commitPlaying(false);
});
};
const stop = () => {
const el = audioRef.current;
if (!el) return;
isLoadingRef.current = true;
el.pause();
el.currentTime = 0;
isLoadingRef.current = false;
durationRef.current = 0;
setDurationState(0);
commitPlaying(false);
setCurrentMessageId(null);
};
const setDuration = (sec: number) => {
const el = audioRef.current;
if (!el) return;
isSeekingRef.current = true;
el.currentTime = Math.max(0, sec);
commitDuration(el.currentTime || 0);
};
return (
<PlayerContext.Provider value={null}>
<PlayerContext.Provider
value={{
playAudio,
playing: isPlaying,
pause,
resume,
stop,
setDuration,
duration,
totalDuration,
currentMessageId,
lastMessageId,
}}
>
{props.children}
<audio ref={audioRef} />
</PlayerContext.Provider>
)
);
}

View File

@@ -0,0 +1,10 @@
import { useContext } from "react";
import { PlayerContext, PlayerContextValue } from "./PlayerProvider";
/**
 * Access the global audio player state and controls.
 *
 * @returns The current {@link PlayerContextValue}.
 * @throws Error when called outside a PlayerProvider.
 */
export function usePlayerContext(): PlayerContextValue {
  const context = useContext(PlayerContext);
  if (!context) {
    // Name the actual hook so the error points at the real call site
    // (the previous message referenced a non-existent "useAudioPlayer").
    throw new Error("usePlayerContext must be used within a PlayerProvider");
  }
  return context;
}

View File

@@ -6,7 +6,8 @@ export enum AttachmentType {
MESSAGES = 1,
FILE = 2,
AVATAR = 3,
CALL = 4
CALL = 4,
VOICE = 5
}
/**

View File

@@ -17,6 +17,8 @@ export const constructLastMessageTextByAttachments = (attachment: string) => {
return "$a=Avatar";
case AttachmentType.CALL:
return "$a=Call";
case AttachmentType.VOICE:
return "$a=Voice message";
default:
return "[Unsupported attachment]";
}