273 lines
8.7 KiB
TypeScript
import { useState, useRef, useCallback, useEffect } from "react";
|
|
|
|
export function useVoiceMessage(): {
|
|
isRecording: boolean;
|
|
isPaused: boolean;
|
|
duration: number;
|
|
waves: number[];
|
|
start: () => Promise<void>;
|
|
stop: () => void;
|
|
pause: () => void;
|
|
play: () => void;
|
|
error: string | null;
|
|
getAudioBlob: () => Blob | null;
|
|
interpolateCompressWaves: (targetLength: number) => number[];
|
|
} {
|
|
const [isRecording, setIsRecording] = useState(false);
|
|
const [isPaused, setIsPaused] = useState(false);
|
|
const [duration, setDuration] = useState(0);
|
|
const [waves, setWaves] = useState<number[]>([]);
|
|
const [error, setError] = useState<string | null>(null);
|
|
|
|
const mediaRecorderRef = useRef<MediaRecorder | null>(null);
|
|
const chunksRef = useRef<Blob[]>([]);
|
|
const streamRef = useRef<MediaStream | null>(null);
|
|
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
|
const waveTimerRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
|
|
|
const audioContextRef = useRef<AudioContext | null>(null);
|
|
const analyserRef = useRef<AnalyserNode | null>(null);
|
|
const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
|
|
const waveDataRef = useRef<Uint8Array<ArrayBuffer> | null>(null);
|
|
|
|
const clearTimer = useCallback(() => {
|
|
if (timerRef.current) {
|
|
clearInterval(timerRef.current);
|
|
timerRef.current = null;
|
|
}
|
|
}, []);
|
|
|
|
const stopWaveLoop = useCallback(() => {
|
|
if (waveTimerRef.current) {
|
|
clearInterval(waveTimerRef.current);
|
|
waveTimerRef.current = null;
|
|
}
|
|
}, []);
|
|
|
|
const startTimer = useCallback(() => {
|
|
if (timerRef.current) return;
|
|
timerRef.current = setInterval(() => {
|
|
setDuration((prev) => prev + 1);
|
|
}, 1000);
|
|
}, []);
|
|
|
|
const startWaveLoop = useCallback(() => {
|
|
stopWaveLoop();
|
|
|
|
const analyser = analyserRef.current;
|
|
if (!analyser) return;
|
|
|
|
if (!waveDataRef.current || waveDataRef.current.length !== analyser.frequencyBinCount) {
|
|
waveDataRef.current = new Uint8Array(new ArrayBuffer(analyser.frequencyBinCount));
|
|
}
|
|
|
|
const MAX_WAVES = 120;
|
|
|
|
const tick = () => {
|
|
if (!analyserRef.current || !waveDataRef.current) return;
|
|
|
|
analyserRef.current.getByteFrequencyData(waveDataRef.current);
|
|
|
|
let peak = 0;
|
|
for (let i = 0; i < waveDataRef.current.length; i++) {
|
|
const v = waveDataRef.current[i];
|
|
if (v > peak) peak = v;
|
|
}
|
|
|
|
const bar = peak / 255;
|
|
|
|
setWaves((prev) => {
|
|
const next = [...prev, bar];
|
|
return next.length > MAX_WAVES ? next.slice(next.length - MAX_WAVES) : next;
|
|
});
|
|
};
|
|
|
|
tick();
|
|
waveTimerRef.current = setInterval(tick, 300);
|
|
}, [stopWaveLoop]);
|
|
|
|
const cleanupAudio = useCallback(() => {
|
|
stopWaveLoop();
|
|
|
|
sourceRef.current?.disconnect();
|
|
sourceRef.current = null;
|
|
analyserRef.current = null;
|
|
waveDataRef.current = null;
|
|
|
|
if (audioContextRef.current) {
|
|
audioContextRef.current.close();
|
|
audioContextRef.current = null;
|
|
}
|
|
|
|
streamRef.current?.getTracks().forEach((track) => track.stop());
|
|
streamRef.current = null;
|
|
}, [stopWaveLoop]);
|
|
|
|
const start = useCallback(async () => {
|
|
try {
|
|
setError(null);
|
|
setDuration(0);
|
|
setWaves([]);
|
|
chunksRef.current = [];
|
|
|
|
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
|
streamRef.current = stream;
|
|
|
|
const audioContext = new AudioContext();
|
|
const analyser = audioContext.createAnalyser();
|
|
analyser.fftSize = 2048;
|
|
analyser.smoothingTimeConstant = 0;
|
|
analyser.minDecibels = -100;
|
|
analyser.maxDecibels = -10;
|
|
|
|
const source = audioContext.createMediaStreamSource(stream);
|
|
source.connect(analyser);
|
|
|
|
audioContextRef.current = audioContext;
|
|
analyserRef.current = analyser;
|
|
sourceRef.current = source;
|
|
|
|
// Выбираем лучший поддерживаемый кодек
|
|
const preferredTypes = [
|
|
"audio/webm;codecs=opus",
|
|
"audio/ogg;codecs=opus",
|
|
"audio/webm",
|
|
];
|
|
|
|
const mimeType = preferredTypes.find((t) => MediaRecorder.isTypeSupported(t)) ?? "";
|
|
|
|
const mediaRecorder = new MediaRecorder(stream, {
|
|
...(mimeType ? { mimeType } : {}),
|
|
audioBitsPerSecond: 32_000,
|
|
});
|
|
|
|
mediaRecorderRef.current = mediaRecorder;
|
|
|
|
mediaRecorder.ondataavailable = (e) => {
|
|
if (e.data.size > 0) chunksRef.current.push(e.data);
|
|
};
|
|
|
|
mediaRecorder.onstop = () => {
|
|
cleanupAudio();
|
|
};
|
|
|
|
mediaRecorder.start(100);
|
|
setIsRecording(true);
|
|
setIsPaused(false);
|
|
startTimer();
|
|
startWaveLoop();
|
|
} catch (err) {
|
|
setError("Could not start voice recording. Please check microphone permissions.");
|
|
console.error("Voice recording error:", err);
|
|
}
|
|
}, [startTimer, startWaveLoop, cleanupAudio]);
|
|
|
|
const stop = useCallback(() => {
|
|
if (mediaRecorderRef.current && isRecording) {
|
|
mediaRecorderRef.current.stop();
|
|
mediaRecorderRef.current = null;
|
|
setIsRecording(false);
|
|
setIsPaused(false);
|
|
clearTimer();
|
|
stopWaveLoop();
|
|
}
|
|
}, [isRecording, clearTimer, stopWaveLoop]);
|
|
|
|
const pause = useCallback(() => {
|
|
if (mediaRecorderRef.current && mediaRecorderRef.current.state === "recording") {
|
|
mediaRecorderRef.current.pause();
|
|
setIsPaused(true);
|
|
clearTimer();
|
|
stopWaveLoop();
|
|
}
|
|
}, [clearTimer, stopWaveLoop]);
|
|
|
|
const play = useCallback(() => {
|
|
if (mediaRecorderRef.current && mediaRecorderRef.current.state === "paused") {
|
|
mediaRecorderRef.current.resume();
|
|
setIsPaused(false);
|
|
startTimer();
|
|
startWaveLoop();
|
|
}
|
|
}, [startTimer, startWaveLoop]);
|
|
|
|
useEffect(() => {
|
|
return () => {
|
|
clearTimer();
|
|
stopWaveLoop();
|
|
|
|
if (mediaRecorderRef.current && mediaRecorderRef.current?.state !== "inactive") {
|
|
mediaRecorderRef.current.stop();
|
|
}
|
|
|
|
cleanupAudio();
|
|
};
|
|
}, [clearTimer, stopWaveLoop, cleanupAudio]);
|
|
|
|
const getAudioBlob = useCallback((): Blob | null => {
|
|
if (chunksRef.current.length === 0) return null;
|
|
const mimeType = mediaRecorderRef.current?.mimeType ?? "audio/webm;codecs=opus";
|
|
return new Blob(chunksRef.current, { type: mimeType });
|
|
}, []);
|
|
|
|
const interpolateCompressWaves = useCallback((targetLength: number) => {
|
|
if (targetLength <= 0) return [];
|
|
if (waves.length === 0) return Array(targetLength).fill(0);
|
|
if (waves.length === targetLength) return waves;
|
|
|
|
if (waves.length > targetLength) {
|
|
const compressed: number[] = [];
|
|
const bucketSize = waves.length / targetLength;
|
|
|
|
for (let i = 0; i < targetLength; i++) {
|
|
const start = Math.floor(i * bucketSize);
|
|
const end = Math.max(start + 1, Math.floor((i + 1) * bucketSize));
|
|
|
|
let max = 0;
|
|
for (let j = start; j < end && j < waves.length; j++) {
|
|
if (waves[j] > max) max = waves[j];
|
|
}
|
|
|
|
compressed.push(max);
|
|
}
|
|
|
|
return compressed;
|
|
}
|
|
|
|
if (targetLength === 1) return [waves[0]];
|
|
|
|
const stretched: number[] = [];
|
|
const lastSourceIndex = waves.length - 1;
|
|
|
|
for (let i = 0; i < targetLength; i++) {
|
|
const position = (i * lastSourceIndex) / (targetLength - 1);
|
|
const left = Math.floor(position);
|
|
const right = Math.min(Math.ceil(position), lastSourceIndex);
|
|
|
|
if (left === right) {
|
|
stretched.push(waves[left]);
|
|
continue;
|
|
}
|
|
|
|
const t = position - left;
|
|
const value = waves[left] * (1 - t) + waves[right] * t;
|
|
stretched.push(value);
|
|
}
|
|
|
|
return stretched;
|
|
}, [waves]);
|
|
|
|
return {
|
|
isRecording,
|
|
isPaused,
|
|
duration,
|
|
waves,
|
|
start,
|
|
stop,
|
|
pause,
|
|
play,
|
|
error,
|
|
getAudioBlob,
|
|
interpolateCompressWaves,
|
|
};
|
|
} |