Голосовые сообщения — фикс аудио, layout preview panel, склейка сегментов

This commit is contained in:
2026-04-13 03:57:22 +05:00
parent dd80c7d7e3
commit 472b9a23f5
3 changed files with 483 additions and 167 deletions

View File

@@ -1,4 +1,5 @@
import AVFAudio
import AVFoundation
import Foundation
import QuartzCore
import os
@@ -33,6 +34,18 @@ final class AudioRecorder: NSObject {
private var lastSampleTime: TimeInterval = 0
private let sampleInterval: TimeInterval = 1.0 / 30.0
// Multi-segment support for "record more" after preview
private var previousSegments: [(url: URL, duration: TimeInterval)] = []
private var accumulatedDuration: TimeInterval = 0
private static let recordingSettings: [String: Any] = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 48000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
AVEncoderBitRateKey: 64000
]
private var fileURL: URL {
let tmp = FileManager.default.temporaryDirectory
return tmp.appendingPathComponent("rosetta_voice_\(UUID().uuidString).m4a")
@@ -55,16 +68,8 @@ final class AudioRecorder: NSObject {
}
let url = fileURL
let settings: [String: Any] = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 48000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
AVEncoderBitRateKey: 64000
]
do {
let rec = try AVAudioRecorder(url: url, settings: settings)
let rec = try AVAudioRecorder(url: url, settings: Self.recordingSettings)
rec.isMeteringEnabled = true
rec.delegate = self
rec.prepareToRecord()
@@ -76,6 +81,8 @@ final class AudioRecorder: NSObject {
waveformSamples = []
lastSampleTime = 0
micLevel = 0
previousSegments = []
accumulatedDuration = 0
state = .recording(duration: 0, micLevel: 0)
startDisplayLink()
logger.info("[AudioRecorder] Started: \(url.lastPathComponent)")
@@ -87,42 +94,183 @@ final class AudioRecorder: NSObject {
}
func stopRecording() {
guard let rec = recorder else { return }
let duration = rec.currentTime
rec.stop()
stopDisplayLink()
let url = rec.url
state = .finished(url: url, duration: duration, waveform: waveformSamples)
onFinished?(url, duration, waveformSamples)
logger.info("[AudioRecorder] Stopped: \(String(format: "%.1f", duration))s")
recorder = nil
if let rec = recorder {
let currentDuration = rec.currentTime
rec.stop()
stopDisplayLink()
let currentURL = rec.url
recorder = nil
finishWithFile(url: currentURL, fileDuration: currentDuration)
} else if case .paused(let url, let duration, _) = state, !previousSegments.isEmpty {
// Recorder already stopped (preview after record more) concatenate segments
finishWithFile(url: url, fileDuration: duration)
} else if case .paused(let url, let duration, _) = state {
// Single segment, recorder stopped just finish
state = .finished(url: url, duration: duration, waveform: waveformSamples)
onFinished?(url, duration, waveformSamples)
logger.info("[AudioRecorder] Stopped (from preview): \(String(format: "%.1f", duration))s")
}
}
/// Pauses recording without losing the current file/waveform.
/// Used by preview flow (`lock -> stop -> preview -> record more`).
/// Finalizes a recording whose AVAudioRecorder has already been stopped.
///
/// Single segment: reports `url` directly. Multi-segment (after "record
/// more"): merges the previous segments with `url`, reports the merged file,
/// deletes the per-segment temp files, and resets segment bookkeeping.
///
/// - Parameters:
///   - url: URL of the final (most recent) finalized segment.
///   - fileDuration: Duration of that segment only; the total adds
///     `accumulatedDuration` from earlier segments.
private func finishWithFile(url: URL, fileDuration: TimeInterval) {
    if previousSegments.isEmpty {
        state = .finished(url: url, duration: fileDuration, waveform: waveformSamples)
        onFinished?(url, fileDuration, waveformSamples)
        logger.info("[AudioRecorder] Stopped: \(String(format: "%.1f", fileDuration))s")
    } else {
        var allURLs = previousSegments.map(\.url)
        allURLs.append(url)
        let totalDuration = accumulatedDuration + fileDuration
        concatenateAudioFiles(urls: allURLs) { [weak self] resultURL in
            guard let self else { return }
            if let resultURL {
                self.state = .finished(url: resultURL, duration: totalDuration, waveform: self.waveformSamples)
                self.onFinished?(resultURL, totalDuration, self.waveformSamples)
                self.logger.info("[AudioRecorder] Stopped (merged): \(String(format: "%.1f", totalDuration))s")
            } else {
                // Merge failed: fall back to the last segment so the user still
                // gets a playable file instead of nothing.
                self.state = .finished(url: url, duration: fileDuration, waveform: self.waveformSamples)
                self.onFinished?(url, fileDuration, self.waveformSamples)
                self.logger.warning("[AudioRecorder] Merge failed, using last segment")
            }
            for segment in self.previousSegments {
                try? FileManager.default.removeItem(at: segment.url)
            }
            self.previousSegments = []
            self.accumulatedDuration = 0
            // Fix: also clear the pending trim range here. Previously it was only
            // cleared in the preview-merge path, so a stale range could be applied
            // to the first segment of a future, unrelated concatenation.
            self.pendingTrimRange = nil
        }
    }
}
/// Stops recording and finalizes the M4A file for preview playback.
/// The file is fully written (moov atom included) so AVAudioPlayer can load it.
/// For single-segment: calls `completion` immediately with the file URL.
/// For multi-segment (after "record more"): merges all segments asynchronously
/// and calls `completion` with the merged URL.
/// - Parameter completion: Receives the playable snapshot (URL, total duration,
///   waveform), or nil when no recording is currently in progress.
func pauseRecordingForPreview(completion: @escaping ((url: URL, duration: TimeInterval, waveform: [Float])?) -> Void) {
guard let rec = recorder, rec.isRecording else {
completion(nil)
return
}
// Snapshot duration/URL before stop() — the recorder reference is released
// right after, so these must be captured up front.
let currentDuration = rec.currentTime
let currentURL = rec.url
rec.stop() // Finalize M4A: writes the moov atom so the file is playable
stopDisplayLink()
recorder = nil
if previousSegments.isEmpty {
// Single segment: the finalized file is already playable, return immediately.
let totalDuration = currentDuration
state = .paused(url: currentURL, duration: totalDuration, waveform: waveformSamples)
logger.info("[AudioRecorder] Paused for preview: \(String(format: "%.1f", totalDuration))s")
completion((url: currentURL, duration: totalDuration, waveform: waveformSamples))
} else {
// Multi-segment: merge all previous segments + the current one into a single
// file so preview playback has one continuous asset.
var allURLs = previousSegments.map(\.url)
allURLs.append(currentURL)
let totalDuration = accumulatedDuration + currentDuration
// Capture the waveform now; the merge completes asynchronously.
let waveform = waveformSamples
concatenateAudioFiles(urls: allURLs) { [weak self] mergedURL in
guard let self else { return }
if let mergedURL {
// Clean up individual segment files — the merged file replaces them.
for segment in self.previousSegments {
try? FileManager.default.removeItem(at: segment.url)
}
try? FileManager.default.removeItem(at: currentURL)
self.previousSegments = []
self.accumulatedDuration = 0
self.pendingTrimRange = nil
self.state = .paused(url: mergedURL, duration: totalDuration, waveform: waveform)
self.logger.info("[AudioRecorder] Paused for preview (merged): \(String(format: "%.1f", totalDuration))s")
completion((url: mergedURL, duration: totalDuration, waveform: waveform))
} else {
// Merge failed: show only the current segment.
// NOTE(review): previousSegments and their files are left untouched on this
// path — presumably so a later stopRecording() can retry the merge; confirm.
self.state = .paused(url: currentURL, duration: currentDuration, waveform: waveform)
self.logger.warning("[AudioRecorder] Merge for preview failed, using last segment")
completion((url: currentURL, duration: currentDuration, waveform: waveform))
}
}
}
}
/// Synchronous version for backward compatibility (single-segment only).
@discardableResult
func pauseRecordingForPreview() -> (url: URL, duration: TimeInterval, waveform: [Float])? {
guard let rec = recorder, rec.isRecording else { return nil }
rec.pause()
guard previousSegments.isEmpty else {
// Multi-segment requires async version
logger.warning("[AudioRecorder] pauseRecordingForPreview sync called with segments — use async version")
return nil
}
let duration = rec.currentTime
let url = rec.url
rec.stop()
stopDisplayLink()
let snapshot = (url: rec.url, duration: rec.currentTime, waveform: waveformSamples)
let snapshot = (url: url, duration: duration, waveform: waveformSamples)
state = .paused(url: snapshot.url, duration: snapshot.duration, waveform: snapshot.waveform)
recorder = nil
logger.info("[AudioRecorder] Paused for preview: \(String(format: "%.1f", duration))s")
return snapshot
}
/// Whether there are previous segments from "record more" that need merging.
var hasPreviousSegments: Bool { !previousSegments.isEmpty }
/// Starts a new recording segment for "record more" after preview.
/// If `trimRange` is provided, the previous segment is trimmed before saving.
/// The new recording will be concatenated with the trimmed segment on send.
@discardableResult
func resumeRecording() -> Bool {
guard let rec = recorder else { return false }
guard case .paused = state else { return false }
guard rec.record() else { return false }
state = .recording(duration: rec.currentTime, micLevel: micLevel)
startDisplayLink()
return true
func resumeRecording(trimRange: ClosedRange<TimeInterval>? = nil) -> Bool {
guard case .paused(let prevURL, let prevDuration, _) = state else { return false }
// If trimmed, export only the trim range; otherwise keep full file
if let trim = trimRange, trim.lowerBound > 0.1 || trim.upperBound < prevDuration - 0.1 {
let trimmedDuration = trim.upperBound - trim.lowerBound
previousSegments.append((url: prevURL, duration: trimmedDuration))
pendingTrimRange = trim
} else {
previousSegments.append((url: prevURL, duration: prevDuration))
pendingTrimRange = nil
}
accumulatedDuration = previousSegments.reduce(0) { $0 + $1.duration }
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker])
try session.setActive(true)
} catch {
logger.error("[AudioRecorder] Resume session failed: \(error)")
return false
}
let url = fileURL
do {
let rec = try AVAudioRecorder(url: url, settings: Self.recordingSettings)
rec.isMeteringEnabled = true
rec.delegate = self
rec.prepareToRecord()
guard rec.record() else {
logger.error("[AudioRecorder] Resume record() failed")
return false
}
recorder = rec
state = .recording(duration: accumulatedDuration, micLevel: micLevel)
startDisplayLink()
logger.info("[AudioRecorder] Resumed recording (segment \(self.previousSegments.count + 1), trimmed: \(self.pendingTrimRange != nil))")
return true
} catch {
logger.error("[AudioRecorder] Resume init failed: \(error)")
return false
}
}
/// Trim range to apply to the first segment during concatenation
private var pendingTrimRange: ClosedRange<TimeInterval>?
func currentRecordingSnapshot() -> (url: URL, duration: TimeInterval, waveform: [Float])? {
if let rec = recorder {
return (url: rec.url, duration: rec.currentTime, waveform: waveformSamples)
return (url: rec.url, duration: accumulatedDuration + rec.currentTime, waveform: waveformSamples)
}
switch state {
case .paused(let url, let duration, let waveform):
@@ -135,11 +283,16 @@ final class AudioRecorder: NSObject {
}
func cancelRecording() {
guard let rec = recorder else { reset(); return }
let url = rec.url
rec.stop()
if let rec = recorder {
let url = rec.url
rec.stop()
try? FileManager.default.removeItem(at: url)
}
stopDisplayLink()
try? FileManager.default.removeItem(at: url)
// Clean up previous segment files
for segment in previousSegments {
try? FileManager.default.removeItem(at: segment.url)
}
logger.info("[AudioRecorder] Cancelled")
recorder = nil
reset()
@@ -150,6 +303,9 @@ final class AudioRecorder: NSObject {
recorder = nil
micLevel = 0
waveformSamples = []
previousSegments = []
accumulatedDuration = 0
pendingTrimRange = nil
state = .idle
}
@@ -177,11 +333,11 @@ final class AudioRecorder: NSObject {
let power = rec.averagePower(forChannel: 0)
let normalized = Self.normalizeMicLevel(power)
micLevel = normalized
let duration = rec.currentTime
let duration = accumulatedDuration + rec.currentTime
state = .recording(duration: duration, micLevel: normalized)
if duration - lastSampleTime >= sampleInterval {
if rec.currentTime - lastSampleTime >= sampleInterval {
waveformSamples.append(normalized)
lastSampleTime = duration
lastSampleTime = rec.currentTime
}
onLevelUpdate?(duration, normalized)
}
@@ -207,6 +363,56 @@ final class AudioRecorder: NSObject {
@unknown default: return false
}
}
// MARK: - Audio Concatenation

/// Merges the given audio files, in order, into a single M4A in the temp
/// directory. When `pendingTrimRange` is set it is applied to the FIRST file
/// only (the original recording trimmed in preview); later segments are
/// inserted whole. `completion` runs on the main actor with the merged URL,
/// or nil on any failure.
private func concatenateAudioFiles(urls: [URL], completion: @escaping @MainActor (URL?) -> Void) {
    let trimRange = pendingTrimRange
    let composition = AVMutableComposition()
    guard let track = composition.addMutableTrack(
        withMediaType: .audio,
        preferredTrackID: kCMPersistentTrackID_Invalid
    ) else {
        completion(nil)
        return
    }
    var insertTime = CMTime.zero
    for (index, url) in urls.enumerated() {
        let asset = AVURLAsset(url: url)
        guard let audioTrack = asset.tracks(withMediaType: .audio).first else {
            // Fix: previously skipped silently; log so a missing/corrupt segment
            // is diagnosable from the console.
            logger.warning("[AudioRecorder] Concat: no audio track in \(url.lastPathComponent)")
            continue
        }
        do {
            // Apply trim range to the first segment (the original recording).
            let sourceRange: CMTimeRange
            if index == 0, let trim = trimRange {
                let start = CMTime(seconds: trim.lowerBound, preferredTimescale: 44100)
                let end = CMTime(seconds: trim.upperBound, preferredTimescale: 44100)
                sourceRange = CMTimeRange(start: start, end: end)
            } else {
                sourceRange = CMTimeRange(start: .zero, duration: asset.duration)
            }
            try track.insertTimeRange(sourceRange, of: audioTrack, at: insertTime)
            insertTime = CMTimeAdd(insertTime, sourceRange.duration)
        } catch {
            logger.error("[AudioRecorder] Concat insert failed: \(error)")
        }
    }
    // Fix: if nothing was inserted (every insert failed or no file had an audio
    // track) the old code exported an empty composition and handed callers a
    // silent file. Report failure instead so callers fall back sensibly.
    guard insertTime > .zero else {
        completion(nil)
        return
    }
    let outputURL = FileManager.default.temporaryDirectory
        .appendingPathComponent("rosetta_voice_merged_\(UUID().uuidString).m4a")
    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) else {
        completion(nil)
        return
    }
    exporter.outputURL = outputURL
    exporter.outputFileType = .m4a
    exporter.exportAsynchronously {
        Task { @MainActor in
            completion(exporter.status == .completed ? outputURL : nil)
        }
    }
}
}
extension AudioRecorder: AVAudioRecorderDelegate {

View File

@@ -460,6 +460,20 @@ final class ComposerView: UIView, UITextViewDelegate {
recordingFlowState = state
}
// MARK: - Hit Testing (RecordMore floats above inputContainer)

override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
    // Give the floating preview panel first chance at touches: its RecordMore
    // button sits outside the inputContainer's bounds, so the default
    // hit-testing would miss it.
    guard let panel = recordingPreviewPanel else {
        return super.hitTest(point, with: event)
    }
    let panelPoint = convert(point, to: panel)
    if panel.point(inside: panelPoint, with: event),
       let target = panel.hitTest(panelPoint, with: event) {
        return target
    }
    return super.hitTest(point, with: event)
}
// MARK: - Layout
override func layoutSubviews() {
@@ -1133,20 +1147,8 @@ extension ComposerView: RecordingMicButtonDelegate {
setRecordingFlowState(.waitingForPreview)
audioRecorder.onLevelUpdate = nil
let paused = audioRecorder.pauseRecordingForPreview() ?? audioRecorder.currentRecordingSnapshot()
guard let snapshot = paused else {
dismissOverlayAndRestore()
return
}
lastRecordedURL = snapshot.url
lastRecordedDuration = snapshot.duration
lastRecordedWaveform = snapshot.waveform
if VoiceRecordingParityMath.shouldDiscard(duration: snapshot.duration) {
dismissOverlayAndRestore()
return
}
// Dismiss recording UI immediately
recordingOverlay?.dismiss()
recordingOverlay = nil
recordingLockView?.dismiss()
@@ -1156,17 +1158,43 @@ extension ComposerView: RecordingMicButtonDelegate {
}
updateRecordingSendAccessibilityArea(isEnabled: false)
guard let url = lastRecordedURL else {
dismissOverlayAndRestore(skipAudioCleanup: true)
if audioRecorder.hasPreviousSegments {
// Multi-segment (after "record more") async merge then show preview
audioRecorder.pauseRecordingForPreview { [weak self] snapshot in
guard let self, let snapshot else {
self?.dismissOverlayAndRestore()
return
}
self.presentPreviewPanel(url: snapshot.url, duration: snapshot.duration, waveform: snapshot.waveform)
}
} else {
// Single segment sync path
let paused = audioRecorder.pauseRecordingForPreview() ?? audioRecorder.currentRecordingSnapshot()
guard let snapshot = paused else {
dismissOverlayAndRestore()
return
}
presentPreviewPanel(url: snapshot.url, duration: snapshot.duration, waveform: snapshot.waveform)
}
}
private func presentPreviewPanel(url: URL, duration: TimeInterval, waveform: [Float]) {
lastRecordedURL = url
lastRecordedDuration = duration
lastRecordedWaveform = waveform
if VoiceRecordingParityMath.shouldDiscard(duration: duration) {
dismissOverlayAndRestore()
return
}
setPreviewRowReplacement(true)
micButton.resetState()
let preview = RecordingPreviewPanel(
frame: inputContainer.bounds,
fileURL: url,
duration: lastRecordedDuration,
waveform: lastRecordedWaveform
duration: duration,
waveform: waveform
)
preview.delegate = self
inputContainer.addSubview(preview)
@@ -1389,16 +1417,21 @@ extension ComposerView: RecordingMicButtonDelegate {
restoreComposerChrome()
// For cancel: play bin animation inside attach button, then restore icon
// Slide-to-cancel (discardRecording) = red bin, Preview delete (preserveRecordedDraft) = white bin
if dismissStyle == .cancel {
playBinAnimationInAttachButton()
let useRedBin = cleanup == .discardRecording
playBinAnimationInAttachButton(useRedBin: useRedBin)
}
}
private func playBinAnimationInAttachButton() {
private func playBinAnimationInAttachButton(useRedBin: Bool = true) {
// Hide paperclip icon, play bin Lottie inside attach button, then restore
attachIconLayer?.opacity = 0
guard let animation = LottieAnimation.named(VoiceRecordingLottieAsset.binRed.rawValue) else {
// Slide-to-cancel: BinRed (red animation, no tint)
// Preview delete: BinBlue with panelControlColor tint (white in dark theme)
let assetName = useRedBin ? VoiceRecordingLottieAsset.binRed.rawValue : VoiceRecordingLottieAsset.binBlue.rawValue
guard let animation = LottieAnimation.named(assetName) else {
// No Lottie asset just fade icon back
CATransaction.begin()
CATransaction.setAnimationDuration(0.25)
@@ -1407,11 +1440,24 @@ extension ComposerView: RecordingMicButtonDelegate {
return
}
let binView = LottieAnimationView(animation: animation)
let config = LottieConfiguration(renderingEngine: .mainThread)
let binView = LottieAnimationView(animation: animation, configuration: config)
binView.frame = attachButton.bounds
binView.contentMode = .scaleAspectFit
binView.backgroundBehavior = .pauseAndRestore
binView.loopMode = .playOnce
// Apply theme tint only for BinBlue (preview delete white in dark, black in light)
// BinRed (slide-to-cancel) uses its original red color, no tint needed.
if !useRedBin {
var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
UIColor.label.getRed(&r, green: &g, blue: &b, alpha: &a)
binView.setValueProvider(
ColorValueProvider(LottieColor(r: Double(r), g: Double(g), b: Double(b), a: Double(a))),
keypath: AnimationKeypath(keypath: "**.Color")
)
}
attachButton.addSubview(binView)
binView.play { [weak self] _ in
@@ -1465,9 +1511,10 @@ extension ComposerView: RecordingMicButtonDelegate {
}
private func resumeRecordingFromPreview() {
let trimRange = recordingPreviewPanel?.selectedTrimRange
setPreviewRowReplacement(false)
micButton.resetState()
guard audioRecorder.resumeRecording() else {
guard audioRecorder.resumeRecording(trimRange: trimRange) else {
dismissOverlayAndRestore()
return
}
@@ -1607,12 +1654,40 @@ extension ComposerView: RecordingPreviewPanelDelegate {
func previewPanelDidTapDelete(_ panel: RecordingPreviewPanel) {
audioRecorder.cancelRecording()
clearLastRecordedDraftFile()
dismissOverlayAndRestore(skipAudioCleanup: true)
finalizeVoiceSession(cleanup: .preserveRecordedDraft, dismissStyle: .cancel)
delegate?.composerDidCancelRecording(self)
}
func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel) {
resumeRecordingFromPreview()
private static let trimWarningShownKey = "voice_trim_resume_warning_shown"
// NOTE(review): `trimRange` is delivered by the panel but not read here — the
// resume path appears to fetch the panel's selected range itself; confirm the
// parameter is intentionally informational.
func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel, trimRange: ClosedRange<TimeInterval>, isTrimmed: Bool) {
    // Only the very first "record more" on a trimmed take shows the warning;
    // afterwards the flag in UserDefaults suppresses it.
    let warningAlreadyShown = UserDefaults.standard.bool(forKey: Self.trimWarningShownKey)
    guard isTrimmed, !warningAlreadyShown else {
        resumeRecordingFromPreview()
        return
    }
    let alert = UIAlertController(
        title: "Trim to selected range?",
        message: "Audio outside that range will be discarded, and recording will start immediately.",
        preferredStyle: .alert
    )
    alert.addAction(UIAlertAction(title: "Cancel", style: .cancel))
    alert.addAction(UIAlertAction(title: "Proceed", style: .default) { [weak self] _ in
        // Persist the flag only once the user actually proceeds.
        UserDefaults.standard.set(true, forKey: Self.trimWarningShownKey)
        self?.resumeRecordingFromPreview()
    })
    presentingViewController()?.present(alert, animated: true)
}
/// Walks up the responder chain and returns the nearest view controller,
/// used as the presenter for alerts. Returns nil if none is found.
private func presentingViewController() -> UIViewController? {
    var node: UIResponder? = next
    while let current = node {
        if let controller = current as? UIViewController {
            return controller
        }
        node = current.next
    }
    return nil
}
}

View File

@@ -9,14 +9,15 @@ import UIKit
protocol RecordingPreviewPanelDelegate: AnyObject {
func previewPanelDidTapSend(_ panel: RecordingPreviewPanel, trimRange: ClosedRange<TimeInterval>)
func previewPanelDidTapDelete(_ panel: RecordingPreviewPanel)
func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel)
func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel, trimRange: ClosedRange<TimeInterval>, isTrimmed: Bool)
}
// MARK: - RecordingPreviewPanel
/// Telegram-parity recording preview: glass delete circle + dark glass panel + blue send circle.
/// Blue accent fill covers only the trim range. Play button floats inside waveform.
final class RecordingPreviewPanel: UIView {
/// Telegram-parity recording preview.
/// Layout: [delete glass circle] [dark glass panel with waveform + accent fill + trim + play pill + send]
/// RecordMore floats directly above the send button.
final class RecordingPreviewPanel: UIView, UIGestureRecognizerDelegate {
private enum PanMode {
case scrub
@@ -31,7 +32,7 @@ final class RecordingPreviewPanel: UIView {
weak var delegate: RecordingPreviewPanelDelegate?
// MARK: - Background elements (3 separate visual blocks)
// MARK: - Background elements
private let deleteGlassCircle = TelegramGlassUIView(frame: .zero)
private let centerGlassBackground = TelegramGlassUIView(frame: .zero)
@@ -48,14 +49,15 @@ final class RecordingPreviewPanel: UIView {
private let waveformContainer = UIView()
private let waveformView = WaveformView()
private let leftTrimMask = UIView()
private let rightTrimMask = UIView()
// MARK: - Trim handles (at panel level)
private let leftTrimHandle = UIView()
private let rightTrimHandle = UIView()
private let leftCapsuleView = UIView()
private let rightCapsuleView = UIView()
// MARK: - Play button pill (floats inside waveform)
// MARK: - Play button pill (at panel level)
private let playButtonPill = UIButton(type: .custom)
private let playPillBackground = UIImageView()
@@ -85,6 +87,8 @@ final class RecordingPreviewPanel: UIView {
// MARK: - Layout cache
private var centerPanelFrame: CGRect = .zero
private var waveformOriginX: CGFloat = 0
private var waveformWidth: CGFloat = 0
// MARK: - Colors
@@ -95,7 +99,7 @@ final class RecordingPreviewPanel: UIView {
}
private var panelControlAccentColor: UIColor {
UIColor(red: 0, green: 136 / 255.0, blue: 1.0, alpha: 1.0)
UIColor(red: 0.2, green: 0.565, blue: 0.925, alpha: 1) // #3390EC same as outgoing message bubble
}
private var panelSecondaryTextColor: UIColor {
@@ -130,6 +134,40 @@ final class RecordingPreviewPanel: UIView {
@available(*, unavailable)
required init?(coder: NSCoder) { fatalError() }
// MARK: - Hit testing (RecordMore floats above bounds)

override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
    // Touches on the floating RecordMore button count as inside this view,
    // even though its frame lies above our bounds.
    guard !recordMoreButton.frame.contains(point) else { return true }
    return super.point(inside: point, with: event)
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
    // The RecordMore button floats above this view's bounds; route its
    // touches to it explicitly.
    let recordMoreVisible = !recordMoreButton.isHidden
    if recordMoreVisible, recordMoreButton.frame.contains(point) {
        return recordMoreButton
    }
    // The play pill must win over the panel-level pan gesture while visible.
    let pillVisible = !playButtonPill.isHidden && playButtonPill.alpha > 0.1
    if pillVisible {
        let pillLocal = convert(point, to: playButtonPill)
        if playButtonPill.bounds.contains(pillLocal) {
            return playButtonPill
        }
    }
    return super.hitTest(point, with: event)
}
// MARK: - Gesture delegate (don't intercept button taps)

override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
    // The panel-level pan (scrub/trim) must never swallow taps on controls.
    let location = gestureRecognizer.location(in: self)
    let buttonFrames = [deleteButton.frame, sendButton.frame, recordMoreButton.frame]
    if buttonFrames.contains(where: { $0.contains(location) }) {
        return false
    }
    if playButtonPill.alpha > 0.1, playButtonPill.frame.contains(location) {
        return false
    }
    return true
}
// MARK: - Setup
private func setupSubviews() {
@@ -145,19 +183,15 @@ final class RecordingPreviewPanel: UIView {
deleteButton.accessibilityIdentifier = "voice.preview.delete"
addSubview(deleteButton)
// B) Central dark glass panel
// B) Central dark glass panel (includes send button area)
centerGlassBackground.isUserInteractionEnabled = false
addSubview(centerGlassBackground)
// Blue accent fill (dynamic trim range)
accentFillView.image = Self.makeStretchablePill(
diameter: 34,
color: panelControlAccentColor
)
// Blue accent fill (dynamic trim range, BELOW waveform in z-order)
accentFillView.isUserInteractionEnabled = false
addSubview(accentFillView)
// Waveform container
// Waveform container (ABOVE accent fill bars visible on top of blue)
waveformContainer.clipsToBounds = true
addSubview(waveformContainer)
@@ -165,19 +199,12 @@ final class RecordingPreviewPanel: UIView {
waveformView.progress = 0
waveformContainer.addSubview(waveformView)
// Trim masks
leftTrimMask.backgroundColor = UIColor.black.withAlphaComponent(0.25)
rightTrimMask.backgroundColor = UIColor.black.withAlphaComponent(0.25)
waveformContainer.addSubview(leftTrimMask)
waveformContainer.addSubview(rightTrimMask)
// Trim handles (transparent, 16pt wide)
// Trim handles at panel level
leftTrimHandle.backgroundColor = .clear
rightTrimHandle.backgroundColor = .clear
addSubview(leftTrimHandle)
addSubview(rightTrimHandle)
// White capsule indicators inside handles
leftCapsuleView.backgroundColor = .white
leftCapsuleView.layer.cornerRadius = 1.5
leftTrimHandle.addSubview(leftCapsuleView)
@@ -186,13 +213,7 @@ final class RecordingPreviewPanel: UIView {
rightCapsuleView.layer.cornerRadius = 1.5
rightTrimHandle.addSubview(rightCapsuleView)
// Pan gesture for waveform
let pan = UIPanGestureRecognizer(target: self, action: #selector(handleWaveformPan(_:)))
waveformContainer.addGestureRecognizer(pan)
waveformContainer.accessibilityLabel = "Waveform trim area"
waveformContainer.accessibilityIdentifier = "voice.preview.waveform"
// Play button pill (inside waveform)
// Play button pill at panel level
playPillBackground.isUserInteractionEnabled = false
playButtonPill.addSubview(playPillBackground)
@@ -211,10 +232,10 @@ final class RecordingPreviewPanel: UIView {
playButtonPill.addTarget(self, action: #selector(playTapped), for: .touchUpInside)
playButtonPill.accessibilityLabel = "Play recording"
playButtonPill.accessibilityIdentifier = "voice.preview.playPause"
waveformContainer.addSubview(playButtonPill)
addSubview(playButtonPill)
configurePlayButton(playing: false, animated: false)
// C) Send button (solid blue circle)
// Send button INSIDE center panel (right edge)
sendButton.setImage(VoiceRecordingAssets.image(.send, templated: true), for: .normal)
sendButton.backgroundColor = panelControlAccentColor
sendButton.layer.cornerRadius = 18
@@ -225,7 +246,7 @@ final class RecordingPreviewPanel: UIView {
sendButton.accessibilityIdentifier = "voice.preview.send"
addSubview(sendButton)
// D) Record More glass circle + button (floating above)
// RecordMore glass circle floating above send button
recordMoreGlassCircle.fixedCornerRadius = 20
recordMoreGlassCircle.isUserInteractionEnabled = false
addSubview(recordMoreGlassCircle)
@@ -237,6 +258,11 @@ final class RecordingPreviewPanel: UIView {
recordMoreButton.accessibilityIdentifier = "voice.preview.recordMore"
addSubview(recordMoreButton)
// Pan gesture on self for waveform scrubbing/trimming
let pan = UIPanGestureRecognizer(target: self, action: #selector(handleWaveformPan(_:)))
pan.delegate = self
addGestureRecognizer(pan)
updateThemeColors()
}
@@ -254,39 +280,45 @@ final class RecordingPreviewPanel: UIView {
deleteGlassCircle.applyCornerRadius()
deleteButton.frame = deleteFrame
// Send solid blue circle, right edge
// Send button inside center panel, at right edge with inset
let sendSize: CGFloat = 36
sendButton.frame = CGRect(x: w - sendSize, y: (h - sendSize) / 2, width: sendSize, height: sendSize)
let sendInset: CGFloat = 3
let sendX = w - sendInset - sendSize
sendButton.frame = CGRect(x: sendX, y: (h - sendSize) / 2, width: sendSize, height: sendSize)
// Central dark glass panel between delete and send
// Central dark glass panel from after delete gap to right edge (includes send)
let panelGap: CGFloat = 6
let panelX = deleteSize + panelGap
let panelW = w - panelX - sendSize - panelGap
let panelH = h - 6
let panelY: CGFloat = 3
let panelW = w - panelX
let panelH = deleteSize // Same height as delete circle (40pt) Telegram parity
let panelY = (h - panelH) / 2
let panelCornerRadius = panelH / 2
centerGlassBackground.frame = CGRect(x: panelX, y: panelY, width: panelW, height: panelH)
centerGlassBackground.fixedCornerRadius = panelCornerRadius
centerGlassBackground.applyCornerRadius()
centerPanelFrame = CGRect(x: panelX, y: panelY, width: panelW, height: panelH)
// Waveform inside central panel, 18pt insets from panel edges
let wfInset: CGFloat = 18
let wfX = panelX + wfInset
let wfW = panelW - wfInset * 2
// Waveform symmetric 21pt inset from panel edges (Telegram: x=21, width=panelW-42)
let wfLeftInset: CGFloat = 21
let wfX = panelX + wfLeftInset
let wfRightEdge = sendButton.frame.minX - 21 // Symmetric with left
let wfW = max(0, wfRightEdge - wfX)
let wfH: CGFloat = 13
let wfY = floor((h - wfH) / 2)
waveformContainer.frame = CGRect(x: wfX, y: wfY, width: max(0, wfW), height: wfH)
waveformContainer.frame = CGRect(x: wfX, y: wfY, width: wfW, height: wfH)
waveformView.frame = waveformContainer.bounds
waveformOriginX = wfX
waveformWidth = wfW
// RecordMore floating above, right side
// RecordMore floating directly above send button
let rmSize: CGFloat = 40
let rmFrame = CGRect(x: w - rmSize - 10, y: -52, width: rmSize, height: rmSize)
let rmX = sendButton.frame.midX - rmSize / 2
let rmFrame = CGRect(x: rmX, y: -52, width: rmSize, height: rmSize)
recordMoreGlassCircle.frame = rmFrame
recordMoreGlassCircle.applyCornerRadius()
recordMoreButton.frame = rmFrame
// Trim computation
// Trim
minTrimDuration = VoiceRecordingParityConstants.minTrimDuration(
duration: duration,
waveformWidth: waveformContainer.bounds.width
@@ -313,15 +345,30 @@ final class RecordingPreviewPanel: UIView {
}
private func startPlayback() {
// After recording, audio route may be earpiece. Switch to speaker for preview.
try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker])
try? AVAudioSession.sharedInstance().setActive(true)
try? AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
if audioPlayer == nil {
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.prepareToPlay()
do {
let player = try AVAudioPlayer(contentsOf: fileURL)
player.volume = 1.0
player.prepareToPlay()
audioPlayer = player
} catch {
print("[VoicePreview] AVAudioPlayer init failed: \(error)")
return
}
}
guard let player = audioPlayer else { return }
if player.currentTime < trimStart || player.currentTime > trimEnd {
player.currentTime = trimStart
}
player.play()
guard player.play() else {
print("[VoicePreview] player.play() returned false")
return
}
isPlaying = true
configurePlayButton(playing: true, animated: true)
startDisplayLink()
@@ -409,26 +456,28 @@ final class RecordingPreviewPanel: UIView {
// MARK: - Trim / Scrub
@objc private func handleWaveformPan(_ gesture: UIPanGestureRecognizer) {
guard duration > 0, waveformContainer.bounds.width > 1 else { return }
let location = gesture.location(in: waveformContainer)
let normalized = min(1, max(0, location.x / waveformContainer.bounds.width))
let targetTime = TimeInterval(normalized) * duration
guard duration > 0, waveformWidth > 1 else { return }
let location = gesture.location(in: self)
let wfRelativeX = location.x - waveformOriginX
let fraction = min(1, max(0, wfRelativeX / waveformWidth))
let targetTime = TimeInterval(fraction) * duration
switch gesture.state {
case .began:
let leftX = xForTime(trimStart)
let rightX = xForTime(trimEnd)
if abs(location.x - leftX) <= 17 {
if abs(wfRelativeX - leftX) <= 17 {
activePanMode = .trimLeft
} else if abs(location.x - rightX) <= 17 {
} else if abs(wfRelativeX - rightX) <= 17 {
activePanMode = .trimRight
} else {
} else if wfRelativeX >= -10, wfRelativeX <= waveformWidth + 10 {
activePanMode = .scrub
} else {
return
}
if activePanMode != .scrub {
pausePlayback()
}
// Hide play pill during scrub
if activePanMode == .scrub {
UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut) {
self.playButtonPill.alpha = 0
@@ -465,7 +514,6 @@ final class RecordingPreviewPanel: UIView {
}
default:
activePanMode = nil
// Show play pill again
UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut) {
self.playButtonPill.alpha = 1
}
@@ -474,89 +522,84 @@ final class RecordingPreviewPanel: UIView {
// Lays out every trim-related subview of the recording preview panel:
// the dimming masks outside the trim range, the left/right trim handles,
// their capsule indicators, the blue accent fill between the handles,
// and the play-button pill (with its background image, Lottie icon and
// duration label).
//
// NOTE(review): this span appears to be a rendered diff with BOTH the
// pre-change and post-change lines interleaved (prefix-stripped). Evidence:
// two `x:` arguments inside each trim-handle `CGRect(` initializer
// (old `wfOriginX`-based vs new clamped `waveformOriginX`-based), two
// consecutive `accentFillView.frame =` assignments in each branch, two
// `pillX`/`pillY` declarations, and mismatched `if` headers around
// `playPillBackground.image`. Reconcile against the committed file before
// treating this as compilable Swift.
private func updateTrimVisuals() {
let wfW = waveformContainer.bounds.width
let wfH = waveformContainer.bounds.height
// Nothing to lay out until the waveform container has a real width.
guard wfW > 0 else { return }
// Trim boundary positions in waveform-local x coordinates.
let startX = xForTime(trimStart)
let endX = xForTime(trimEnd)
// Trim masks (dim areas outside trim range)
leftTrimMask.frame = CGRect(x: 0, y: 0, width: max(0, startX), height: wfH)
rightTrimMask.frame = CGRect(x: min(wfW, endX), y: 0, width: max(0, wfW - endX), height: wfH)
// Trim handles (16pt wide, positioned at panel level)
// Trim handles at panel level, clamped to not overlap send button
let handleW: CGFloat = 16
let handleH = centerPanelFrame.height
let handleY = centerPanelFrame.minY
let wfOriginX = waveformContainer.frame.minX
// Rightmost allowed x for the right handle, keeping a 4pt gap to the send button.
let maxRightHandleX = sendButton.frame.minX - 4 - handleW
// NOTE(review): duplicated `x:` arguments below are old-vs-new diff lines.
leftTrimHandle.frame = CGRect(
x: wfOriginX + startX - handleW,
x: max(centerPanelFrame.minX, waveformOriginX + startX - handleW),
y: handleY,
width: handleW,
height: handleH
)
rightTrimHandle.frame = CGRect(
x: wfOriginX + endX,
x: min(maxRightHandleX, waveformOriginX + endX),
y: handleY,
width: handleW,
height: handleH
)
// Capsule indicators (3×12, centered vertically, 8pt from outer edge)
// Capsule indicators
let capsuleW: CGFloat = 3
let capsuleH: CGFloat = 12
let capsuleY = (handleH - capsuleH) / 2
// Left capsule: 8pt from right edge of left handle (outer edge faces left, capsule near waveform)
leftCapsuleView.frame = CGRect(x: handleW - 8 - capsuleW, y: capsuleY, width: capsuleW, height: capsuleH)
// Right capsule: 8pt from left edge of right handle
rightCapsuleView.frame = CGRect(x: 8, y: capsuleY, width: capsuleW, height: capsuleH)
// Hide trim handles when duration < 2s
let showTrim = duration >= 2.0
leftTrimHandle.isHidden = !showTrim
rightTrimHandle.isHidden = !showTrim
// Blue accent fill dynamic between trim handles (covers panel area)
let fillX: CGFloat
let fillW: CGFloat
// Blue accent fill spans from left trim handle to right trim handle (Telegram parity)
// When trimming, the blue area moves with the handles.
let accentInsetH: CGFloat = 0 // horizontal (left) flush with panel edge
let accentInsetV: CGFloat = 3 // vertical (top/bottom) proportional gap
let accentY = centerPanelFrame.minY + accentInsetV
let accentH = centerPanelFrame.height - accentInsetV * 2
if showTrim {
// NOTE(review): two accent-fill computations follow; the first (wfOriginX
// ± 18pt) looks like the removed version, the second (handle-frame based,
// clamped to the send button) the added one.
fillX = wfOriginX + startX - 18 // extend to panel-edge inset
let fillEndX = wfOriginX + endX + 18
let clampedX = max(centerPanelFrame.minX, fillX)
let clampedEndX = min(centerPanelFrame.maxX, fillEndX)
fillW = clampedEndX - clampedX
accentFillView.frame = CGRect(x: clampedX, y: centerPanelFrame.minY, width: max(0, fillW), height: centerPanelFrame.height)
let fillX = leftTrimHandle.frame.minX
let fillEndX = rightTrimHandle.frame.maxX
let clampedX = max(centerPanelFrame.minX + accentInsetH, fillX)
let clampedEnd = min(sendButton.frame.minX - 3, fillEndX)
accentFillView.frame = CGRect(x: clampedX, y: accentY, width: max(0, clampedEnd - clampedX), height: accentH)
} else {
// No trim — fill the entire central panel.
// NOTE(review): two assignments follow (old whole-frame vs new inset frame).
accentFillView.frame = centerPanelFrame
let accentX = centerPanelFrame.minX + accentInsetH
let accentW = max(0, sendButton.frame.minX - 3 - accentX)
accentFillView.frame = CGRect(x: accentX, y: accentY, width: accentW, height: accentH)
}
// Play button pill centered between trim handles
// Play button pill centered between trim handles, at panel level
let space = endX - startX
// Wide pill (icon + duration label) only when there is room; narrow pill is icon-only.
let pillW: CGFloat = space >= 70 ? 63 : 27
let pillH: CGFloat = 22
// NOTE(review): duplicated pillX/pillY pairs are old (waveform-local) vs
// new (panel-level) diff lines.
let pillX = startX + (space - pillW) / 2
let pillY = (wfH - pillH) / 2
let pillX = waveformOriginX + startX + (space - pillW) / 2
let pillY = (bounds.height - pillH) / 2
playButtonPill.frame = CGRect(x: pillX, y: pillY, width: pillW, height: pillH)
// Pill background
let pillCornerRadius = pillH / 2
// NOTE(review): mismatched `if` headers below (old size-checking condition vs
// new nil-only condition) — only one closing brace survives in this render.
if playPillBackground.image == nil || playPillBackground.frame.size != playButtonPill.bounds.size {
playPillBackground.image = Self.makeStretchablePill(diameter: pillH, color: .white)?.withRenderingMode(.alwaysTemplate)
if playPillBackground.image == nil {
playPillBackground.image = Self.makeStretchablePill(
diameter: pillH, color: .white
)?.withRenderingMode(.alwaysTemplate)
playPillBackground.tintColor = playPillBackgroundColor
}
playPillBackground.frame = playButtonPill.bounds
playPillBackground.layer.cornerRadius = pillCornerRadius
playPillBackground.layer.cornerRadius = pillH / 2
playPillBackground.clipsToBounds = true
// Lottie icon inside pill
playPauseAnimationView.frame = CGRect(x: 3, y: 1, width: 21, height: 21)
playPauseAnimationView.frame = CGRect(x: 3, y: 0.5, width: 21, height: 21)
// Duration label inside pill
let showDuration = pillW > 27
durationLabel.isHidden = !showDuration
durationLabel.frame = CGRect(x: 18, y: 3, width: 35, height: 20)
durationLabel.isHidden = pillW <= 27
durationLabel.frame = CGRect(x: 18, y: 1, width: 38, height: 20)
}
private func xForTime(_ time: TimeInterval) -> CGFloat {
@@ -580,25 +623,16 @@ final class RecordingPreviewPanel: UIView {
}
private func updateThemeColors() {
// Waveform colors: white bars on blue accent background
waveformView.backgroundColor_ = UIColor.white.withAlphaComponent(0.4)
waveformView.foregroundColor_ = UIColor.white
waveformView.setNeedsDisplay()
// Delete button tint
deleteButton.tintColor = panelControlColor
// Record more tint
recordMoreButton.tintColor = panelControlColor
// Play pill text + icon
durationLabel.textColor = panelSecondaryTextColor
applyPlayPauseTintColor(panelSecondaryTextColor)
// Pill background
playPillBackground.tintColor = playPillBackgroundColor
// Accent fill
accentFillView.image = Self.makeStretchablePill(
diameter: 34,
color: panelControlAccentColor
@@ -626,7 +660,8 @@ final class RecordingPreviewPanel: UIView {
// Handles a tap on the "record more" button: pauses playback without
// rewinding to the trim start, then notifies the delegate so recording can
// resume and the new audio can be appended as another segment.
//
// NOTE(review): rendered-diff residue — the one-argument delegate call below
// is the removed line and the three-argument call (carrying the trim range
// and a 100ms-tolerance `isTrimmed` flag) is its replacement; only the
// latter should exist in the committed file.
@objc private func recordMoreTapped() {
stopPlayback(resetToTrimStart: false)
delegate?.previewPanelDidTapRecordMore(self)
// Treat the recording as trimmed only when a handle moved by more than 0.1s.
let isTrimmed = trimStart > 0.1 || trimEnd < duration - 0.1
delegate?.previewPanelDidTapRecordMore(self, trimRange: selectedTrimRange, isTrimmed: isTrimmed)
}
@objc private func sendTapped() {