diff --git a/Rosetta/Features/Chats/ChatDetail/ComposerView.swift b/Rosetta/Features/Chats/ChatDetail/ComposerView.swift
index 49fb58c..0f7a417 100644
--- a/Rosetta/Features/Chats/ChatDetail/ComposerView.swift
+++ b/Rosetta/Features/Chats/ChatDetail/ComposerView.swift
@@ -1,4 +1,3 @@
-import AudioToolbox
 import AVFAudio
 @preconcurrency import AVFoundation
 import Lottie
@@ -1179,22 +1178,10 @@ extension ComposerView: RecordingMicButtonDelegate {
         isRecording = true
         isRecordingLocked = false
         setRecordingFlowState(.recordingUnlocked)
+        // Haptic is fired by RecordingMicButton.beginRecording() (prepared generator)
+        // BEFORE this delegate call — so it fires before AVAudioSession starts.
         presentRecordingChrome(locked: false, animatePanel: true)
 
-        // Haptic 100ms after chrome — overlay is at alpha ~0.7, visually present.
-        // Fired outside the Task so AVAudioSession can't suppress it, and
-        // button state guards can't skip it.
-        let hapticGenerator = UIImpactFeedbackGenerator(style: .medium)
-        hapticGenerator.prepare()
-        print("[HAPTIC] prepare() called, scheduling impactOccurred in 100ms")
-        DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
-            let isStillRecording = self?.isRecording == true
-            print("[HAPTIC] firing impactOccurred — isRecording=\(isStillRecording) flowState=\(String(describing: self?.recordingFlowState))")
-            hapticGenerator.impactOccurred()
-            AudioServicesPlaySystemSound(1519)
-            print("[HAPTIC] impactOccurred + SystemSound(1519) DONE")
-        }
-
         recordingStartTask?.cancel()
         recordingStartTask = Task { @MainActor [weak self] in
             guard let self else { return }
diff --git a/Rosetta/Features/Chats/ChatDetail/RecordingMicButton.swift b/Rosetta/Features/Chats/ChatDetail/RecordingMicButton.swift
index 19fb45f..6d7ce75 100644
--- a/Rosetta/Features/Chats/ChatDetail/RecordingMicButton.swift
+++ b/Rosetta/Features/Chats/ChatDetail/RecordingMicButton.swift
@@ -1,4 +1,3 @@
-import AudioToolbox
 import QuartzCore
 import UIKit
 
@@ -127,7 +126,6 @@ final class RecordingMicButton: UIControl {
 
         didLockHaptic = false
         impactFeedback.prepare()
-        print("[HAPTIC-MIC] beginTracking — prepare() called")
         recordingDelegate?.micButtonRecordingArmed(self)
 
         // Start hold timer — after 0.19s we begin recording
@@ -257,14 +255,13 @@ final class RecordingMicButton: UIControl {
     // MARK: - State Transitions
 
     private func beginRecording() {
-        guard recordingState == .waiting else {
-            print("[HAPTIC-MIC] beginRecording SKIPPED — state=\(recordingState)")
-            return
-        }
+        guard recordingState == .waiting else { return }
         recordingState = .recording
         holdTimer = nil
 
-        print("[HAPTIC-MIC] beginRecording — calling delegate")
+        // Haptic fires BEFORE delegate — delegate starts AVAudioSession
+        // which suppresses Taptic Engine.
+        fireHaptic()
         startDisplayLink()
         recordingDelegate?.micButtonRecordingBegan(self)
     }
@@ -343,9 +340,7 @@ final class RecordingMicButton: UIControl {
     /// UIFeedbackGenerator API and hits Taptic Engine directly.
     /// Telegram uses this as fallback in HapticFeedback.swift.
     private func fireHaptic() {
-        print("[HAPTIC-MIC] fireHaptic() — state=\(recordingState)")
         impactFeedback.impactOccurred()
-        AudioServicesPlaySystemSound(1519)
         impactFeedback.prepare()
     }
 