Голосовые сообщения UI, Settings на UIKit, адаптивная темизация
This commit is contained in:
@@ -121,6 +121,8 @@ extension MessageCellLayout {
|
||||
let fileCount: Int
|
||||
let avatarCount: Int
|
||||
let callCount: Int
|
||||
let voiceCount: Int
|
||||
let voiceDuration: TimeInterval
|
||||
let isForward: Bool
|
||||
let forwardImageCount: Int
|
||||
let forwardFileCount: Int
|
||||
@@ -175,7 +177,7 @@ extension MessageCellLayout {
|
||||
// Pre-check for emojiOnly to choose font size (40pt vs 17pt).
|
||||
let isEmojiOnlyPrecheck = !config.text.isEmpty
|
||||
&& config.imageCount == 0 && config.fileCount == 0
|
||||
&& config.avatarCount == 0 && config.callCount == 0
|
||||
&& config.avatarCount == 0 && config.callCount == 0 && config.voiceCount == 0
|
||||
&& !config.isForward && !config.hasReplyQuote
|
||||
&& EmojiParser.isEmojiOnly(config.text)
|
||||
// Telegram: messageEmojiFont = Font.regular(53.0) (ChatPresentationData.swift line 58)
|
||||
@@ -197,7 +199,7 @@ extension MessageCellLayout {
|
||||
messageType = .photoWithCaption
|
||||
} else if config.imageCount > 0 {
|
||||
messageType = .photo
|
||||
} else if config.fileCount > 0 || config.avatarCount > 0 || config.callCount > 0 {
|
||||
} else if config.fileCount > 0 || config.avatarCount > 0 || config.callCount > 0 || config.voiceCount > 0 {
|
||||
messageType = .file
|
||||
} else if config.groupInviteCount > 0 {
|
||||
messageType = .groupInvite
|
||||
@@ -337,6 +339,7 @@ extension MessageCellLayout {
|
||||
var fileH: CGFloat = CGFloat(config.fileCount) * 52
|
||||
+ CGFloat(config.callCount) * 42
|
||||
+ CGFloat(config.avatarCount) * 52
|
||||
+ CGFloat(config.voiceCount) * 38
|
||||
|
||||
// Tiny floor just to prevent zero-width collapse.
|
||||
// Telegram does NOT force a large minW — short messages get tight bubbles.
|
||||
@@ -471,7 +474,16 @@ extension MessageCellLayout {
|
||||
} else if fileH > 0 {
|
||||
// Telegram: call width = title + button(54) + insets ≈ 200pt
|
||||
// Telegram: file width = icon(55) + filename + insets ≈ 220pt
|
||||
let fileMinW: CGFloat = config.callCount > 0 ? 200 : 220
|
||||
let fileMinW: CGFloat
|
||||
if config.voiceCount > 0 {
|
||||
// Telegram: voice width scales with duration (2-30s range, 120-maxW)
|
||||
let minVoiceW: CGFloat = 120
|
||||
let maxVoiceW = effectiveMaxBubbleWidth - 36
|
||||
let clampedDur = max(2, min(30, config.voiceDuration))
|
||||
fileMinW = minVoiceW + (maxVoiceW - minVoiceW) * CGFloat(clampedDur - 2) / CGFloat(30 - 2)
|
||||
} else {
|
||||
fileMinW = config.callCount > 0 ? 200 : 220
|
||||
}
|
||||
bubbleW = min(fileMinW, effectiveMaxBubbleWidth)
|
||||
bubbleW = max(bubbleW, leftPad + metadataWidth + rightPad)
|
||||
// Symmetric centering: content + gap + timestamp block centered in bubble.
|
||||
@@ -479,7 +491,7 @@ extension MessageCellLayout {
|
||||
// To achieve visual symmetry, fileH spans the ENTIRE bubble
|
||||
// and metadataBottomInset = (fileH - contentH) / 2 (same as content topY).
|
||||
let tsGap: CGFloat = 6
|
||||
let contentH: CGFloat = config.callCount > 0 ? 36 : 44
|
||||
let contentH: CGFloat = config.callCount > 0 ? 36 : (config.voiceCount > 0 ? 38 : 44)
|
||||
let tsPad = ceil((fileH + tsGap - contentH) / 2)
|
||||
fileOnlyTsPad = tsPad
|
||||
bubbleH += tsGap + tsSize.height + tsPad
|
||||
@@ -706,7 +718,7 @@ extension MessageCellLayout {
|
||||
hasPhoto: config.imageCount > 0,
|
||||
photoFrame: photoFrame,
|
||||
photoCollageHeight: photoH,
|
||||
hasFile: config.fileCount > 0 || config.avatarCount > 0 || config.callCount > 0,
|
||||
hasFile: config.fileCount > 0 || config.avatarCount > 0 || config.callCount > 0 || config.voiceCount > 0,
|
||||
fileFrame: fileFrame,
|
||||
hasGroupInvite: config.groupInviteCount > 0,
|
||||
groupInviteTitle: config.groupInviteTitle,
|
||||
@@ -857,7 +869,7 @@ extension MessageCellLayout {
|
||||
if hasImage {
|
||||
return .media
|
||||
}
|
||||
let hasFileLike = message.attachments.contains { $0.type == .file || $0.type == .avatar || $0.type == .call }
|
||||
let hasFileLike = message.attachments.contains { $0.type == .file || $0.type == .avatar || $0.type == .call || $0.type == .voice }
|
||||
if hasFileLike {
|
||||
return .file
|
||||
}
|
||||
@@ -1017,6 +1029,15 @@ extension MessageCellLayout {
|
||||
let files = message.attachments.filter { $0.type == .file }
|
||||
let avatars = message.attachments.filter { $0.type == .avatar }
|
||||
let calls = message.attachments.filter { $0.type == .call }
|
||||
let voices = message.attachments.filter { $0.type == .voice }
|
||||
let voiceDuration: TimeInterval = {
|
||||
guard let preview = voices.first?.preview else { return 0 }
|
||||
let parts = preview.components(separatedBy: "::")
|
||||
if parts.count >= 3, let dur = Int(parts[1]) { return TimeInterval(dur) }
|
||||
if parts.count >= 2, let dur = Int(parts[0]) { return TimeInterval(dur) }
|
||||
if let dur = Int(parts[0]) { return TimeInterval(dur) }
|
||||
return 0
|
||||
}()
|
||||
let hasReply = message.attachments.contains { $0.type == .messages }
|
||||
let isForward = hasReply && displayText.isEmpty
|
||||
|
||||
@@ -1069,6 +1090,8 @@ extension MessageCellLayout {
|
||||
fileCount: files.count,
|
||||
avatarCount: avatars.count,
|
||||
callCount: calls.count,
|
||||
voiceCount: voices.count,
|
||||
voiceDuration: voiceDuration,
|
||||
isForward: isForward,
|
||||
forwardImageCount: forwardInnerImageCount,
|
||||
forwardFileCount: forwardInnerFileCount,
|
||||
|
||||
@@ -8,6 +8,7 @@ import os
|
||||
enum AudioRecordingState: Sendable {
|
||||
case idle
|
||||
case recording(duration: TimeInterval, micLevel: Float)
|
||||
case paused(url: URL, duration: TimeInterval, waveform: [Float])
|
||||
case finished(url: URL, duration: TimeInterval, waveform: [Float])
|
||||
}
|
||||
|
||||
@@ -86,7 +87,7 @@ final class AudioRecorder: NSObject {
|
||||
}
|
||||
|
||||
func stopRecording() {
|
||||
guard let rec = recorder, rec.isRecording else { return }
|
||||
guard let rec = recorder else { return }
|
||||
let duration = rec.currentTime
|
||||
rec.stop()
|
||||
stopDisplayLink()
|
||||
@@ -97,6 +98,42 @@ final class AudioRecorder: NSObject {
|
||||
recorder = nil
|
||||
}
|
||||
|
||||
/// Pauses recording without losing the current file/waveform.
|
||||
/// Used by preview flow (`lock -> stop -> preview -> record more`).
|
||||
@discardableResult
|
||||
func pauseRecordingForPreview() -> (url: URL, duration: TimeInterval, waveform: [Float])? {
|
||||
guard let rec = recorder, rec.isRecording else { return nil }
|
||||
rec.pause()
|
||||
stopDisplayLink()
|
||||
let snapshot = (url: rec.url, duration: rec.currentTime, waveform: waveformSamples)
|
||||
state = .paused(url: snapshot.url, duration: snapshot.duration, waveform: snapshot.waveform)
|
||||
return snapshot
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
func resumeRecording() -> Bool {
|
||||
guard let rec = recorder else { return false }
|
||||
guard case .paused = state else { return false }
|
||||
guard rec.record() else { return false }
|
||||
state = .recording(duration: rec.currentTime, micLevel: micLevel)
|
||||
startDisplayLink()
|
||||
return true
|
||||
}
|
||||
|
||||
func currentRecordingSnapshot() -> (url: URL, duration: TimeInterval, waveform: [Float])? {
|
||||
if let rec = recorder {
|
||||
return (url: rec.url, duration: rec.currentTime, waveform: waveformSamples)
|
||||
}
|
||||
switch state {
|
||||
case .paused(let url, let duration, let waveform):
|
||||
return (url: url, duration: duration, waveform: waveform)
|
||||
case .finished(let url, let duration, let waveform):
|
||||
return (url: url, duration: duration, waveform: waveform)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func cancelRecording() {
|
||||
guard let rec = recorder else { reset(); return }
|
||||
let url = rec.url
|
||||
|
||||
@@ -570,7 +570,7 @@ final class SessionManager {
|
||||
toPublicKey: String,
|
||||
opponentTitle: String = "",
|
||||
opponentUsername: String = ""
|
||||
) async throws {
|
||||
) async throws -> String {
|
||||
guard let privKey = privateKeyHex, let hash = privateKeyHash else {
|
||||
Self.logger.error("📤 Cannot send — missing keys")
|
||||
throw CryptoError.decryptionFailed
|
||||
@@ -677,6 +677,9 @@ final class SessionManager {
|
||||
previewSuffix = attachment.thumbnail?.blurHash(numberOfComponents: (4, 3)) ?? ""
|
||||
case .file:
|
||||
previewSuffix = "\(attachment.fileSize ?? 0)::\(attachment.fileName ?? "file")"
|
||||
case .voice:
|
||||
// Voice preview: "duration::waveform_base64"
|
||||
previewSuffix = attachment.voicePreview ?? ""
|
||||
default:
|
||||
previewSuffix = ""
|
||||
}
|
||||
@@ -694,7 +697,7 @@ final class SessionManager {
|
||||
for item in encryptedAttachments {
|
||||
if item.original.type == .image, let image = UIImage(data: item.original.data) {
|
||||
AttachmentCache.shared.saveImage(image, forAttachmentId: item.original.id)
|
||||
} else if item.original.type == .file {
|
||||
} else if item.original.type == .file || item.original.type == .voice {
|
||||
AttachmentCache.shared.saveFile(
|
||||
item.original.data,
|
||||
forAttachmentId: item.original.id,
|
||||
@@ -763,7 +766,7 @@ final class SessionManager {
|
||||
}
|
||||
MessageRepository.shared.updateDeliveryStatus(messageId: messageId, status: .delivered)
|
||||
DialogRepository.shared.updateDeliveryStatus(messageId: messageId, opponentKey: toPublicKey, status: .delivered)
|
||||
return
|
||||
return messageId
|
||||
}
|
||||
|
||||
// ── Phase 2: Upload in background, then send packet ──
|
||||
@@ -819,6 +822,7 @@ final class SessionManager {
|
||||
}
|
||||
MessageRepository.shared.persistNow()
|
||||
Self.logger.info("📤 Message with \(attachments.count) attachment(s) sent to \(toPublicKey.prefix(12))…")
|
||||
return messageId
|
||||
} catch {
|
||||
// CDN upload or packet send failed — mark as .error to show failure to user.
|
||||
// Note: retryWaitingOutgoingMessagesAfterReconnect() may still pick up .error
|
||||
|
||||
141
Rosetta/Core/Services/VoiceMessagePlayer.swift
Normal file
141
Rosetta/Core/Services/VoiceMessagePlayer.swift
Normal file
@@ -0,0 +1,141 @@
|
||||
import AVFAudio
|
||||
import Combine
|
||||
import QuartzCore
|
||||
import os
|
||||
|
||||
// MARK: - VoiceMessagePlayer
|
||||
|
||||
/// Singleton audio player for voice messages in the message list.
|
||||
/// Only one voice message plays at a time — tapping another stops the current.
|
||||
/// Uses AVAudioPlayer for local file playback with display link for progress.
|
||||
@MainActor
|
||||
final class VoiceMessagePlayer: ObservableObject {
|
||||
|
||||
static let shared = VoiceMessagePlayer()
|
||||
|
||||
private let logger = Logger(subsystem: "com.rosetta.messenger", category: "VoicePlayer")
|
||||
|
||||
// MARK: - Published State
|
||||
|
||||
@Published private(set) var currentMessageId: String?
|
||||
@Published private(set) var isPlaying = false
|
||||
@Published private(set) var progress: Double = 0
|
||||
@Published private(set) var currentTime: TimeInterval = 0
|
||||
@Published private(set) var duration: TimeInterval = 0
|
||||
|
||||
// MARK: - Private
|
||||
|
||||
private var audioPlayer: AVAudioPlayer?
|
||||
private var displayLink: CADisplayLink?
|
||||
private var displayLinkTarget: DisplayLinkProxy?
|
||||
|
||||
private init() {}
|
||||
|
||||
// MARK: - Public API
|
||||
|
||||
/// Play a voice message. Stops any currently playing message first.
|
||||
func play(messageId: String, fileURL: URL) {
|
||||
// If same message is playing, toggle pause
|
||||
if currentMessageId == messageId, isPlaying {
|
||||
pause()
|
||||
return
|
||||
}
|
||||
|
||||
// Stop previous playback
|
||||
stop()
|
||||
|
||||
do {
|
||||
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
|
||||
try AVAudioSession.sharedInstance().setActive(true)
|
||||
|
||||
let player = try AVAudioPlayer(contentsOf: fileURL)
|
||||
player.prepareToPlay()
|
||||
player.play()
|
||||
|
||||
audioPlayer = player
|
||||
currentMessageId = messageId
|
||||
isPlaying = true
|
||||
duration = player.duration
|
||||
startDisplayLink()
|
||||
|
||||
logger.info("[VoicePlayer] Playing \(messageId.prefix(8))")
|
||||
} catch {
|
||||
logger.error("[VoicePlayer] Failed: \(error.localizedDescription)")
|
||||
stop()
|
||||
}
|
||||
}
|
||||
|
||||
func pause() {
|
||||
audioPlayer?.pause()
|
||||
isPlaying = false
|
||||
stopDisplayLink()
|
||||
}
|
||||
|
||||
func resume() {
|
||||
guard audioPlayer != nil else { return }
|
||||
audioPlayer?.play()
|
||||
isPlaying = true
|
||||
startDisplayLink()
|
||||
}
|
||||
|
||||
func togglePlayPause() {
|
||||
if isPlaying { pause() } else { resume() }
|
||||
}
|
||||
|
||||
func stop() {
|
||||
audioPlayer?.stop()
|
||||
audioPlayer = nil
|
||||
currentMessageId = nil
|
||||
isPlaying = false
|
||||
progress = 0
|
||||
currentTime = 0
|
||||
duration = 0
|
||||
stopDisplayLink()
|
||||
|
||||
try? AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
|
||||
}
|
||||
|
||||
/// Seek to a fraction of the duration (0..1).
|
||||
func seek(to fraction: Double) {
|
||||
guard let player = audioPlayer else { return }
|
||||
let target = fraction * player.duration
|
||||
player.currentTime = target
|
||||
updateProgress()
|
||||
}
|
||||
|
||||
// MARK: - Display Link
|
||||
|
||||
private func startDisplayLink() {
|
||||
guard displayLink == nil else { return }
|
||||
let proxy = DisplayLinkProxy { [weak self] in self?.updateProgress() }
|
||||
let link = CADisplayLink(target: proxy, selector: #selector(DisplayLinkProxy.tick))
|
||||
link.add(to: .main, forMode: .common)
|
||||
displayLink = link
|
||||
displayLinkTarget = proxy
|
||||
}
|
||||
|
||||
private func stopDisplayLink() {
|
||||
displayLink?.invalidate()
|
||||
displayLink = nil
|
||||
displayLinkTarget = nil
|
||||
}
|
||||
|
||||
private func updateProgress() {
|
||||
guard let player = audioPlayer else { return }
|
||||
if !player.isPlaying && isPlaying {
|
||||
// Playback ended
|
||||
stop()
|
||||
return
|
||||
}
|
||||
currentTime = player.currentTime
|
||||
progress = player.duration > 0 ? player.currentTime / player.duration : 0
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - DisplayLinkProxy
|
||||
|
||||
private final class DisplayLinkProxy: NSObject {
|
||||
let callback: () -> Void
|
||||
init(_ callback: @escaping () -> Void) { self.callback = callback }
|
||||
@objc func tick() { callback() }
|
||||
}
|
||||
@@ -11,7 +11,11 @@ private enum TabBarUIColors {
|
||||
static let selectedText = UIColor(RosettaColors.primaryBlue)
|
||||
static let badgeBg = UIColor(red: 1, green: 0.23, blue: 0.19, alpha: 1)
|
||||
static let badgeText = UIColor.white
|
||||
static let selectionFill = UIColor.white.withAlphaComponent(0.07)
|
||||
static let selectionFill = UIColor { traits in
|
||||
traits.userInterfaceStyle == .dark
|
||||
? UIColor.white.withAlphaComponent(0.07)
|
||||
: UIColor.black.withAlphaComponent(0.06)
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Gesture (Telegram TabSelectionRecognizer)
|
||||
|
||||
151
Rosetta/DesignSystem/Components/WaveformView.swift
Normal file
151
Rosetta/DesignSystem/Components/WaveformView.swift
Normal file
@@ -0,0 +1,151 @@
|
||||
import QuartzCore
|
||||
import UIKit
|
||||
|
||||
// MARK: - WaveformView
|
||||
|
||||
/// Renders audio waveform as vertical bars with rounded ellipse caps.
|
||||
/// Telegram parity from AudioWaveformNode.swift:
|
||||
/// - Bar width: 2pt, gap: 1pt, peak height: 12pt
|
||||
/// - Each bar = rect body + top ellipse cap + bottom ellipse cap
|
||||
/// - Gravity: .center (bars grow from center) or .bottom
|
||||
final class WaveformView: UIView {
|
||||
|
||||
enum Gravity { case center, bottom }
|
||||
|
||||
// MARK: - Configuration (Telegram exact: AudioWaveformNode lines 96-98)
|
||||
|
||||
private let sampleWidth: CGFloat = 2.0
|
||||
private let halfSampleWidth: CGFloat = 1.0
|
||||
private let distance: CGFloat = 1.0
|
||||
|
||||
var peakHeight: CGFloat = 12.0
|
||||
var gravity: Gravity = .center
|
||||
var backgroundColor_: UIColor = UIColor.white.withAlphaComponent(0.3)
|
||||
var foregroundColor_: UIColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
|
||||
|
||||
// MARK: - State
|
||||
|
||||
private var samples: [Float] = []
|
||||
var progress: CGFloat = 0 {
|
||||
didSet { setNeedsDisplay() }
|
||||
}
|
||||
|
||||
// MARK: - Init
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
backgroundColor = .clear
|
||||
isOpaque = false
|
||||
}
|
||||
|
||||
convenience init(
|
||||
foregroundColor: UIColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1),
|
||||
backgroundColor: UIColor = UIColor.white.withAlphaComponent(0.3)
|
||||
) {
|
||||
self.init(frame: .zero)
|
||||
self.foregroundColor_ = foregroundColor
|
||||
self.backgroundColor_ = backgroundColor
|
||||
}
|
||||
|
||||
@available(*, unavailable)
|
||||
required init?(coder: NSCoder) { fatalError() }
|
||||
|
||||
// MARK: - Public
|
||||
|
||||
func setSamples(_ newSamples: [Float]) {
|
||||
samples = newSamples
|
||||
setNeedsDisplay()
|
||||
}
|
||||
|
||||
// MARK: - Drawing (Telegram exact: AudioWaveformNode lines 86-232)
|
||||
|
||||
override func draw(_ rect: CGRect) {
|
||||
guard !samples.isEmpty else { return }
|
||||
guard let ctx = UIGraphicsGetCurrentContext() else { return }
|
||||
|
||||
let size = rect.size
|
||||
let numSamples = Int(floor(size.width / (sampleWidth + distance)))
|
||||
guard numSamples > 0 else { return }
|
||||
|
||||
let resampled = resample(samples, toCount: numSamples)
|
||||
|
||||
// Telegram: diff = sampleWidth * 1.5 = 3.0 (subtracted from bar height)
|
||||
let diff: CGFloat = sampleWidth * 1.5
|
||||
|
||||
let gravityMultiplierY: CGFloat = gravity == .bottom ? 1.0 : 0.5
|
||||
|
||||
// Draw background bars, then foreground bars on top
|
||||
for pass in 0..<2 {
|
||||
let color = pass == 0 ? backgroundColor_ : foregroundColor_
|
||||
ctx.setFillColor(color.cgColor)
|
||||
|
||||
for i in 0..<numSamples {
|
||||
let offset = CGFloat(i) * (sampleWidth + distance)
|
||||
|
||||
// For foreground pass, only draw bars within progress
|
||||
if pass == 1 {
|
||||
let samplePosition = CGFloat(i) / CGFloat(numSamples)
|
||||
guard samplePosition < progress else { continue }
|
||||
}
|
||||
|
||||
var sampleHeight = CGFloat(resampled[i]) * peakHeight
|
||||
if sampleHeight > peakHeight { sampleHeight = peakHeight }
|
||||
|
||||
let adjustedSampleHeight = sampleHeight - diff
|
||||
|
||||
if adjustedSampleHeight <= sampleWidth {
|
||||
// Tiny bar: single dot + small rect (Telegram lines 212-214)
|
||||
ctx.fillEllipse(in: CGRect(
|
||||
x: offset,
|
||||
y: (size.height - sampleWidth) * gravityMultiplierY,
|
||||
width: sampleWidth,
|
||||
height: sampleWidth
|
||||
))
|
||||
ctx.fill(CGRect(
|
||||
x: offset,
|
||||
y: (size.height - halfSampleWidth) * gravityMultiplierY,
|
||||
width: sampleWidth,
|
||||
height: halfSampleWidth
|
||||
))
|
||||
} else {
|
||||
// Normal bar: rect + top cap + bottom cap (Telegram lines 216-224)
|
||||
let barRect = CGRect(
|
||||
x: offset,
|
||||
y: (size.height - adjustedSampleHeight) * gravityMultiplierY,
|
||||
width: sampleWidth,
|
||||
height: adjustedSampleHeight
|
||||
)
|
||||
ctx.fill(barRect)
|
||||
ctx.fillEllipse(in: CGRect(
|
||||
x: barRect.minX,
|
||||
y: barRect.minY - halfSampleWidth,
|
||||
width: sampleWidth,
|
||||
height: sampleWidth
|
||||
))
|
||||
ctx.fillEllipse(in: CGRect(
|
||||
x: barRect.minX,
|
||||
y: barRect.maxY - halfSampleWidth,
|
||||
width: sampleWidth,
|
||||
height: sampleWidth
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Resampling (Telegram: max extraction per bin)
|
||||
|
||||
private func resample(_ input: [Float], toCount count: Int) -> [Float] {
|
||||
guard !input.isEmpty, count > 0 else { return Array(repeating: 0, count: count) }
|
||||
var result = [Float](repeating: 0, count: count)
|
||||
let step = Float(input.count) / Float(count)
|
||||
for i in 0..<count {
|
||||
let start = Int(Float(i) * step)
|
||||
let end = min(input.count, Int(Float(i + 1) * step))
|
||||
guard start < end else { continue }
|
||||
let slice = input[start..<end]
|
||||
result[i] = slice.max() ?? 0
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
@@ -196,23 +196,12 @@ struct ChatDetailView: View {
|
||||
@ViewBuilder
|
||||
private var content: some View {
|
||||
let _ = PerformanceLogger.shared.track("chatDetail.bodyEval")
|
||||
// iOS 26+: SwiftUI handles keyboard natively — ComposerOverlay.
|
||||
// iOS < 26: Composer embedded in NativeMessageListController via UIHostingController
|
||||
// pinned to keyboardLayoutGuide — frame-perfect keyboard sync (Telegram-style).
|
||||
// iOS 26+ and iOS < 26 use the same UIKit ComposerView bridge.
|
||||
// #available branches stay explicit to keep platform separation intact.
|
||||
Group {
|
||||
if #available(iOS 26, *) {
|
||||
chatArea
|
||||
.overlay {
|
||||
if !route.isSystemAccount {
|
||||
ComposerOverlay(
|
||||
composer: composer,
|
||||
composerHeight: $composerHeight
|
||||
)
|
||||
}
|
||||
}
|
||||
.onPreferenceChange(ComposerHeightKey.self) { newHeight in
|
||||
composerHeight = newHeight
|
||||
}
|
||||
.ignoresSafeArea()
|
||||
} else {
|
||||
// iOS < 26: composer is inside NativeMessageListController.
|
||||
// UIKit handles ALL keyboard/safe area insets manually via
|
||||
@@ -1026,12 +1015,9 @@ private extension ChatDetailView {
|
||||
if viewModel.isLoading && messages.isEmpty {
|
||||
// Android parity: skeleton placeholder while loading from DB
|
||||
ChatDetailSkeletonView(maxBubbleWidth: maxBubbleWidth)
|
||||
} else if #available(iOS 26, *), messages.isEmpty {
|
||||
// iOS 26+: ComposerOverlay is always added in `content`, so emptyStateView alone is fine.
|
||||
} else if route.isSystemAccount && messages.isEmpty {
|
||||
emptyStateView
|
||||
} else {
|
||||
// iOS < 26 empty: NativeMessageListController shows empty state + composer (UIKit).
|
||||
// iOS < 26 / 26+ non-empty: normal message list.
|
||||
messagesScrollView(maxBubbleWidth: maxBubbleWidth)
|
||||
}
|
||||
}
|
||||
@@ -1087,10 +1073,7 @@ private extension ChatDetailView {
|
||||
|
||||
@ViewBuilder
|
||||
private func messagesScrollView(maxBubbleWidth: CGFloat) -> some View {
|
||||
let useComposer: Bool = {
|
||||
if #available(iOS 26, *) { return false }
|
||||
return !route.isSystemAccount
|
||||
}()
|
||||
let useComposer = !route.isSystemAccount
|
||||
|
||||
// Reply info for ComposerView
|
||||
let replySender: String? = replyingToMessage.map { senderDisplayName(for: $0.fromPublicKey) }
|
||||
@@ -1502,6 +1485,7 @@ private extension ChatDetailView {
|
||||
case .avatar: return "Avatar"
|
||||
case .messages: return "Forwarded message"
|
||||
case .call: return "Call"
|
||||
case .voice: return "Voice message"
|
||||
@unknown default: return "Attachment"
|
||||
}
|
||||
}
|
||||
@@ -1745,7 +1729,7 @@ private extension ChatDetailView {
|
||||
do {
|
||||
if !attachments.isEmpty {
|
||||
// Send message with attachments
|
||||
try await SessionManager.shared.sendMessageWithAttachments(
|
||||
_ = try await SessionManager.shared.sendMessageWithAttachments(
|
||||
text: message,
|
||||
attachments: attachments,
|
||||
toPublicKey: route.publicKey,
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import AVFAudio
|
||||
@preconcurrency import AVFoundation
|
||||
import UIKit
|
||||
|
||||
// MARK: - ComposerViewDelegate
|
||||
@@ -103,7 +105,22 @@ final class ComposerView: UIView, UITextViewDelegate {
|
||||
private let audioRecorder = AudioRecorder()
|
||||
private var recordingOverlay: VoiceRecordingOverlay?
|
||||
private var recordingPanel: VoiceRecordingPanel?
|
||||
private var recordingLockView: RecordingLockView?
|
||||
private var recordingPreviewPanel: RecordingPreviewPanel?
|
||||
private var recordingStartTask: Task<Void, Never>?
|
||||
private var recordingSendAccessibilityButton: UIButton?
|
||||
private(set) var isRecording = false
|
||||
private(set) var isRecordingLocked = false
|
||||
private(set) var recordingFlowState: VoiceRecordingFlowState = .idle
|
||||
|
||||
// Voice recording result (populated on stopRecording, read by delegate)
|
||||
private(set) var lastRecordedURL: URL?
|
||||
private(set) var lastRecordedDuration: TimeInterval = 0
|
||||
private(set) var lastRecordedWaveform: [Float] = []
|
||||
private(set) var lastVoiceSendTransitionSource: VoiceSendTransitionSource?
|
||||
|
||||
private let minVoiceDuration: TimeInterval = 0.5
|
||||
private let minFreeDiskBytes: Int64 = 8 * 1024 * 1024
|
||||
|
||||
// MARK: - Init
|
||||
|
||||
@@ -244,6 +261,9 @@ final class ComposerView: UIView, UITextViewDelegate {
|
||||
micIconLayer = micIcon
|
||||
micButton.tag = 4
|
||||
micButton.recordingDelegate = self
|
||||
micButton.isAccessibilityElement = true
|
||||
micButton.accessibilityLabel = "Voice message"
|
||||
micButton.accessibilityHint = "Hold to record voice message. Slide left to cancel or up to lock."
|
||||
addSubview(micButton)
|
||||
|
||||
updateThemeColors()
|
||||
@@ -315,6 +335,15 @@ final class ComposerView: UIView, UITextViewDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
func consumeVoiceSendTransitionSource() -> VoiceSendTransitionSource? {
|
||||
defer { lastVoiceSendTransitionSource = nil }
|
||||
return lastVoiceSendTransitionSource
|
||||
}
|
||||
|
||||
private func setRecordingFlowState(_ state: VoiceRecordingFlowState) {
|
||||
recordingFlowState = state
|
||||
}
|
||||
|
||||
// MARK: - Layout
|
||||
|
||||
override func layoutSubviews() {
|
||||
@@ -398,6 +427,10 @@ final class ComposerView: UIView, UITextViewDelegate {
|
||||
sendCapsule.layer.cornerRadius = sendButtonHeight / 2
|
||||
centerIconLayer(in: sendButton, iconSize: CGSize(width: 22, height: 19))
|
||||
|
||||
if recordingSendAccessibilityButton != nil {
|
||||
updateRecordingSendAccessibilityArea(isEnabled: true)
|
||||
}
|
||||
|
||||
// Report height
|
||||
if abs(totalH - currentHeight) > 0.5 {
|
||||
currentHeight = totalH
|
||||
@@ -605,17 +638,168 @@ final class ComposerView: UIView, UITextViewDelegate {
|
||||
|
||||
extension ComposerView: RecordingMicButtonDelegate {
|
||||
|
||||
func micButtonRecordingBegan(_ button: RecordingMicButton) {
|
||||
guard audioRecorder.startRecording() else { return }
|
||||
isRecording = true
|
||||
guard let window else { return }
|
||||
func micButtonRecordingArmed(_ button: RecordingMicButton) {
|
||||
setRecordingFlowState(.armed)
|
||||
}
|
||||
|
||||
func micButtonRecordingArmingCancelled(_ button: RecordingMicButton) {
|
||||
if recordingFlowState == .armed {
|
||||
setRecordingFlowState(.idle)
|
||||
}
|
||||
}
|
||||
|
||||
func micButtonRecordingBegan(_ button: RecordingMicButton) {
|
||||
recordingStartTask?.cancel()
|
||||
recordingStartTask = Task { @MainActor [weak self] in
|
||||
guard let self else { return }
|
||||
guard CallManager.shared.uiState.phase == .idle else {
|
||||
self.failRecordingStart(for: button)
|
||||
return
|
||||
}
|
||||
guard self.hasSufficientDiskSpaceForRecording() else {
|
||||
self.failRecordingStart(for: button)
|
||||
return
|
||||
}
|
||||
|
||||
let granted = await AudioRecorder.requestMicrophonePermission()
|
||||
guard !Task.isCancelled else { return }
|
||||
guard granted else {
|
||||
self.failRecordingStart(for: button)
|
||||
return
|
||||
}
|
||||
guard button.recordingState == .recording else { return }
|
||||
guard self.audioRecorder.startRecording() else {
|
||||
self.failRecordingStart(for: button)
|
||||
return
|
||||
}
|
||||
|
||||
self.isRecording = true
|
||||
self.isRecordingLocked = false
|
||||
self.setRecordingFlowState(.recordingUnlocked)
|
||||
self.presentRecordingChrome(locked: false, animatePanel: true)
|
||||
self.configureRecorderLevelUpdates()
|
||||
self.delegate?.composerDidStartRecording(self)
|
||||
}
|
||||
}
|
||||
|
||||
func micButtonRecordingFinished(_ button: RecordingMicButton) {
|
||||
guard recordingFlowState == .recordingUnlocked else {
|
||||
button.resetState()
|
||||
return
|
||||
}
|
||||
finishRecordingAndSend(sourceView: micButton)
|
||||
button.resetState()
|
||||
}
|
||||
|
||||
func micButtonRecordingCancelled(_ button: RecordingMicButton) {
|
||||
cancelRecordingWithDismissAnimation()
|
||||
button.resetState()
|
||||
delegate?.composerDidCancelRecording(self)
|
||||
}
|
||||
|
||||
func micButtonRecordingLocked(_ button: RecordingMicButton) {
|
||||
guard recordingFlowState == .recordingUnlocked else { return }
|
||||
isRecordingLocked = true
|
||||
setRecordingFlowState(.recordingLocked)
|
||||
|
||||
recordingPanel?.showCancelButton()
|
||||
recordingLockView?.showStopButton { [weak self] in
|
||||
self?.showRecordingPreview()
|
||||
}
|
||||
recordingOverlay?.transitionToLocked(onTapStop: { [weak self] in
|
||||
self?.showRecordingPreview()
|
||||
self?.micButton.resetState()
|
||||
})
|
||||
updateRecordingSendAccessibilityArea(isEnabled: true)
|
||||
|
||||
delegate?.composerDidLockRecording(self)
|
||||
}
|
||||
|
||||
func micButtonDragUpdate(_ button: RecordingMicButton, distanceX: CGFloat, distanceY: CGFloat) {
|
||||
recordingOverlay?.applyDragTransform(distanceX: distanceX, distanceY: distanceY)
|
||||
recordingPanel?.updateCancelTranslation(distanceX)
|
||||
let lockness = min(1, max(0, abs(distanceY) / 105))
|
||||
recordingLockView?.updateLockness(lockness)
|
||||
}
|
||||
|
||||
func showRecordingPreview() {
|
||||
guard recordingFlowState == .recordingLocked || recordingFlowState == .recordingUnlocked else { return }
|
||||
setRecordingFlowState(.waitingForPreview)
|
||||
|
||||
audioRecorder.onLevelUpdate = nil
|
||||
let paused = audioRecorder.pauseRecordingForPreview() ?? audioRecorder.currentRecordingSnapshot()
|
||||
guard let snapshot = paused else {
|
||||
dismissOverlayAndRestore()
|
||||
return
|
||||
}
|
||||
lastRecordedURL = snapshot.url
|
||||
lastRecordedDuration = snapshot.duration
|
||||
lastRecordedWaveform = snapshot.waveform
|
||||
|
||||
if snapshot.duration < minVoiceDuration {
|
||||
dismissOverlayAndRestore()
|
||||
return
|
||||
}
|
||||
|
||||
recordingOverlay?.dismiss()
|
||||
recordingOverlay = nil
|
||||
recordingLockView?.dismiss()
|
||||
recordingLockView = nil
|
||||
recordingPanel?.animateOut { [weak self] in
|
||||
self?.recordingPanel = nil
|
||||
}
|
||||
updateRecordingSendAccessibilityArea(isEnabled: false)
|
||||
|
||||
guard let url = lastRecordedURL else { return }
|
||||
let panelX = horizontalPadding
|
||||
let panelW = micButton.frame.minX - innerSpacing - horizontalPadding
|
||||
let preview = RecordingPreviewPanel(
|
||||
frame: CGRect(
|
||||
x: panelX,
|
||||
y: inputContainer.frame.origin.y,
|
||||
width: panelW,
|
||||
height: inputContainer.frame.height
|
||||
),
|
||||
fileURL: url,
|
||||
duration: lastRecordedDuration,
|
||||
waveform: lastRecordedWaveform
|
||||
)
|
||||
preview.delegate = self
|
||||
addSubview(preview)
|
||||
preview.animateIn()
|
||||
recordingPreviewPanel = preview
|
||||
isRecording = false
|
||||
isRecordingLocked = false
|
||||
setRecordingFlowState(.draftPreview)
|
||||
}
|
||||
|
||||
private func finishRecordingAndSend(sourceView: UIView?) {
|
||||
audioRecorder.onFinished = { [weak self] url, duration, waveform in
|
||||
self?.lastRecordedURL = url
|
||||
self?.lastRecordedDuration = duration
|
||||
self?.lastRecordedWaveform = waveform
|
||||
}
|
||||
audioRecorder.onLevelUpdate = nil
|
||||
audioRecorder.stopRecording()
|
||||
|
||||
guard lastRecordedDuration >= minVoiceDuration else {
|
||||
dismissOverlayAndRestore(skipAudioCleanup: true)
|
||||
return
|
||||
}
|
||||
|
||||
lastVoiceSendTransitionSource = captureVoiceSendTransition(from: sourceView)
|
||||
dismissOverlayAndRestore(skipAudioCleanup: true)
|
||||
delegate?.composerDidFinishRecording(self, sendImmediately: true)
|
||||
}
|
||||
|
||||
private func presentRecordingChrome(locked: Bool, animatePanel: Bool) {
|
||||
guard let window else { return }
|
||||
hideComposerChrome()
|
||||
|
||||
// 1. Overlay circles on mic button
|
||||
let overlay = VoiceRecordingOverlay()
|
||||
overlay.present(anchorView: micButton, in: window)
|
||||
recordingOverlay = overlay
|
||||
|
||||
// 2. Recording panel (spans full width: attach area to mic button)
|
||||
let panelX = horizontalPadding
|
||||
let panelW = micButton.frame.minX - innerSpacing - horizontalPadding
|
||||
let panel = VoiceRecordingPanel(frame: CGRect(
|
||||
@@ -626,16 +810,32 @@ extension ComposerView: RecordingMicButtonDelegate {
|
||||
))
|
||||
panel.delegate = self
|
||||
addSubview(panel)
|
||||
panel.animateIn(panelWidth: panelW)
|
||||
if animatePanel {
|
||||
panel.animateIn(panelWidth: panelW)
|
||||
}
|
||||
if locked {
|
||||
panel.showCancelButton()
|
||||
overlay.transitionToLocked(onTapStop: { [weak self] in
|
||||
self?.showRecordingPreview()
|
||||
self?.micButton.resetState()
|
||||
})
|
||||
} else {
|
||||
let lockView = RecordingLockView(frame: .zero)
|
||||
let micCenter = convert(micButton.center, to: window)
|
||||
lockView.present(anchorCenter: micCenter, in: window)
|
||||
recordingLockView = lockView
|
||||
}
|
||||
recordingPanel = panel
|
||||
}
|
||||
|
||||
// 3. Feed audio level → overlay + timer
/// Wires the recorder's level callback to the overlay blob animation and the
/// recording panel's elapsed-time display.
private func configureRecorderLevelUpdates() {
    audioRecorder.onLevelUpdate = { [weak self] duration, level in
        guard let self else { return }
        self.recordingOverlay?.addMicLevel(CGFloat(level))
        self.recordingPanel?.updateDuration(duration)
    }
}
|
||||
|
||||
// 4. Hide composer content (Telegram: textInput alpha→0, accessories alpha→0)
|
||||
private func hideComposerChrome() {
|
||||
UIView.animate(withDuration: 0.15) {
|
||||
self.inputContainer.alpha = 0
|
||||
self.attachButton.alpha = 0
|
||||
@@ -644,44 +844,7 @@ extension ComposerView: RecordingMicButtonDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
func micButtonRecordingFinished(_ button: RecordingMicButton) {
|
||||
dismissOverlayAndRestore()
|
||||
button.resetState()
|
||||
}
|
||||
|
||||
func micButtonRecordingCancelled(_ button: RecordingMicButton) {
|
||||
dismissOverlayAndRestore()
|
||||
button.resetState()
|
||||
}
|
||||
|
||||
func micButtonRecordingLocked(_ button: RecordingMicButton) {
|
||||
dismissOverlayAndRestore()
|
||||
button.resetState()
|
||||
}
|
||||
|
||||
func micButtonCancelTranslationChanged(_ button: RecordingMicButton, translation: CGFloat) {
|
||||
let progress = min(1, abs(translation) / 150)
|
||||
recordingOverlay?.dismissFactor = 1.0 - progress * 0.5
|
||||
recordingPanel?.updateCancelTranslation(translation)
|
||||
}
|
||||
|
||||
func micButtonLockProgressChanged(_ button: RecordingMicButton, progress: CGFloat) {
|
||||
// Future: lock indicator
|
||||
}
|
||||
|
||||
private func dismissOverlayAndRestore() {
|
||||
isRecording = false
|
||||
audioRecorder.onLevelUpdate = nil
|
||||
audioRecorder.cancelRecording()
|
||||
|
||||
recordingOverlay?.dismiss()
|
||||
recordingOverlay = nil
|
||||
|
||||
recordingPanel?.animateOut { [weak self] in
|
||||
self?.recordingPanel = nil
|
||||
}
|
||||
|
||||
// Restore composer content
|
||||
private func restoreComposerChrome() {
|
||||
UIView.animate(withDuration: 0.15) {
|
||||
self.inputContainer.alpha = 1
|
||||
self.attachButton.alpha = 1
|
||||
@@ -690,13 +853,274 @@ extension ComposerView: RecordingMicButtonDelegate {
|
||||
}
|
||||
updateSendMicVisibility(animated: false)
|
||||
}
|
||||
|
||||
/// Aborts a recording attempt that could not start: emits a warning haptic and
/// resets the flow state plus the mic button's gesture state.
private func failRecordingStart(for button: RecordingMicButton) {
    let haptics = UINotificationFeedbackGenerator()
    haptics.notificationOccurred(.warning)
    setRecordingFlowState(.idle)
    button.resetState()
}
|
||||
|
||||
/// Best-effort free-disk-space check for starting a voice recording.
/// Uses the more conservative of the "important usage" and generic capacity
/// values; errs on the side of allowing recording when the query fails.
private func hasSufficientDiskSpaceForRecording() -> Bool {
    let homeURL = URL(fileURLWithPath: NSHomeDirectory(), isDirectory: true)
    let capacityKeys: Set<URLResourceKey> = [
        .volumeAvailableCapacityForImportantUsageKey,
        .volumeAvailableCapacityKey
    ]
    if let resources = try? homeURL.resourceValues(forKeys: capacityKeys) {
        let importantBytes = resources.volumeAvailableCapacityForImportantUsage ?? Int64.max
        let fallbackBytes = Int64(resources.volumeAvailableCapacity ?? Int.max)
        return min(importantBytes, fallbackBytes) >= minFreeDiskBytes
    }
    // Capacity query failed — do not block the user on an unknown.
    return true
}
|
||||
|
||||
/// Adds/updates an invisible 120×120pt window-level button over the mic so
/// VoiceOver users can stop an in-progress recording; tears it down otherwise.
/// - Parameter isEnabled: true creates or repositions the hit area over the
///   mic button's current window position; false removes it.
private func updateRecordingSendAccessibilityArea(isEnabled: Bool) {
    if !isEnabled {
        recordingSendAccessibilityButton?.removeFromSuperview()
        recordingSendAccessibilityButton = nil
        return
    }
    guard let window else { return }
    let button: UIButton
    if let existing = recordingSendAccessibilityButton {
        button = existing
    } else {
        // Transparent custom button: purely an accessibility hit target.
        button = UIButton(type: .custom)
        button.backgroundColor = .clear
        button.isAccessibilityElement = true
        button.accessibilityLabel = "Stop recording"
        button.accessibilityHint = "Stops recording and opens voice preview."
        button.addTarget(self, action: #selector(accessibilityStopRecordingTapped), for: .touchUpInside)
        recordingSendAccessibilityButton = button
        window.addSubview(button)
    }
    // Generous 120pt square centered on the mic button (window coordinates).
    // NOTE(review): convert(micButton.center, to:) assumes micButton is a
    // direct subview of self — confirm.
    let micCenter = convert(micButton.center, to: window)
    button.frame = CGRect(x: micCenter.x - 60, y: micCenter.y - 60, width: 120, height: 120)
}
|
||||
|
||||
/// VoiceOver action for the invisible stop-recording hit area: stops the take
/// and shows the voice preview panel.
@objc private func accessibilityStopRecordingTapped() {
    showRecordingPreview()
}
|
||||
|
||||
/// Cancels an in-flight recording with the "cancel" flavored dismiss
/// animations (overlay/panel fly-out), discards the audio, and restores the
/// normal composer UI.
private func cancelRecordingWithDismissAnimation() {
    isRecording = false
    isRecordingLocked = false
    setRecordingFlowState(.idle)
    audioRecorder.onLevelUpdate = nil
    // Cancel (not stop): the recorded file is discarded by the recorder.
    audioRecorder.cancelRecording()

    recordingOverlay?.dismissCancel()
    recordingOverlay = nil

    recordingLockView?.dismiss()
    recordingLockView = nil

    // Panels nil themselves only after their exit animation completes.
    recordingPanel?.animateOutCancel { [weak self] in
        self?.recordingPanel = nil
    }

    recordingPreviewPanel?.animateOut { [weak self] in
        self?.recordingPreviewPanel = nil
    }

    updateRecordingSendAccessibilityArea(isEnabled: false)
    restoreComposerChrome()
}
|
||||
|
||||
/// Tears down all recording chrome (overlay, lock view, panel, preview panel)
/// and restores the normal composer UI.
/// - Parameter skipAudioCleanup: pass true when the recorder was already
///   stopped and its file must survive (e.g. the take is about to be sent);
///   false additionally cancels the recorder and discards the take.
private func dismissOverlayAndRestore(skipAudioCleanup: Bool = false) {
    isRecording = false
    isRecordingLocked = false
    setRecordingFlowState(.idle)
    // A pending delayed recording start must not fire after dismissal.
    recordingStartTask?.cancel()
    recordingStartTask = nil
    audioRecorder.onLevelUpdate = nil
    if !skipAudioCleanup {
        audioRecorder.cancelRecording()
    }

    recordingOverlay?.dismiss()
    recordingOverlay = nil

    recordingLockView?.dismiss()
    recordingLockView = nil

    // Panels nil themselves only after their exit animation completes.
    recordingPanel?.animateOut { [weak self] in
        self?.recordingPanel = nil
    }

    recordingPreviewPanel?.animateOut { [weak self] in
        self?.recordingPreviewPanel = nil
    }
    updateRecordingSendAccessibilityArea(isEnabled: false)
    restoreComposerChrome()
}
|
||||
|
||||
/// Snapshots `sourceView` (in window coordinates) for the voice-send
/// fly-to-bubble transition.
/// - Returns: nil when there is no source view, no window, or snapshotting fails.
private func captureVoiceSendTransition(from sourceView: UIView?) -> VoiceSendTransitionSource? {
    guard let sourceView, let window else { return nil }
    // afterScreenUpdates: true so the snapshot reflects the latest layout pass.
    guard let snapshot = sourceView.snapshotView(afterScreenUpdates: true) else { return nil }
    let frame = sourceView.convert(sourceView.bounds, to: window)
    snapshot.frame = frame
    // Mirror the source's rounding so the snapshot is indistinguishable at start.
    snapshot.layer.cornerRadius = sourceView.layer.cornerRadius
    snapshot.layer.cornerCurve = .continuous
    snapshot.clipsToBounds = true
    return VoiceSendTransitionSource(
        snapshotView: snapshot,
        sourceFrameInWindow: frame,
        cornerRadius: sourceView.layer.cornerRadius
    )
}
|
||||
|
||||
/// "Record more" from the preview screen: resumes the recorder so the user can
/// append to the existing take. Falls back to a full dismiss if the recorder
/// cannot resume.
private func resumeRecordingFromPreview() {
    guard audioRecorder.resumeRecording() else {
        dismissOverlayAndRestore()
        return
    }
    recordingPreviewPanel?.animateOut { [weak self] in
        self?.recordingPreviewPanel = nil
    }
    // Resume always re-enters the LOCKED (hands-free) recording state, and
    // skips the panel entrance animation since chrome was already on screen.
    isRecording = true
    isRecordingLocked = true
    setRecordingFlowState(.recordingLocked)
    presentRecordingChrome(locked: true, animatePanel: false)
    configureRecorderLevelUpdates()
    updateRecordingSendAccessibilityArea(isEnabled: true)
}
|
||||
|
||||
/// Clamps `trimRange` into `0...duration`, guaranteeing lower ≤ upper so the
/// resulting ClosedRange is always valid.
private func clampTrimRange(_ trimRange: ClosedRange<TimeInterval>, duration: TimeInterval) -> ClosedRange<TimeInterval> {
    let start = max(0, min(trimRange.lowerBound, duration))
    let end = max(start, min(trimRange.upperBound, duration))
    return start...end
}
|
||||
|
||||
/// Returns the slice of `waveform` covered by `trimRange`.
/// Falls back to the full waveform for degenerate inputs (no samples,
/// non-positive duration, or an empty computed slice).
private func trimWaveform(
    _ waveform: [Float],
    totalDuration: TimeInterval,
    trimRange: ClosedRange<TimeInterval>
) -> [Float] {
    guard !waveform.isEmpty, totalDuration > 0 else { return waveform }
    let sampleCount = Double(waveform.count)
    // floor/ceil widen the slice so boundary samples are never dropped.
    let first = max(0, Int(floor(trimRange.lowerBound / totalDuration * sampleCount)))
    let lastExclusive = min(waveform.count, Int(ceil(trimRange.upperBound / totalDuration * sampleCount)))
    if first >= lastExclusive { return waveform }
    return Array(waveform[first..<lastExclusive])
}
|
||||
|
||||
/// Exports the `trimRange` slice of the audio at `url` into a fresh temporary
/// .m4a file.
/// - Returns: the trimmed file's URL on success, or nil when the export
///   session cannot be created or the export fails/cancels.
private func exportTrimmedAudio(url: URL, trimRange: ClosedRange<TimeInterval>) async -> URL? {
    let asset = AVURLAsset(url: url)
    guard let export = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) else {
        return nil
    }
    // AVAssetExportSession is not Sendable; box it so the completion handler
    // may capture it under strict concurrency checking.
    final class ExportSessionBox: @unchecked Sendable {
        let session: AVAssetExportSession
        init(_ session: AVAssetExportSession) {
            self.session = session
        }
    }
    let box = ExportSessionBox(export)
    let outputURL = FileManager.default.temporaryDirectory
        .appendingPathComponent("rosetta_voice_trim_\(UUID().uuidString).m4a")
    // Defensive: the UUID name should be unique, but never export onto an
    // existing file.
    try? FileManager.default.removeItem(at: outputURL)

    box.session.outputURL = outputURL
    box.session.outputFileType = .m4a
    box.session.timeRange = CMTimeRange(
        start: CMTime(seconds: trimRange.lowerBound, preferredTimescale: 600),
        end: CMTime(seconds: trimRange.upperBound, preferredTimescale: 600)
    )

    // exportAsynchronously calls its completion exactly once, so the
    // continuation is resumed exactly once.
    return await withCheckedContinuation { continuation in
        box.session.exportAsynchronously {
            if box.session.status == .completed {
                continuation.resume(returning: outputURL)
            } else {
                // Fix: don't leave a partial output file behind on
                // failure/cancellation.
                try? FileManager.default.removeItem(at: outputURL)
                continuation.resume(returning: nil)
            }
        }
    }
}
|
||||
|
||||
/// Finishes the preview flow: stops the recorder, optionally trims the audio
/// to the user's `trimRange`, validates the minimum duration, then hands off
/// to the delegate for immediate sending.
/// - Parameters:
///   - trimRange: range selected on the preview panel's trim scrubber.
///   - sourceView: view used to seed the fly-to-bubble send transition.
private func finalizePreviewSend(trimRange: ClosedRange<TimeInterval>, sourceView: UIView?) {
    Task { @MainActor [weak self] in
        guard let self else { return }

        // Capture the final recording artifacts before stopping.
        // NOTE(review): assumes stopRecording() fires onFinished synchronously
        // so lastRecordedURL is populated for the guard below — confirm.
        audioRecorder.onFinished = { [weak self] url, duration, waveform in
            self?.lastRecordedURL = url
            self?.lastRecordedDuration = duration
            self?.lastRecordedWaveform = waveform
        }
        audioRecorder.stopRecording()

        guard let url = lastRecordedURL else {
            dismissOverlayAndRestore(skipAudioCleanup: true)
            return
        }

        var finalURL = url
        var finalDuration = lastRecordedDuration
        var finalWaveform = lastRecordedWaveform

        // Only export when the user actually moved the trim handles
        // (>10ms tolerance on either side).
        let normalizedTrim = clampTrimRange(trimRange, duration: lastRecordedDuration)
        let shouldTrim =
            normalizedTrim.lowerBound > 0.01 ||
            normalizedTrim.upperBound < lastRecordedDuration - 0.01
        if shouldTrim,
           let trimmedURL = await exportTrimmedAudio(url: url, trimRange: normalizedTrim) {
            finalURL = trimmedURL
            finalDuration = normalizedTrim.upperBound - normalizedTrim.lowerBound
            finalWaveform = trimWaveform(
                lastRecordedWaveform,
                totalDuration: lastRecordedDuration,
                trimRange: normalizedTrim
            )
            // The untrimmed original is no longer needed.
            try? FileManager.default.removeItem(at: url)
        }

        // Discard takes that end up shorter than the minimum after trimming.
        guard finalDuration >= minVoiceDuration else {
            try? FileManager.default.removeItem(at: finalURL)
            dismissOverlayAndRestore(skipAudioCleanup: true)
            return
        }

        lastRecordedURL = finalURL
        lastRecordedDuration = finalDuration
        lastRecordedWaveform = finalWaveform
        lastVoiceSendTransitionSource = captureVoiceSendTransition(from: sourceView)
        dismissOverlayAndRestore(skipAudioCleanup: true)
        delegate?.composerDidFinishRecording(self, sendImmediately: true)
    }
}
|
||||
}
|
||||
|
||||
// MARK: - VoiceRecordingPanelDelegate
|
||||
|
||||
extension ComposerView: VoiceRecordingPanelDelegate {
    /// Cancel tapped on the recording panel: discard the take, reset the mic
    /// button, and notify the delegate.
    func recordingPanelDidTapCancel(_ panel: VoiceRecordingPanel) {
        // NOTE(review): dismissOverlayAndRestore() already cancels the recorder
        // and tears down the chrome; the cancelRecordingWithDismissAnimation()
        // call below repeats most of that work — confirm both calls are
        // intended (this looks like a merge leftover).
        dismissOverlayAndRestore()
        cancelRecordingWithDismissAnimation()
        micButton.resetState()
        delegate?.composerDidCancelRecording(self)
    }
}
|
||||
|
||||
// MARK: - RecordingPreviewPanelDelegate
|
||||
|
||||
extension ComposerView: RecordingPreviewPanelDelegate {
    /// Send tapped in the preview: trim (if needed) and dispatch immediately.
    func previewPanelDidTapSend(_ panel: RecordingPreviewPanel, trimRange: ClosedRange<TimeInterval>) {
        finalizePreviewSend(trimRange: trimRange, sourceView: panel)
    }

    /// Delete tapped: discard the recorded file and reset all voice state.
    func previewPanelDidTapDelete(_ panel: RecordingPreviewPanel) {
        audioRecorder.cancelRecording()
        // The recorder may already have handed the file over; remove it too.
        if let url = lastRecordedURL {
            try? FileManager.default.removeItem(at: url)
        }
        lastRecordedURL = nil
        lastRecordedDuration = 0
        lastRecordedWaveform = []

        // Audio was cleaned up above; skip the recorder-side cleanup.
        dismissOverlayAndRestore(skipAudioCleanup: true)
        delegate?.composerDidCancelRecording(self)
    }

    /// "Record more" tapped: resume appending to the current take.
    func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel) {
        resumeRecordingFromPreview()
    }
}
|
||||
|
||||
237
Rosetta/Features/Chats/ChatDetail/MessageVoiceView.swift
Normal file
237
Rosetta/Features/Chats/ChatDetail/MessageVoiceView.swift
Normal file
@@ -0,0 +1,237 @@
|
||||
import UIKit
|
||||
|
||||
// MARK: - MessageVoiceView
|
||||
|
||||
/// Voice message bubble content: play/pause button + waveform bars + duration.
|
||||
/// Telegram parity from ChatMessageInteractiveFileNode.swift audit.
|
||||
final class MessageVoiceView: UIView {

    // MARK: - Subviews

    private let playButton = UIButton(type: .system)
    private let waveformView = WaveformView()
    private let durationLabel = UILabel()

    // MARK: - State

    private var messageId: String = ""
    private var attachmentId: String = ""
    private var isOutgoing = false

    // MARK: - Appearance

    // Single accent color for the play control so setupSubviews() and
    // configure() cannot drift apart (was duplicated inline three times).
    private static let accentBlue = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
    // One symbol configuration for BOTH the initial icon and later play/pause
    // swaps. Fix: the original used 18pt in setup but 16pt in
    // updatePlaybackState, which made the icon visibly shrink after the first
    // playback-state update.
    private static let iconConfig = UIImage.SymbolConfiguration(pointSize: 18, weight: .bold)

    // MARK: - Layout Constants (Telegram exact: ChatMessageInteractiveFileNode)

    private let playButtonSize: CGFloat = 44
    private let playButtonLeading: CGFloat = 3
    private let waveformX: CGFloat = 57 // Telegram: x=57
    private let waveformY: CGFloat = 1 // Telegram: y=1
    private let waveformHeight: CGFloat = 18 // Telegram: peakHeight=18 in component
    private let durationX: CGFloat = 56 // Telegram: x=56
    private let durationY: CGFloat = 22 // Telegram: y=22

    // MARK: - Init

    override init(frame: CGRect) {
        super.init(frame: frame)
        setupSubviews()
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    /// Fired when the play/pause button is tapped; playback itself is owned by
    /// the cell/player, not this view.
    var onPlayTapped: (() -> Void)?

    // MARK: - Setup

    private func setupSubviews() {
        playButton.setImage(UIImage(systemName: "play.fill", withConfiguration: Self.iconConfig), for: .normal)
        playButton.backgroundColor = Self.accentBlue
        playButton.tintColor = .white
        playButton.layer.cornerRadius = playButtonSize / 2
        playButton.clipsToBounds = true
        playButton.addTarget(self, action: #selector(playTapped), for: .touchUpInside)
        addSubview(playButton)

        waveformView.peakHeight = 18 // Telegram AudioWaveformComponent peak
        waveformView.gravity = .center
        addSubview(waveformView)

        durationLabel.font = .monospacedDigitSystemFont(ofSize: 11, weight: .regular)
        durationLabel.textColor = .white.withAlphaComponent(0.6)
        addSubview(durationLabel)
    }

    // MARK: - Layout (Telegram exact: play at x=3,y=1; waveform at x=57,y=1; duration at x=56,y=22)

    override func layoutSubviews() {
        super.layoutSubviews()
        let h = bounds.height

        // Play button: 44×44, Telegram x=3, vertically centered in the cell.
        playButton.frame = CGRect(
            x: playButtonLeading,
            y: max(0, (h - playButtonSize) / 2),
            width: playButtonSize,
            height: playButtonSize
        )

        // Waveform: from x=57 to near the right edge, height=18, y=1.
        let waveW = bounds.width - waveformX - 4
        waveformView.frame = CGRect(
            x: waveformX,
            y: waveformY,
            width: max(0, waveW),
            height: waveformHeight
        )

        // Duration label below the waveform at x=56, y=22.
        durationLabel.frame = CGRect(
            x: durationX,
            y: durationY,
            width: bounds.width - durationX - 4,
            height: 14
        )
    }

    // MARK: - Configure

    /// Populates the view for one voice attachment.
    /// - Parameters:
    ///   - preview: waveform payload — comma-separated floats or base64 5-bit
    ///     bitstream (see `decodeWaveform(from:)`).
    ///   - duration: total length in seconds, rendered as "m:ss".
    ///   - isOutgoing: switches play-button/duration colors for the bubble side.
    func configure(messageId: String, attachmentId: String, preview: String,
                   duration: TimeInterval, isOutgoing: Bool) {
        self.messageId = messageId
        self.attachmentId = attachmentId
        self.isOutgoing = isOutgoing

        // Decode waveform from preview; reset progress for a fresh cell.
        let samples = Self.decodeWaveform(from: preview)
        waveformView.setSamples(samples)
        waveformView.progress = 0

        // Duration label ("m:ss").
        let totalSeconds = Int(duration)
        let minutes = totalSeconds / 60
        let seconds = totalSeconds % 60
        durationLabel.text = String(format: "%d:%02d", minutes, seconds)

        // Style based on incoming/outgoing bubble.
        if isOutgoing {
            playButton.backgroundColor = .white
            playButton.tintColor = Self.accentBlue
            durationLabel.textColor = .white.withAlphaComponent(0.6)
        } else {
            playButton.backgroundColor = Self.accentBlue
            playButton.tintColor = .white
            // NOTE(review): translucent white duration text for INCOMING
            // bubbles — confirm legibility on the incoming bubble color.
            durationLabel.textColor = UIColor.white.withAlphaComponent(0.5)
        }
    }

    // MARK: - Play Action

    @objc private func playTapped() {
        onPlayTapped?()
    }

    /// Update play button icon and waveform progress from VoiceMessagePlayer state.
    func updatePlaybackState(isPlaying: Bool, progress: CGFloat) {
        let name = isPlaying ? "pause.fill" : "play.fill"
        // Reuse the setup-time configuration so the icon keeps its 18pt size.
        playButton.setImage(UIImage(systemName: name, withConfiguration: Self.iconConfig), for: .normal)
        waveformView.progress = progress
    }

    // MARK: - Waveform Decoding

    /// Decode waveform from preview string.
    /// Format: comma-separated float values or base64-encoded 5-bit bitstream.
    static func decodeWaveform(from preview: String) -> [Float] {
        guard !preview.isEmpty else { return [] }

        // Try comma-separated floats first. (Base64 never contains ',' or '.',
        // so this heuristic cannot misfire on the bitstream format.)
        if preview.contains(",") || preview.contains(".") {
            let parts = preview.split(separator: ",")
            let values = parts.compactMap { Float($0.trimmingCharacters(in: .whitespaces)) }
            if !values.isEmpty { return values }
        }

        // Try base64-encoded 5-bit bitstream (Telegram format).
        guard let data = Data(base64Encoded: preview), !data.isEmpty else {
            return []
        }
        return decode5BitWaveform(data)
    }

    /// Decode 5-bit packed waveform data (Telegram AudioWaveform format).
    /// Each sample is 5 bits (0-31), MSB-first, normalized to 0.0-1.0.
    private static func decode5BitWaveform(_ data: Data) -> [Float] {
        let bitCount = data.count * 8
        let sampleCount = bitCount / 5
        guard sampleCount > 0 else { return [] }

        var samples = [Float](repeating: 0, count: sampleCount)
        let bytes = [UInt8](data)

        for i in 0..<sampleCount {
            let bitOffset = i * 5
            let byteIndex = bitOffset / 8
            let bitIndex = bitOffset % 8

            var value: UInt8 = 0
            if bitIndex + 5 <= 8 {
                // Sample fits inside a single byte.
                value = (bytes[byteIndex] >> (8 - bitIndex - 5)) & 0x1F
            } else {
                // Sample straddles a byte boundary: take the low bits of the
                // first byte and the high bits of the next.
                let bitsFromFirst = 8 - bitIndex
                let bitsFromSecond = 5 - bitsFromFirst
                value = (bytes[byteIndex] & ((1 << bitsFromFirst) - 1)) << bitsFromSecond
                if byteIndex + 1 < bytes.count {
                    value |= bytes[byteIndex + 1] >> (8 - bitsFromSecond)
                }
            }

            samples[i] = Float(value) / 31.0
        }

        return samples
    }

    // MARK: - Waveform Encoding

    /// Encode waveform samples to a 5-bit packed base64 string (for sending).
    /// Resamples to at most 63 bars (max-pooling per bucket), then packs each
    /// bar as 5 bits MSB-first — the inverse of `decode5BitWaveform(_:)`.
    static func encodeWaveform(_ samples: [Float]) -> String {
        guard !samples.isEmpty else { return "" }

        // Resample to ~63 bars (Telegram standard), keeping each bucket's peak.
        let targetCount = min(63, samples.count)
        let step = Float(samples.count) / Float(targetCount)
        var resampled = [Float](repeating: 0, count: targetCount)
        for i in 0..<targetCount {
            let start = Int(Float(i) * step)
            let end = min(samples.count, Int(Float(i + 1) * step))
            guard start < end else { continue }
            resampled[i] = samples[start..<end].max() ?? 0
        }

        // Pack as 5-bit values.
        let bitCount = targetCount * 5
        let byteCount = (bitCount + 7) / 8
        var bytes = [UInt8](repeating: 0, count: byteCount)

        for i in 0..<targetCount {
            // Clamp into 0...31 before narrowing to 5 bits.
            let value = UInt8(min(31, max(0, resampled[i] * 31)))
            let bitOffset = i * 5
            let byteIndex = bitOffset / 8
            let bitIndex = bitOffset % 8

            if bitIndex + 5 <= 8 {
                bytes[byteIndex] |= value << (8 - bitIndex - 5)
            } else {
                let bitsInFirst = 8 - bitIndex
                bytes[byteIndex] |= value >> (5 - bitsInFirst)
                if byteIndex + 1 < bytes.count {
                    bytes[byteIndex + 1] |= value << (8 - (5 - bitsInFirst))
                }
            }
        }

        return Data(bytes).base64EncodedString()
    }
}
|
||||
@@ -190,6 +190,9 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
private let callArrowView = UIImageView()
|
||||
private let callBackButton = UIButton(type: .custom)
|
||||
|
||||
// Voice message
|
||||
private let voiceView = MessageVoiceView()
|
||||
|
||||
// Avatar-specific
|
||||
private let avatarImageView = UIImageView()
|
||||
|
||||
@@ -482,6 +485,9 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
avatarImageView.isHidden = true
|
||||
fileContainer.addSubview(avatarImageView)
|
||||
|
||||
voiceView.isHidden = true
|
||||
fileContainer.addSubview(voiceView)
|
||||
|
||||
bubbleView.addSubview(fileContainer)
|
||||
|
||||
// Group Invite Card
|
||||
@@ -855,6 +861,34 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
} else {
|
||||
callPhoneView?.tintColor = UIColor(red: 0.14, green: 0.54, blue: 0.90, alpha: 1)
|
||||
}
|
||||
} else if let voiceAtt = message.attachments.first(where: { $0.type == .voice }) {
|
||||
// Voice message: play button + waveform + duration
|
||||
// Preview format: "tag::duration::waveform_base64" or "duration::waveform_base64"
|
||||
let previewParts = Self.parseVoicePreview(voiceAtt.preview)
|
||||
voiceView.isHidden = false
|
||||
voiceView.frame = CGRect(x: 0, y: 0, width: fileContainer.bounds.width, height: 38)
|
||||
voiceView.configure(
|
||||
messageId: message.id,
|
||||
attachmentId: voiceAtt.id,
|
||||
preview: previewParts.waveform,
|
||||
duration: previewParts.duration,
|
||||
isOutgoing: layout.isOutgoing
|
||||
)
|
||||
let voiceId = voiceAtt.id
|
||||
let voiceFileName = voiceAtt.preview.components(separatedBy: "::").last ?? ""
|
||||
voiceView.onPlayTapped = { [weak self] in
|
||||
guard let self else { return }
|
||||
let fileName = "voice_\(Int(previewParts.duration))s.m4a"
|
||||
if let url = AttachmentCache.shared.fileURL(forAttachmentId: voiceId, fileName: fileName) {
|
||||
VoiceMessagePlayer.shared.play(messageId: message.id, fileURL: url)
|
||||
}
|
||||
}
|
||||
fileIconView.isHidden = true
|
||||
fileNameLabel.isHidden = true
|
||||
fileSizeLabel.isHidden = true
|
||||
callArrowView.isHidden = true
|
||||
callBackButton.isHidden = true
|
||||
avatarImageView.isHidden = true
|
||||
} else if let fileAtt = message.attachments.first(where: { $0.type == .file }) {
|
||||
let parsed = AttachmentPreviewCodec.parseFilePreview(fileAtt.preview)
|
||||
let isFileOutgoing = layout.isOutgoing
|
||||
@@ -1456,6 +1490,20 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
}
|
||||
|
||||
/// Telegram parity: file-type-specific icon name (same mapping as MessageFileView.swift).
|
||||
/// Parse voice preview: "tag::duration::waveform" or "duration::waveform"
|
||||
/// Parses a voice attachment preview of the form "tag::duration::waveform",
/// "duration::waveform", or a bare "duration" string.
/// Falls back to (0, preview) when no integer duration can be read.
private static func parseVoicePreview(_ preview: String) -> (duration: TimeInterval, waveform: String) {
    // components(separatedBy:) never returns an empty array, so [0] is safe.
    let components = preview.components(separatedBy: "::")
    if components.count >= 3, let seconds = Int(components[1]) {
        return (TimeInterval(seconds), components[2])
    }
    if components.count >= 2, let seconds = Int(components[0]) {
        return (TimeInterval(seconds), components[1])
    }
    if let seconds = Int(components[0]) {
        return (TimeInterval(seconds), "")
    }
    return (0, preview)
}
|
||||
|
||||
private static func fileIcon(for fileName: String) -> String {
|
||||
let ext = (fileName as NSString).pathExtension.lowercased()
|
||||
switch ext {
|
||||
@@ -2688,6 +2736,7 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
replyMessageId = nil
|
||||
highlightOverlay.alpha = 0
|
||||
fileContainer.isHidden = true
|
||||
voiceView.isHidden = true
|
||||
callArrowView.isHidden = true
|
||||
callBackButton.isHidden = true
|
||||
groupInviteContainer.isHidden = true
|
||||
@@ -2698,6 +2747,8 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
avatarImageView.image = nil
|
||||
avatarImageView.isHidden = true
|
||||
fileIconView.isHidden = false
|
||||
fileNameLabel.isHidden = false
|
||||
fileSizeLabel.isHidden = false
|
||||
forwardLabel.isHidden = true
|
||||
forwardAvatarView.isHidden = true
|
||||
forwardNameLabel.isHidden = true
|
||||
@@ -2804,6 +2855,15 @@ final class NativeMessageCell: UICollectionViewCell {
|
||||
selectionCheckContainer.layer.add(anim, forKey: "checkBounce")
|
||||
}
|
||||
}
|
||||
|
||||
/// Window-space frame of the voice content, used as the voice-send transition
/// target. Returns nil when this cell is not currently showing a voice message.
func voiceTransitionTargetFrame(in window: UIWindow) -> CGRect? {
    guard !voiceView.isHidden else { return nil }
    return voiceView.convert(voiceView.bounds, to: window)
}
|
||||
|
||||
/// Window-space frame of the whole bubble (fallback transition target when the
/// voice view is hidden).
func bubbleFrameInWindow(_ window: UIWindow) -> CGRect {
    bubbleView.convert(bubbleView.bounds, to: window)
}
|
||||
}
|
||||
|
||||
// MARK: - UIGestureRecognizerDelegate
|
||||
|
||||
@@ -1433,7 +1433,30 @@ extension NativeMessageListController: ComposerViewDelegate {
|
||||
}
|
||||
|
||||
/// Sends the just-recorded voice note as an attachment-only message and, when
/// a transition source was captured, animates the snapshot into the new bubble.
func composerDidFinishRecording(_ composer: ComposerView, sendImmediately: Bool) {
    // Only the send-immediately path is handled. Loading the whole file into
    // memory is acceptable for short voice notes.
    // NOTE(review): a missing URL or unreadable file is silently dropped here —
    // confirm that is intended.
    guard sendImmediately,
          let url = composer.lastRecordedURL,
          let data = try? Data(contentsOf: url) else { return }
    let transitionSource = composer.consumeVoiceSendTransitionSource()
    let pending = PendingAttachment.fromVoice(
        data: data,
        duration: composer.lastRecordedDuration,
        waveform: composer.lastRecordedWaveform
    )
    // Capture opponent identity before hopping into the Task.
    let pubKey = config.opponentPublicKey
    let title = config.opponentTitle
    let username = config.opponentUsername
    Task { @MainActor in
        // try? — messageId stays nil on failure, which also skips the animation.
        let messageId = try? await SessionManager.shared.sendMessageWithAttachments(
            text: "",
            attachments: [pending],
            toPublicKey: pubKey,
            opponentTitle: title,
            opponentUsername: username
        )
        if let source = transitionSource, let messageId {
            animateVoiceSendTransition(source: source, messageId: messageId)
        }
    }
}
|
||||
|
||||
func composerDidCancelRecording(_ composer: ComposerView) {
|
||||
@@ -1443,6 +1466,70 @@ extension NativeMessageListController: ComposerViewDelegate {
|
||||
func composerDidLockRecording(_ composer: ComposerView) {
|
||||
// Recording locked — UI handled by ComposerView
|
||||
}
|
||||
|
||||
/// Places the captured composer snapshot into the window and kicks off the
/// retry loop that flies it into the freshly inserted voice bubble.
private func animateVoiceSendTransition(source: VoiceSendTransitionSource, messageId: String) {
    guard let window = view.window else { return }
    let snapshot = source.snapshotView
    snapshot.frame = source.sourceFrameInWindow
    snapshot.layer.cornerRadius = source.cornerRadius
    snapshot.layer.cornerCurve = .continuous
    snapshot.clipsToBounds = true
    window.addSubview(snapshot)

    resolveVoiceTargetFrame(messageId: messageId, attempt: 0, snapshot: snapshot)
}
|
||||
|
||||
/// Polls for the voice bubble's on-screen frame (the cell may not exist until
/// the diffable snapshot applies), then flies `snapshot` into it.
/// Retries every 60ms up to 12 times; past that the snapshot just fades out.
private func resolveVoiceTargetFrame(messageId: String, attempt: Int, snapshot: UIView) {
    guard let window = view.window else {
        snapshot.removeFromSuperview()
        return
    }
    let maxAttempts = 12
    guard attempt <= maxAttempts else {
        // Give up gracefully: shrink-and-fade instead of jumping.
        UIView.animate(withDuration: 0.16, animations: {
            snapshot.alpha = 0
            snapshot.transform = CGAffineTransform(scaleX: 0.9, y: 0.9)
        }, completion: { _ in
            snapshot.removeFromSuperview()
        })
        return
    }

    let targetFrame = targetFrameForVoiceMessage(messageId: messageId, in: window)
    guard let targetFrame else {
        // Cell not on screen yet — retry shortly.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.06) { [weak self] in
            self?.resolveVoiceTargetFrame(messageId: messageId, attempt: attempt + 1, snapshot: snapshot)
        }
        return
    }

    // Fly into place, then fade out over the real bubble.
    UIView.animate(withDuration: 0.34, delay: 0, options: [.curveEaseInOut]) {
        snapshot.frame = targetFrame
        snapshot.layer.cornerRadius = 12
        snapshot.alpha = 0.84
    } completion: { _ in
        UIView.animate(withDuration: 0.12, animations: {
            snapshot.alpha = 0
        }, completion: { _ in
            snapshot.removeFromSuperview()
        })
    }
}
|
||||
|
||||
/// Locates the cell for `messageId` and returns its voice view's window-space
/// frame (falling back to the whole bubble). Scrolls the item into view first
/// if its cell is not yet realized.
/// NOTE(review): assumes a single collection-view section (section 0) — confirm.
private func targetFrameForVoiceMessage(messageId: String, in window: UIWindow) -> CGRect? {
    let snapshot = dataSource.snapshot()
    guard let itemIndex = snapshot.indexOfItem(messageId) else { return nil }
    let indexPath = IndexPath(item: itemIndex, section: 0)

    // Force the cell to exist so we can measure it.
    if collectionView.cellForItem(at: indexPath) == nil {
        collectionView.scrollToItem(at: indexPath, at: .bottom, animated: false)
        collectionView.layoutIfNeeded()
    }
    guard let cell = collectionView.cellForItem(at: indexPath) as? NativeMessageCell else {
        return nil
    }
    return cell.voiceTransitionTargetFrame(in: window) ?? cell.bubbleFrameInWindow(window)
}
|
||||
}
|
||||
|
||||
// MARK: - PreSizedCell
|
||||
@@ -1504,7 +1591,7 @@ struct NativeMessageListView: UIViewControllerRepresentable {
|
||||
let actions: MessageCellActions
|
||||
let hasMoreMessages: Bool
|
||||
let firstUnreadMessageId: String?
|
||||
/// true = create UIKit ComposerView (iOS < 26). false = iOS 26+ (SwiftUI overlay).
|
||||
/// true = create UIKit ComposerView bridge. false = no composer (system chats).
|
||||
let useUIKitComposer: Bool
|
||||
/// Empty chat state data (iOS < 26). nil = no empty state management.
|
||||
var emptyChatInfo: EmptyChatInfo?
|
||||
|
||||
@@ -27,6 +27,9 @@ struct PendingAttachment: Identifiable, Sendable {
|
||||
/// File size in bytes (files only). `nil` for images.
|
||||
let fileSize: Int?
|
||||
|
||||
/// Voice preview: "duration::waveform_base64" (voice only).
|
||||
var voicePreview: String? = nil
|
||||
|
||||
// MARK: - Factory
|
||||
|
||||
/// Creates a PendingAttachment from a UIImage (compressed to JPEG).
|
||||
@@ -62,13 +65,17 @@ struct PendingAttachment: Identifiable, Sendable {
|
||||
/// Creates a `PendingAttachment` from a finished voice recording.
///
/// - Parameters:
///   - data: Encoded audio payload (m4a container, per the generated file name).
///   - duration: Recording length in seconds; truncated to whole seconds for display.
///   - waveform: Normalized sample levels in `0...1` used for the preview waveform.
/// - Returns: A `.voice` attachment whose `voicePreview` packs
///   `"duration::waveform_base64"` for cheap preview extraction.
static func fromVoice(data: Data, duration: TimeInterval, waveform: [Float]) -> PendingAttachment {
    // Waveform is encoded 5-bit + base64 so the preview never needs the raw samples.
    let waveformBase64 = MessageVoiceView.encodeWaveform(waveform)
    return PendingAttachment(
        id: generateRandomId(),
        type: .voice,
        data: data,
        thumbnail: nil,
        // Duration is surfaced in the file name as well, for file-style fallbacks.
        fileName: "voice_\(Int(duration))s.m4a",
        fileSize: data.count,
        voicePreview: "\(Int(duration))::\(waveformBase64)"
    )
}
|
||||
|
||||
|
||||
228
Rosetta/Features/Chats/ChatDetail/RecordingLockView.swift
Normal file
228
Rosetta/Features/Chats/ChatDetail/RecordingLockView.swift
Normal file
@@ -0,0 +1,228 @@
|
||||
import QuartzCore
|
||||
import UIKit
|
||||
|
||||
// MARK: - RecordingLockView
|
||||
|
||||
/// Lock indicator shown above mic button during voice recording.
|
||||
/// Telegram parity from TGModernConversationInputMicButton.m:
|
||||
/// - Frame: 40×72pt, positioned 122pt above mic center
|
||||
/// - Padlock icon (CAShapeLayer) + upward arrow
|
||||
/// - Spring entry: damping 0.55, duration 0.5s
|
||||
/// - Lockness progress: arrow fades, panel shrinks
|
||||
/// Lock indicator shown above the mic button during voice recording.
/// Telegram parity from TGModernConversationInputMicButton.m:
/// - Frame: 40×72pt, positioned 122pt above mic center
/// - Padlock icon (CAShapeLayer) + upward arrow
/// - Spring entry: damping 0.55, duration 0.5s
/// - Lockness progress: arrow fades, panel shrinks
final class RecordingLockView: UIView {

    // MARK: - Layout Constants (Telegram exact)

    private let panelWidth: CGFloat = 40
    private let panelFullHeight: CGFloat = 72
    private let panelLockedHeight: CGFloat = 40 // 72 - 32
    private let verticalOffset: CGFloat = 122 // above mic center
    private let cornerRadius: CGFloat = 20

    // MARK: - Subviews

    private let backgroundView = UIView()
    private let lockIcon = CAShapeLayer()     // shackle (stroked arc)
    private let arrowLayer = CAShapeLayer()   // upward chevron hint
    private let stopButton = UIButton(type: .system)
    private var onStopTap: (() -> Void)?

    // MARK: - Init

    override init(frame: CGRect) {
        super.init(frame: frame)
        // Passive indicator while tracking the drag; re-enabled only once the
        // tappable stop button is shown (see showStopButton).
        isUserInteractionEnabled = false
        setupBackground()
        setupLockIcon()
        setupArrow()
        setupStopButton()
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    // MARK: - Setup

    /// Dark rounded capsule behind the lock glyphs (hairline border).
    private func setupBackground() {
        backgroundView.backgroundColor = UIColor(white: 0.15, alpha: 0.9)
        backgroundView.layer.cornerRadius = cornerRadius
        backgroundView.layer.cornerCurve = .continuous
        backgroundView.layer.borderWidth = 1.0 / UIScreen.main.scale
        backgroundView.layer.borderColor = UIColor(white: 0.3, alpha: 0.5).cgColor
        addSubview(backgroundView)
    }

    /// Draws a simple padlock: filled rounded-rect body plus a stroked shackle.
    /// Only the shackle lives in `lockIcon` so it can translate independently
    /// as lockness progresses (NOTE(review): the body layer deliberately stays
    /// put — confirm that matches the intended Telegram animation).
    private func setupLockIcon() {
        let path = UIBezierPath()

        // Shackle (arc above body)
        let shackleW: CGFloat = 10
        let shackleH: CGFloat = 8
        let bodyTop: CGFloat = 10
        let centerX: CGFloat = panelWidth / 2
        path.move(to: CGPoint(x: centerX - shackleW / 2, y: bodyTop))
        path.addLine(to: CGPoint(x: centerX - shackleW / 2, y: bodyTop - shackleH + 3))
        path.addCurve(
            to: CGPoint(x: centerX + shackleW / 2, y: bodyTop - shackleH + 3),
            controlPoint1: CGPoint(x: centerX - shackleW / 2, y: bodyTop - shackleH - 2),
            controlPoint2: CGPoint(x: centerX + shackleW / 2, y: bodyTop - shackleH - 2)
        )
        path.addLine(to: CGPoint(x: centerX + shackleW / 2, y: bodyTop))

        lockIcon.path = path.cgPath
        lockIcon.strokeColor = UIColor.white.cgColor
        lockIcon.fillColor = UIColor.clear.cgColor
        lockIcon.lineWidth = 1.5
        lockIcon.lineCap = .round

        // Body (rounded rect below shackle)
        let bodyW: CGFloat = 14
        let bodyH: CGFloat = 10
        let bodyPath = UIBezierPath(
            roundedRect: CGRect(
                x: centerX - bodyW / 2,
                y: bodyTop,
                width: bodyW,
                height: bodyH
            ),
            cornerRadius: 2
        )
        let bodyLayer = CAShapeLayer()
        bodyLayer.path = bodyPath.cgPath
        bodyLayer.fillColor = UIColor.white.cgColor
        layer.addSublayer(bodyLayer)
        layer.addSublayer(lockIcon)
    }

    /// Upward chevron below the padlock hinting "slide up to lock".
    private func setupArrow() {
        let arrowPath = UIBezierPath()
        let centerX = panelWidth / 2
        let arrowY: CGFloat = 30
        arrowPath.move(to: CGPoint(x: centerX - 5, y: arrowY + 5))
        arrowPath.addLine(to: CGPoint(x: centerX, y: arrowY))
        arrowPath.addLine(to: CGPoint(x: centerX + 5, y: arrowY + 5))

        arrowLayer.path = arrowPath.cgPath
        arrowLayer.strokeColor = UIColor.white.withAlphaComponent(0.6).cgColor
        arrowLayer.fillColor = UIColor.clear.cgColor
        arrowLayer.lineWidth = 1.5
        arrowLayer.lineCap = .round
        arrowLayer.lineJoin = .round
        layer.addSublayer(arrowLayer)
    }

    /// Red stop button revealed once recording is hands-free locked.
    private func setupStopButton() {
        stopButton.isHidden = true
        stopButton.alpha = 0
        stopButton.backgroundColor = UIColor(red: 1, green: 45/255.0, blue: 85/255.0, alpha: 1)
        stopButton.tintColor = .white
        stopButton.layer.cornerRadius = 14
        stopButton.clipsToBounds = true
        let iconConfig = UIImage.SymbolConfiguration(pointSize: 12, weight: .bold)
        stopButton.setImage(UIImage(systemName: "stop.fill", withConfiguration: iconConfig), for: .normal)
        stopButton.addTarget(self, action: #selector(stopTapped), for: .touchUpInside)
        stopButton.isAccessibilityElement = true
        stopButton.accessibilityLabel = "Stop recording"
        stopButton.accessibilityHint = "Stops voice recording and opens preview."
        addSubview(stopButton)
    }

    // MARK: - Present

    /// Position above anchor (mic button) and animate in with spring
    /// (damping 0.55, duration 0.5s — Telegram values).
    func present(anchorCenter: CGPoint, in parent: UIView) {
        frame = CGRect(
            x: floor(anchorCenter.x - panelWidth / 2),
            y: floor(anchorCenter.y - verticalOffset - panelFullHeight / 2),
            width: panelWidth,
            height: panelFullHeight
        )
        backgroundView.frame = bounds
        stopButton.frame = CGRect(x: floor((panelWidth - 28) / 2), y: panelFullHeight - 34, width: 28, height: 28)

        parent.addSubview(self)

        // Start offscreen below, spring up into place.
        transform = CGAffineTransform(translationX: 0, y: 100)
        alpha = 0

        UIView.animate(
            withDuration: 0.5, delay: 0,
            usingSpringWithDamping: 0.55,
            initialSpringVelocity: 0, options: []
        ) {
            self.transform = .identity
            self.alpha = 1
        }
    }

    // MARK: - Lockness Update

    /// Update lock progress (0 = idle, 1 = locked).
    /// Telegram: arrow alpha = max(0, 1 - lockness * 1.6); the shackle layer
    /// translates up by 16pt at full lockness. Both layer mutations are
    /// applied inside a single no-implicit-animation transaction so the
    /// per-frame drag updates don't trigger CA's default 0.25s actions.
    func updateLockness(_ lockness: CGFloat) {
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        arrowLayer.opacity = Float(max(0, 1 - lockness * 1.6))
        lockIcon.transform = CATransform3DMakeTranslation(0, -16 * lockness, 0)
        CATransaction.commit()
    }

    // MARK: - Animate Lock Complete

    /// Shrink and dismiss the lock panel after lock is committed.
    /// Telegram: panel height 72→40, then slides down off-screen.
    func animateLockComplete() {
        UIView.animate(withDuration: 0.2) {
            self.arrowLayer.opacity = 0
            self.lockIcon.transform = CATransform3DMakeTranslation(0, -16, 0)
        }

        // Slide down and fade after 0.45s.
        UIView.animate(withDuration: 0.2, delay: 0.45, options: []) {
            self.transform = CGAffineTransform(translationX: 0, y: 120)
        } completion: { _ in
            self.alpha = 0
            self.removeFromSuperview()
        }
    }

    /// Reveal the stop button (hands-free mode) and settle the lock glyph.
    /// Enables user interaction: the view is created with
    /// `isUserInteractionEnabled = false`, which would otherwise swallow
    /// every tap destined for the stop button.
    func showStopButton(onTap: @escaping () -> Void) {
        onStopTap = onTap
        isUserInteractionEnabled = true   // FIX: allow stopButton to receive taps
        stopButton.isHidden = false
        stopButton.transform = CGAffineTransform(scaleX: 0.5, y: 0.5)

        UIView.animate(withDuration: 0.2) {
            self.arrowLayer.opacity = 0
            self.lockIcon.transform = CATransform3DMakeTranslation(0, -16, 0)
        }

        UIView.animate(withDuration: 0.2, delay: 0.02, options: [.curveEaseOut]) {
            self.stopButton.alpha = 1
            self.stopButton.transform = .identity
        }
    }

    // MARK: - Dismiss

    /// Fade/scale out and detach from the hierarchy.
    func dismiss() {
        UIView.animate(withDuration: 0.18) {
            self.alpha = 0
            self.transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
        } completion: { _ in
            self.removeFromSuperview()
        }
    }

    @objc private func stopTapped() {
        onStopTap?()
    }
}
|
||||
@@ -5,7 +5,7 @@ import UIKit
|
||||
|
||||
enum VoiceRecordingState {
|
||||
case idle
|
||||
case waiting // finger down, waiting for threshold (0.15s)
|
||||
case waiting // finger down, waiting for threshold (0.19s)
|
||||
case recording // actively recording, finger held
|
||||
case locked // slid up past lock threshold, finger released
|
||||
case cancelled // slid left past cancel threshold
|
||||
@@ -16,7 +16,13 @@ enum VoiceRecordingState {
|
||||
|
||||
@MainActor
|
||||
protocol RecordingMicButtonDelegate: AnyObject {
|
||||
/// Recording threshold reached (0.15s hold). Start actual recording.
|
||||
/// Finger down, hold timer armed.
|
||||
func micButtonRecordingArmed(_ button: RecordingMicButton)
|
||||
|
||||
/// Hold was cancelled before threshold (tap / move / system cancel).
|
||||
func micButtonRecordingArmingCancelled(_ button: RecordingMicButton)
|
||||
|
||||
/// Recording threshold reached (0.19s hold). Start actual recording.
|
||||
func micButtonRecordingBegan(_ button: RecordingMicButton)
|
||||
|
||||
/// Finger released normally → send the recording.
|
||||
@@ -28,12 +34,9 @@ protocol RecordingMicButtonDelegate: AnyObject {
|
||||
/// Slid up past lock threshold → lock into hands-free recording.
|
||||
func micButtonRecordingLocked(_ button: RecordingMicButton)
|
||||
|
||||
/// Horizontal slide translation update for cancel indicator.
|
||||
/// Value is negative (slide left), range roughly -150..0.
|
||||
func micButtonCancelTranslationChanged(_ button: RecordingMicButton, translation: CGFloat)
|
||||
|
||||
/// Vertical lock progress update (0..1).
|
||||
func micButtonLockProgressChanged(_ button: RecordingMicButton, progress: CGFloat)
|
||||
/// Raw drag distances for overlay transforms (Telegram: continueTrackingWithTouch).
|
||||
/// distanceX: negative = left (cancel), distanceY: negative = up (lock)
|
||||
func micButtonDragUpdate(_ button: RecordingMicButton, distanceX: CGFloat, distanceY: CGFloat)
|
||||
}
|
||||
|
||||
// MARK: - RecordingMicButton
|
||||
@@ -42,9 +45,10 @@ protocol RecordingMicButtonDelegate: AnyObject {
|
||||
/// Ported from Telegram's `TGModernConversationInputMicButton`.
|
||||
///
|
||||
/// Gesture mechanics:
|
||||
/// - Long press (0.15s) → begin recording
|
||||
/// - Long press (0.19s) → begin recording
|
||||
/// - Slide left → cancel (threshold: -150px, haptic at -100px)
|
||||
/// - Slide up → lock (threshold: -110px, haptic at -60px)
|
||||
/// - Release velocity gate: <-400 px/s on X/Y commits cancel/lock
|
||||
/// - Release → finish (send)
|
||||
final class RecordingMicButton: UIControl {
|
||||
|
||||
@@ -54,15 +58,21 @@ final class RecordingMicButton: UIControl {
|
||||
|
||||
// MARK: - Gesture Thresholds (Telegram parity)
|
||||
|
||||
private let holdThreshold: TimeInterval = 0.15
|
||||
private let holdThreshold: TimeInterval = 0.19
|
||||
private let cancelDistanceThreshold: CGFloat = -150
|
||||
private let cancelHapticThreshold: CGFloat = -100
|
||||
private let lockDistanceThreshold: CGFloat = -110
|
||||
private let lockHapticThreshold: CGFloat = -60
|
||||
private let velocityGate: CGFloat = -400
|
||||
private let preHoldCancelDistance: CGFloat = 10
|
||||
|
||||
// MARK: - Tracking State
|
||||
|
||||
private var touchStartLocation: CGPoint = .zero
|
||||
private var lastTouchLocation: CGPoint = .zero
|
||||
private var lastTouchTimestamp: TimeInterval = 0
|
||||
private var velocityX: CGFloat = 0
|
||||
private var velocityY: CGFloat = 0
|
||||
private var holdTimer: Timer?
|
||||
private var displayLink: CADisplayLink?
|
||||
|
||||
@@ -96,6 +106,10 @@ final class RecordingMicButton: UIControl {
|
||||
guard recordingState == .idle else { return false }
|
||||
|
||||
touchStartLocation = touch.location(in: window)
|
||||
lastTouchLocation = touchStartLocation
|
||||
lastTouchTimestamp = touch.timestamp
|
||||
velocityX = 0
|
||||
velocityY = 0
|
||||
recordingState = .waiting
|
||||
targetCancelTranslation = 0
|
||||
targetLockTranslation = 0
|
||||
@@ -105,12 +119,11 @@ final class RecordingMicButton: UIControl {
|
||||
didLockHaptic = false
|
||||
|
||||
impactFeedback.prepare()
|
||||
recordingDelegate?.micButtonRecordingArmed(self)
|
||||
|
||||
// Start hold timer — after 0.15s we begin recording
|
||||
// Start hold timer — after 0.19s we begin recording
|
||||
holdTimer = Timer.scheduledTimer(withTimeInterval: holdThreshold, repeats: false) { [weak self] _ in
|
||||
Task { @MainActor in
|
||||
self?.beginRecording()
|
||||
}
|
||||
self?.beginRecording()
|
||||
}
|
||||
|
||||
return true
|
||||
@@ -122,14 +135,16 @@ final class RecordingMicButton: UIControl {
|
||||
let location = touch.location(in: window)
|
||||
let distanceX = min(0, location.x - touchStartLocation.x)
|
||||
let distanceY = min(0, location.y - touchStartLocation.y)
|
||||
updateVelocity(with: touch, at: location)
|
||||
|
||||
// Check if we moved enough to cancel the hold timer (before recording started)
|
||||
if recordingState == .waiting {
|
||||
let totalDistance = sqrt(distanceX * distanceX + distanceY * distanceY)
|
||||
if totalDistance > 10 {
|
||||
if totalDistance > preHoldCancelDistance {
|
||||
// Movement before threshold — cancel the timer, don't start recording
|
||||
cancelHoldTimer()
|
||||
recordingState = .idle
|
||||
recordingDelegate?.micButtonRecordingArmingCancelled(self)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -171,21 +186,31 @@ final class RecordingMicButton: UIControl {
|
||||
// Released before hold threshold — just a tap
|
||||
cancelHoldTimer()
|
||||
recordingState = .idle
|
||||
recordingDelegate?.micButtonRecordingArmingCancelled(self)
|
||||
return
|
||||
}
|
||||
|
||||
if recordingState == .recording {
|
||||
// Check velocity for quick flick gestures
|
||||
// Telegram velocity gate: fast flick left/up commits immediately.
|
||||
if velocityX < velocityGate {
|
||||
commitCancel()
|
||||
return
|
||||
}
|
||||
if velocityY < velocityGate {
|
||||
commitLock()
|
||||
return
|
||||
}
|
||||
|
||||
// Fallback to distance thresholds on release.
|
||||
if let touch {
|
||||
let location = touch.location(in: window)
|
||||
let distanceX = location.x - touchStartLocation.x
|
||||
let distanceY = location.y - touchStartLocation.y
|
||||
|
||||
if distanceX < cancelDistanceThreshold / 2 {
|
||||
if distanceX < cancelDistanceThreshold {
|
||||
commitCancel()
|
||||
return
|
||||
}
|
||||
if distanceY < lockDistanceThreshold / 2 {
|
||||
if distanceY < lockDistanceThreshold {
|
||||
commitLock()
|
||||
return
|
||||
}
|
||||
@@ -203,10 +228,15 @@ final class RecordingMicButton: UIControl {
|
||||
} else {
|
||||
cancelHoldTimer()
|
||||
recordingState = .idle
|
||||
recordingDelegate?.micButtonRecordingArmingCancelled(self)
|
||||
}
|
||||
stopDisplayLink()
|
||||
}
|
||||
|
||||
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
|
||||
bounds.insetBy(dx: -10, dy: 0).contains(point)
|
||||
}
|
||||
|
||||
// MARK: - State Transitions
|
||||
|
||||
private func beginRecording() {
|
||||
@@ -248,6 +278,8 @@ final class RecordingMicButton: UIControl {
|
||||
cancelHoldTimer()
|
||||
stopDisplayLink()
|
||||
recordingState = .idle
|
||||
velocityX = 0
|
||||
velocityY = 0
|
||||
targetCancelTranslation = 0
|
||||
targetLockTranslation = 0
|
||||
currentCancelTranslation = 0
|
||||
@@ -269,16 +301,12 @@ final class RecordingMicButton: UIControl {
|
||||
}
|
||||
|
||||
@objc private func displayLinkUpdate() {
|
||||
// Smooth interpolation (Telegram: 0.7/0.3 blend)
|
||||
currentCancelTranslation = currentCancelTranslation * 0.7 + targetCancelTranslation * 0.3
|
||||
currentLockTranslation = currentLockTranslation * 0.7 + targetLockTranslation * 0.3
|
||||
// Telegram exact: 0.7/0.3 blend (TGModernConversationInputMicButton.m line 918-919)
|
||||
currentCancelTranslation = min(0, currentCancelTranslation * 0.7 + targetCancelTranslation * 0.3)
|
||||
currentLockTranslation = min(0, currentLockTranslation * 0.7 + targetLockTranslation * 0.3)
|
||||
|
||||
// Report cancel translation
|
||||
recordingDelegate?.micButtonCancelTranslationChanged(self, translation: currentCancelTranslation)
|
||||
|
||||
// Report lock progress (0..1)
|
||||
let lockProgress = min(1.0, abs(currentLockTranslation) / abs(lockDistanceThreshold))
|
||||
recordingDelegate?.micButtonLockProgressChanged(self, progress: lockProgress)
|
||||
// Report raw smoothed distances for overlay transforms
|
||||
recordingDelegate?.micButtonDragUpdate(self, distanceX: currentCancelTranslation, distanceY: currentLockTranslation)
|
||||
}
|
||||
|
||||
// MARK: - Helpers
|
||||
@@ -287,4 +315,12 @@ final class RecordingMicButton: UIControl {
|
||||
holdTimer?.invalidate()
|
||||
holdTimer = nil
|
||||
}
|
||||
|
||||
private func updateVelocity(with touch: UITouch, at location: CGPoint) {
|
||||
let dt = max(0.001, touch.timestamp - lastTouchTimestamp)
|
||||
velocityX = (location.x - lastTouchLocation.x) / dt
|
||||
velocityY = (location.y - lastTouchLocation.y) / dt
|
||||
lastTouchLocation = location
|
||||
lastTouchTimestamp = touch.timestamp
|
||||
}
|
||||
}
|
||||
|
||||
406
Rosetta/Features/Chats/ChatDetail/RecordingPreviewPanel.swift
Normal file
406
Rosetta/Features/Chats/ChatDetail/RecordingPreviewPanel.swift
Normal file
@@ -0,0 +1,406 @@
|
||||
import AVFAudio
|
||||
import QuartzCore
|
||||
import UIKit
|
||||
|
||||
// MARK: - RecordingPreviewPanelDelegate
|
||||
|
||||
@MainActor
|
||||
protocol RecordingPreviewPanelDelegate: AnyObject {
|
||||
func previewPanelDidTapSend(_ panel: RecordingPreviewPanel, trimRange: ClosedRange<TimeInterval>)
|
||||
func previewPanelDidTapDelete(_ panel: RecordingPreviewPanel)
|
||||
func previewPanelDidTapRecordMore(_ panel: RecordingPreviewPanel)
|
||||
}
|
||||
|
||||
// MARK: - RecordingPreviewPanel
|
||||
|
||||
/// Preview panel shown after `lock -> stop`, before sending voice message.
|
||||
/// Includes waveform scrubbing + trim handles + send/delete/record-more controls.
|
||||
/// Preview panel shown after `lock -> stop`, before sending a voice message.
/// Provides in-place playback, waveform scrubbing, trim handles, and
/// send / delete / record-more controls.
final class RecordingPreviewPanel: UIView {

    /// What the active pan gesture is manipulating.
    private enum PanMode {
        case scrub       // dragging the playhead inside the trim range
        case trimLeft    // dragging the left trim handle
        case trimRight   // dragging the right trim handle
    }

    weak var delegate: RecordingPreviewPanelDelegate?

    // MARK: - Subviews

    private let glassBackground = TelegramGlassUIView(frame: .zero)
    private let deleteButton = UIButton(type: .system)
    private let playButton = UIButton(type: .system)
    private let waveformContainer = UIView()
    private let waveformView = WaveformView()
    private let leftTrimMask = UIView()      // dims waveform left of trimStart
    private let rightTrimMask = UIView()     // dims waveform right of trimEnd
    private let leftTrimHandle = UIView()
    private let rightTrimHandle = UIView()
    private let durationLabel = UILabel()
    private let recordMoreButton = UIButton(type: .system)
    private let sendButton = UIButton(type: .system)

    // MARK: - Audio Playback

    // NOTE(review): CADisplayLink retains its target, so while a link is live
    // this panel cannot deallocate; cleanup relies on stopPlayback() being
    // called from animateOut()/actions before release.
    private var audioPlayer: AVAudioPlayer?
    private var displayLink: CADisplayLink?
    private var isPlaying = false
    private let fileURL: URL
    private let duration: TimeInterval
    private let waveformSamples: [Float]

    // MARK: - Trim / Scrub

    private var trimStart: TimeInterval = 0
    private var trimEnd: TimeInterval = 0
    private var minTrimDuration: TimeInterval = 1   // recomputed in layoutSubviews from pixel width
    private var activePanMode: PanMode?

    /// Currently selected trim window, passed to the delegate on send.
    var selectedTrimRange: ClosedRange<TimeInterval> {
        trimStart...trimEnd
    }

    // MARK: - Init

    /// - Parameters:
    ///   - fileURL: Local audio file to preview.
    ///   - duration: Total recording length in seconds (clamped to >= 0).
    ///   - waveform: Normalized sample levels for the waveform view.
    init(frame: CGRect, fileURL: URL, duration: TimeInterval, waveform: [Float]) {
        self.fileURL = fileURL
        self.duration = max(0, duration)
        self.waveformSamples = waveform
        super.init(frame: frame)
        self.trimEnd = self.duration
        clipsToBounds = true
        layer.cornerRadius = 21
        layer.cornerCurve = .continuous
        setupSubviews()
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    // MARK: - Setup

    private func setupSubviews() {
        glassBackground.fixedCornerRadius = 21
        glassBackground.isUserInteractionEnabled = false
        addSubview(glassBackground)

        let trashConfig = UIImage.SymbolConfiguration(pointSize: 18, weight: .medium)
        deleteButton.setImage(UIImage(systemName: "trash", withConfiguration: trashConfig), for: .normal)
        deleteButton.tintColor = UIColor(red: 1, green: 45/255.0, blue: 85/255.0, alpha: 1)
        deleteButton.addTarget(self, action: #selector(deleteTapped), for: .touchUpInside)
        deleteButton.isAccessibilityElement = true
        deleteButton.accessibilityLabel = "Delete recording"
        deleteButton.accessibilityHint = "Deletes the current voice draft."
        addSubview(deleteButton)

        configurePlayButton(playing: false)
        playButton.addTarget(self, action: #selector(playTapped), for: .touchUpInside)
        playButton.isAccessibilityElement = true
        playButton.accessibilityLabel = "Play recording"
        playButton.accessibilityHint = "Plays or pauses voice preview."
        addSubview(playButton)

        waveformContainer.clipsToBounds = true
        waveformContainer.layer.cornerRadius = 6
        addSubview(waveformContainer)

        waveformView.setSamples(waveformSamples)
        waveformView.progress = 0
        waveformContainer.addSubview(waveformView)

        leftTrimMask.backgroundColor = UIColor.black.withAlphaComponent(0.25)
        rightTrimMask.backgroundColor = UIColor.black.withAlphaComponent(0.25)
        waveformContainer.addSubview(leftTrimMask)
        waveformContainer.addSubview(rightTrimMask)

        leftTrimHandle.backgroundColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
        leftTrimHandle.layer.cornerRadius = 2
        waveformContainer.addSubview(leftTrimHandle)

        rightTrimHandle.backgroundColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
        rightTrimHandle.layer.cornerRadius = 2
        waveformContainer.addSubview(rightTrimHandle)

        let pan = UIPanGestureRecognizer(target: self, action: #selector(handleWaveformPan(_:)))
        waveformContainer.addGestureRecognizer(pan)
        waveformContainer.isAccessibilityElement = true
        waveformContainer.accessibilityLabel = "Waveform trim area"
        waveformContainer.accessibilityHint = "Drag to scrub, or drag edges to trim."

        durationLabel.font = .monospacedDigitSystemFont(ofSize: 13, weight: .semibold)
        durationLabel.textColor = .white.withAlphaComponent(0.72)
        durationLabel.textAlignment = .right
        addSubview(durationLabel)

        let recordMoreConfig = UIImage.SymbolConfiguration(pointSize: 17, weight: .semibold)
        recordMoreButton.setImage(UIImage(systemName: "plus.circle", withConfiguration: recordMoreConfig), for: .normal)
        recordMoreButton.tintColor = .white.withAlphaComponent(0.85)
        recordMoreButton.addTarget(self, action: #selector(recordMoreTapped), for: .touchUpInside)
        recordMoreButton.isAccessibilityElement = true
        recordMoreButton.accessibilityLabel = "Record more"
        recordMoreButton.accessibilityHint = "Resume recording and append more audio."
        addSubview(recordMoreButton)

        let sendConfig = UIImage.SymbolConfiguration(pointSize: 20, weight: .semibold)
        sendButton.setImage(UIImage(systemName: "arrow.up.circle.fill", withConfiguration: sendConfig), for: .normal)
        sendButton.tintColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
        sendButton.addTarget(self, action: #selector(sendTapped), for: .touchUpInside)
        sendButton.isAccessibilityElement = true
        sendButton.accessibilityLabel = "Send recording"
        sendButton.accessibilityHint = "Sends current trimmed voice message."
        addSubview(sendButton)
    }

    // MARK: - Layout

    override func layoutSubviews() {
        super.layoutSubviews()
        let h = bounds.height
        let w = bounds.width

        glassBackground.frame = bounds
        glassBackground.applyCornerRadius()

        deleteButton.frame = CGRect(x: 4, y: (h - 40) / 2, width: 40, height: 40)
        playButton.frame = CGRect(x: 44, y: (h - 30) / 2, width: 30, height: 30)

        sendButton.frame = CGRect(x: w - 40, y: (h - 36) / 2, width: 36, height: 36)
        recordMoreButton.frame = CGRect(x: sendButton.frame.minX - 34, y: (h - 30) / 2, width: 30, height: 30)

        let durationW: CGFloat = 44
        durationLabel.frame = CGRect(
            x: recordMoreButton.frame.minX - durationW - 6,
            y: (h - 20) / 2,
            width: durationW,
            height: 20
        )

        // Waveform fills the gap between the play button and the duration label.
        let waveX = playButton.frame.maxX + 8
        let waveW = durationLabel.frame.minX - 8 - waveX
        waveformContainer.frame = CGRect(x: waveX, y: 4, width: max(0, waveW), height: h - 8)
        waveformView.frame = waveformContainer.bounds

        // Minimum trim window corresponds to 56pt of waveform (but >= 1s).
        minTrimDuration = max(1.0, 56.0 * duration / max(waveformContainer.bounds.width, 1))
        trimEnd = max(trimEnd, min(duration, trimStart + minTrimDuration))
        updateTrimVisuals()
        updateDurationLabel(isPlaying ? remainingFromPlayer() : (trimEnd - trimStart))
    }

    // MARK: - Play/Pause

    @objc private func playTapped() {
        if isPlaying {
            pausePlayback()
        } else {
            startPlayback()
        }
    }

    /// Lazily constructs the preview player. Best-effort: returns `nil`
    /// (leaving the panel silent) when the file cannot be opened — the
    /// `try?` here is a deliberate swallow, not an oversight.
    @discardableResult
    private func ensurePlayer() -> AVAudioPlayer? {
        if audioPlayer == nil {
            audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
            audioPlayer?.prepareToPlay()
        }
        return audioPlayer
    }

    private func startPlayback() {
        guard let player = ensurePlayer() else { return }
        // Playback always starts inside the trim window.
        if player.currentTime < trimStart || player.currentTime > trimEnd {
            player.currentTime = trimStart
        }
        player.play()
        isPlaying = true
        configurePlayButton(playing: true)
        startDisplayLink()
    }

    private func pausePlayback() {
        audioPlayer?.pause()
        isPlaying = false
        configurePlayButton(playing: false)
        stopDisplayLink()
    }

    /// Fully stops playback. With `resetToTrimStart` the playhead (and
    /// waveform progress) snaps back to the trim start; otherwise progress
    /// is cleared to zero.
    private func stopPlayback(resetToTrimStart: Bool = true) {
        audioPlayer?.stop()
        if resetToTrimStart {
            audioPlayer?.currentTime = trimStart
            waveformView.progress = CGFloat((duration > 0 ? trimStart / duration : 0))
        } else {
            waveformView.progress = 0
        }
        isPlaying = false
        configurePlayButton(playing: false)
        updateDurationLabel(trimEnd - trimStart)
        stopDisplayLink()
    }

    private func configurePlayButton(playing: Bool) {
        let config = UIImage.SymbolConfiguration(pointSize: 18, weight: .semibold)
        let name = playing ? "pause.fill" : "play.fill"
        playButton.setImage(UIImage(systemName: name, withConfiguration: config), for: .normal)
        playButton.tintColor = .white
    }

    // MARK: - Display Link

    private func startDisplayLink() {
        guard displayLink == nil else { return }
        let link = CADisplayLink(target: self, selector: #selector(displayLinkTick))
        link.add(to: .main, forMode: .common)
        displayLink = link
    }

    private func stopDisplayLink() {
        displayLink?.invalidate()
        displayLink = nil
    }

    /// Per-frame: advance waveform progress, stop at trim end or when the
    /// player finishes on its own.
    @objc private func displayLinkTick() {
        guard let player = audioPlayer else { return }
        if !player.isPlaying && isPlaying {
            stopPlayback()
            return
        }
        if player.currentTime >= trimEnd {
            stopPlayback()
            return
        }
        let progress = duration > 0 ? player.currentTime / duration : 0
        waveformView.progress = CGFloat(progress)
        updateDurationLabel(remainingFromPlayer())
    }

    // MARK: - Trim / Scrub

    @objc private func handleWaveformPan(_ gesture: UIPanGestureRecognizer) {
        guard duration > 0, waveformContainer.bounds.width > 1 else { return }
        let location = gesture.location(in: waveformContainer)
        let normalized = min(1, max(0, location.x / waveformContainer.bounds.width))
        let targetTime = TimeInterval(normalized) * duration

        switch gesture.state {
        case .began:
            // Grab a trim handle when the touch lands within 14pt of it;
            // otherwise the pan scrubs the playhead.
            let leftX = xForTime(trimStart)
            let rightX = xForTime(trimEnd)
            if abs(location.x - leftX) <= 14 {
                activePanMode = .trimLeft
            } else if abs(location.x - rightX) <= 14 {
                activePanMode = .trimRight
            } else {
                activePanMode = .scrub
            }
            if activePanMode != .scrub {
                pausePlayback()
            }
        case .changed:
            switch activePanMode {
            case .trimLeft:
                trimStart = min(max(0, targetTime), trimEnd - minTrimDuration)
                // Keep the playhead inside the shrinking window.
                if let player = audioPlayer, player.currentTime < trimStart {
                    player.currentTime = trimStart
                }
            case .trimRight:
                trimEnd = max(min(duration, targetTime), trimStart + minTrimDuration)
                if let player = audioPlayer, player.currentTime > trimEnd {
                    player.currentTime = trimEnd
                }
            case .scrub:
                let clamped = min(trimEnd, max(trimStart, targetTime))
                ensurePlayer()
                audioPlayer?.currentTime = clamped
                waveformView.progress = CGFloat(clamped / duration)
            case .none:
                break
            }
            updateTrimVisuals()
            if activePanMode == .scrub {
                updateDurationLabel(max(0, trimEnd - (audioPlayer?.currentTime ?? trimStart)))
            } else {
                updateDurationLabel(trimEnd - trimStart)
            }
        default:
            activePanMode = nil
        }
    }

    /// Repositions the dimming masks and the two trim handles to match
    /// the current trim window.
    private func updateTrimVisuals() {
        let h = waveformContainer.bounds.height
        let w = waveformContainer.bounds.width
        guard w > 0 else { return }

        let startX = xForTime(trimStart)
        let endX = xForTime(trimEnd)
        leftTrimMask.frame = CGRect(x: 0, y: 0, width: max(0, startX), height: h)
        rightTrimMask.frame = CGRect(x: min(w, endX), y: 0, width: max(0, w - endX), height: h)

        let handleW: CGFloat = 4
        leftTrimHandle.frame = CGRect(x: max(0, startX - handleW / 2), y: 0, width: handleW, height: h)
        rightTrimHandle.frame = CGRect(x: min(w - handleW, endX - handleW / 2), y: 0, width: handleW, height: h)
    }

    /// Maps a time offset to an x-coordinate inside the waveform container.
    private func xForTime(_ time: TimeInterval) -> CGFloat {
        guard duration > 0 else { return 0 }
        let normalized = min(1, max(0, time / duration))
        return CGFloat(normalized) * waveformContainer.bounds.width
    }

    /// Seconds of trimmed audio left to play from the current playhead.
    private func remainingFromPlayer() -> TimeInterval {
        guard let player = audioPlayer else { return trimEnd - trimStart }
        return max(0, trimEnd - player.currentTime)
    }

    // MARK: - Duration Formatting

    private func updateDurationLabel(_ time: TimeInterval) {
        let totalSeconds = Int(max(0, time))
        let minutes = totalSeconds / 60
        let seconds = totalSeconds % 60
        durationLabel.text = String(format: "%d:%02d", minutes, seconds)
    }

    // MARK: - Actions

    @objc private func deleteTapped() {
        stopPlayback()
        delegate?.previewPanelDidTapDelete(self)
    }

    @objc private func recordMoreTapped() {
        stopPlayback(resetToTrimStart: false)
        delegate?.previewPanelDidTapRecordMore(self)
    }

    @objc private func sendTapped() {
        stopPlayback(resetToTrimStart: false)
        delegate?.previewPanelDidTapSend(self, trimRange: selectedTrimRange)
    }

    // MARK: - Animate In

    /// Spring fade/scale-in on presentation.
    func animateIn() {
        alpha = 0
        transform = CGAffineTransform(scaleX: 0.95, y: 0.95)
        UIView.animate(withDuration: 0.25, delay: 0, usingSpringWithDamping: 0.8, initialSpringVelocity: 0) {
            self.alpha = 1
            self.transform = .identity
        }
    }

    // MARK: - Animate Out

    /// Stops playback, fades out, removes from hierarchy, then calls `completion`.
    func animateOut(completion: (() -> Void)? = nil) {
        stopPlayback()
        UIView.animate(withDuration: 0.15, animations: {
            self.alpha = 0
            self.transform = CGAffineTransform(scaleX: 0.95, y: 0.95)
        }, completion: { _ in
            self.removeFromSuperview()
            completion?()
        })
    }

    deinit {
        stopDisplayLink()
    }
}
|
||||
@@ -0,0 +1,23 @@
|
||||
import CoreGraphics
|
||||
import UIKit
|
||||
|
||||
enum VoiceRecordingFlowState: Equatable {
|
||||
case idle
|
||||
case armed
|
||||
case recordingUnlocked
|
||||
case recordingLocked
|
||||
case waitingForPreview
|
||||
case draftPreview
|
||||
}
|
||||
|
||||
final class VoiceSendTransitionSource {
|
||||
let snapshotView: UIView
|
||||
let sourceFrameInWindow: CGRect
|
||||
let cornerRadius: CGFloat
|
||||
|
||||
init(snapshotView: UIView, sourceFrameInWindow: CGRect, cornerRadius: CGFloat) {
|
||||
self.snapshotView = snapshotView
|
||||
self.sourceFrameInWindow = sourceFrameInWindow
|
||||
self.cornerRadius = cornerRadius
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import ObjectiveC
|
||||
import QuartzCore
|
||||
import UIKit
|
||||
|
||||
@@ -36,12 +37,8 @@ final class VoiceRecordingOverlay {
|
||||
private var currentLevel: CGFloat = 0
|
||||
private var inputLevel: CGFloat = 0
|
||||
|
||||
var dismissFactor: CGFloat = 1.0 {
|
||||
didSet {
|
||||
let s = max(0.3, min(dismissFactor, 1.0))
|
||||
containerView.transform = CGAffineTransform(scaleX: s, y: s)
|
||||
}
|
||||
}
|
||||
private var isLocked = false
|
||||
private var onTapStop: (() -> Void)?
|
||||
|
||||
// MARK: - Init
|
||||
|
||||
@@ -67,6 +64,11 @@ final class VoiceRecordingOverlay {
|
||||
containerView.layer.addSublayer(micIconLayer)
|
||||
}
|
||||
|
||||
deinit {
|
||||
displayLink?.invalidate()
|
||||
containerView.removeFromSuperview()
|
||||
}
|
||||
|
||||
private func configureMicIcon() {
|
||||
let viewBox = CGSize(width: 17.168, height: 23.555)
|
||||
let targetSize = CGSize(width: 25, height: 34)
|
||||
@@ -138,11 +140,107 @@ final class VoiceRecordingOverlay {
|
||||
startDisplayLink()
|
||||
}
|
||||
|
||||
// MARK: - Lock Transition (mic → stop icon, tappable)
|
||||
|
||||
/// Transition to locked state: mic icon → stop icon, overlay becomes tappable.
|
||||
/// Telegram: TGModernConversationInputMicButton.m line 616-693
|
||||
func transitionToLocked(onTapStop: @escaping () -> Void) {
|
||||
isLocked = true
|
||||
self.onTapStop = onTapStop
|
||||
containerView.isUserInteractionEnabled = true
|
||||
|
||||
// Add tap gesture via helper target
|
||||
let tapTarget = TapTarget { onTapStop() }
|
||||
let tap = UITapGestureRecognizer(target: tapTarget, action: #selector(TapTarget.tapped))
|
||||
containerView.addGestureRecognizer(tap)
|
||||
// Retain the target via associated object
|
||||
objc_setAssociatedObject(containerView, "tapTarget", tapTarget, .OBJC_ASSOCIATION_RETAIN)
|
||||
|
||||
// Reset drag transforms: scale back to 1.0, position to center
|
||||
UIView.animate(withDuration: 0.3, delay: 0, options: [.curveEaseInOut]) {
|
||||
self.innerCircle.transform = .identity
|
||||
self.outerCircle.transform = CGAffineTransform(
|
||||
scaleX: self.outerMinScale, y: self.outerMinScale
|
||||
)
|
||||
}
|
||||
|
||||
// Transition icon: mic → stop (two vertical bars)
|
||||
transitionToStopIcon()
|
||||
}
|
||||
|
||||
/// Animate mic icon → stop icon (Telegram: snapshot + cross-fade, 0.3s)
|
||||
private func transitionToStopIcon() {
|
||||
// Create stop icon path (two parallel vertical bars, Telegram exact)
|
||||
let stopPath = UIBezierPath()
|
||||
let barW: CGFloat = 4
|
||||
let barH: CGFloat = 16
|
||||
let gap: CGFloat = 6
|
||||
let totalW = barW * 2 + gap
|
||||
let originX = -totalW / 2
|
||||
let originY = -barH / 2
|
||||
// Left bar
|
||||
stopPath.append(UIBezierPath(
|
||||
roundedRect: CGRect(x: originX, y: originY, width: barW, height: barH),
|
||||
cornerRadius: 1
|
||||
))
|
||||
// Right bar
|
||||
stopPath.append(UIBezierPath(
|
||||
roundedRect: CGRect(x: originX + barW + gap, y: originY, width: barW, height: barH),
|
||||
cornerRadius: 1
|
||||
))
|
||||
|
||||
// Animate: old icon scales down, new icon scales up
|
||||
let newIconLayer = CAShapeLayer()
|
||||
newIconLayer.path = stopPath.cgPath
|
||||
newIconLayer.fillColor = UIColor.white.cgColor
|
||||
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
|
||||
newIconLayer.position = mid
|
||||
newIconLayer.transform = CATransform3DMakeScale(0.3, 0.3, 1)
|
||||
newIconLayer.opacity = 0
|
||||
containerView.layer.addSublayer(newIconLayer)
|
||||
|
||||
// Old mic icon scales to 0
|
||||
let shrink = CABasicAnimation(keyPath: "transform.scale")
|
||||
shrink.toValue = 0.001
|
||||
shrink.duration = 0.3
|
||||
shrink.fillMode = .forwards
|
||||
shrink.isRemovedOnCompletion = false
|
||||
micIconLayer.add(shrink, forKey: "shrink")
|
||||
|
||||
let fadeOut = CABasicAnimation(keyPath: "opacity")
|
||||
fadeOut.toValue = 0
|
||||
fadeOut.duration = 0.2
|
||||
fadeOut.fillMode = .forwards
|
||||
fadeOut.isRemovedOnCompletion = false
|
||||
micIconLayer.add(fadeOut, forKey: "fadeOutMic")
|
||||
|
||||
// New stop icon grows in
|
||||
let grow = CABasicAnimation(keyPath: "transform.scale")
|
||||
grow.fromValue = 0.3
|
||||
grow.toValue = 1.0
|
||||
grow.duration = 0.3
|
||||
grow.fillMode = .forwards
|
||||
grow.isRemovedOnCompletion = false
|
||||
newIconLayer.add(grow, forKey: "grow")
|
||||
|
||||
let fadeIn = CABasicAnimation(keyPath: "opacity")
|
||||
fadeIn.fromValue = 0
|
||||
fadeIn.toValue = 1
|
||||
fadeIn.duration = 0.25
|
||||
fadeIn.fillMode = .forwards
|
||||
fadeIn.isRemovedOnCompletion = false
|
||||
newIconLayer.add(fadeIn, forKey: "fadeInStop")
|
||||
}
|
||||
|
||||
// MARK: - Dismiss (Telegram exact: 0.18s, scale→0.2, alpha→0)
|
||||
|
||||
func dismiss() {
|
||||
stopDisplayLink()
|
||||
|
||||
// Capture containerView strongly — overlay may be deallocated before the
|
||||
// delayed cleanup fires (ComposerView sets recordingOverlay = nil immediately).
|
||||
let container = containerView
|
||||
|
||||
UIView.animate(withDuration: 0.18, animations: {
|
||||
self.innerCircle.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
|
||||
self.innerCircle.alpha = 0
|
||||
@@ -158,12 +256,34 @@ final class VoiceRecordingOverlay {
|
||||
iconFade.isRemovedOnCompletion = false
|
||||
micIconLayer.add(iconFade, forKey: "fadeOut")
|
||||
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { [weak self] in
|
||||
self?.containerView.removeFromSuperview()
|
||||
self?.micIconLayer.removeAllAnimations()
|
||||
self?.micIconLayer.opacity = 1
|
||||
self?.currentLevel = 0
|
||||
self?.inputLevel = 0
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
|
||||
container.removeFromSuperview()
|
||||
}
|
||||
}
|
||||
|
||||
/// Cancel-specific dismiss: leftward translation matching cancel drag direction.
|
||||
func dismissCancel() {
|
||||
stopDisplayLink()
|
||||
|
||||
let container = containerView
|
||||
|
||||
UIView.animate(withDuration: 0.18, animations: {
|
||||
self.innerCircle.transform = CGAffineTransform(translationX: -80, y: 0)
|
||||
.scaledBy(x: 0.2, y: 0.2)
|
||||
self.innerCircle.alpha = 0
|
||||
self.outerCircle.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
|
||||
self.outerCircle.alpha = 0
|
||||
})
|
||||
|
||||
let iconFade = CABasicAnimation(keyPath: "opacity")
|
||||
iconFade.toValue = 0.0
|
||||
iconFade.duration = 0.18
|
||||
iconFade.fillMode = .forwards
|
||||
iconFade.isRemovedOnCompletion = false
|
||||
micIconLayer.add(iconFade, forKey: "fadeOut")
|
||||
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
|
||||
container.removeFromSuperview()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -173,6 +293,48 @@ final class VoiceRecordingOverlay {
|
||||
inputLevel = level
|
||||
}
|
||||
|
||||
// MARK: - Drag Transforms (Telegram: displayLinkUpdate lines 921-936)
|
||||
|
||||
/// Apply drag-based transforms to overlay circles.
|
||||
/// distanceX: negative = sliding left (cancel), distanceY: negative = sliding up (lock)
|
||||
/// Telegram exact from TGModernConversationInputMicButton.m
|
||||
func applyDragTransform(distanceX: CGFloat, distanceY: CGFloat) {
|
||||
guard CACurrentMediaTime() > animationStartTime else { return }
|
||||
|
||||
// Telegram cancel-transform threshold: 8pt
|
||||
guard abs(distanceX) > 8 || abs(distanceY) > 8 else { return }
|
||||
|
||||
// Telegram line 763: normalize to 0..1 over 300pt range
|
||||
let valueX = max(0, min(1, abs(distanceX) / 300))
|
||||
|
||||
// Telegram line 768: inner scale squeezes from 1.0 → 0.4
|
||||
let innerScale = max(0.4, min(1.0, 1.0 - valueX))
|
||||
|
||||
// Vertical translation (follows finger)
|
||||
let translation = CGAffineTransform(translationX: 0, y: distanceY)
|
||||
|
||||
// Telegram line 922-924: outer circle = translation + audio scale
|
||||
let outerScale = outerMinScale + currentLevel * (1.0 - outerMinScale)
|
||||
outerCircle.transform = translation.scaledBy(x: outerScale, y: outerScale)
|
||||
|
||||
// Telegram line 931-932: inner circle = translation + cancel scale + horizontal offset
|
||||
let innerTransform = translation
|
||||
.scaledBy(x: innerScale, y: innerScale)
|
||||
.translatedBy(x: distanceX, y: 0)
|
||||
innerCircle.transform = innerTransform
|
||||
|
||||
// Icon follows inner circle
|
||||
CATransaction.begin()
|
||||
CATransaction.setDisableActions(true)
|
||||
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
|
||||
micIconLayer.position = CGPoint(
|
||||
x: mid.x + distanceX * innerScale,
|
||||
y: mid.y + distanceY
|
||||
)
|
||||
micIconLayer.transform = CATransform3DMakeScale(innerScale, innerScale, 1)
|
||||
CATransaction.commit()
|
||||
}
|
||||
|
||||
// MARK: - Display Link (Telegram: displayLinkEvent, 0.8/0.2 smoothing)
|
||||
|
||||
private func startDisplayLink() {
|
||||
@@ -194,8 +356,8 @@ final class VoiceRecordingOverlay {
|
||||
// Telegram: wait 0.5s for spring to settle before reacting to audio
|
||||
guard CACurrentMediaTime() > animationStartTime + 0.5 else { return }
|
||||
|
||||
// Telegram exact smoothing (ChatTextInputAudioRecordingOverlay line 162)
|
||||
currentLevel = currentLevel * 0.8 + inputLevel * 0.2
|
||||
// Telegram exact: TGModernConversationInputMicButton.m line 916 (0.9/0.1)
|
||||
currentLevel = currentLevel * 0.9 + inputLevel * 0.1
|
||||
|
||||
// Telegram exact: outerCircleMinScale + currentLevel * (1.0 - outerCircleMinScale)
|
||||
let scale = outerMinScale + currentLevel * (1.0 - outerMinScale)
|
||||
@@ -210,3 +372,11 @@ private final class DisplayLinkTarget: NSObject {
|
||||
init(_ callback: @escaping () -> Void) { self.callback = callback }
|
||||
@objc func tick() { callback() }
|
||||
}
|
||||
|
||||
// MARK: - TapTarget
|
||||
|
||||
private final class TapTarget: NSObject {
|
||||
let callback: () -> Void
|
||||
init(_ callback: @escaping () -> Void) { self.callback = callback }
|
||||
@objc func tapped() { callback() }
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ final class VoiceRecordingPanel: UIView {
|
||||
|
||||
// MARK: - Subviews
|
||||
|
||||
// Glass background
|
||||
// Glass background (matches input container style)
|
||||
private let glassBackground = TelegramGlassUIView(frame: .zero)
|
||||
|
||||
// Red dot (10×10, #FF2D55)
|
||||
@@ -42,11 +42,12 @@ final class VoiceRecordingPanel: UIView {
|
||||
// MARK: - State
|
||||
|
||||
private(set) var isDisplayingCancel = false
|
||||
private var isEntryAnimationComplete = false
|
||||
|
||||
// MARK: - Telegram-exact layout constants
|
||||
|
||||
private let dotX: CGFloat = 16
|
||||
private let timerX: CGFloat = 34
|
||||
private let dotX: CGFloat = 5 // Telegram: indicator X=5
|
||||
private let timerX: CGFloat = 40 // Telegram: timer X=40
|
||||
private let dotSize: CGFloat = 10
|
||||
private let arrowLabelGap: CGFloat = 6
|
||||
|
||||
@@ -76,23 +77,29 @@ final class VoiceRecordingPanel: UIView {
|
||||
redDot.layer.cornerRadius = dotSize / 2
|
||||
addSubview(redDot)
|
||||
|
||||
// Timer: 15pt monospaced
|
||||
timerLabel.font = .monospacedDigitSystemFont(ofSize: 15, weight: .regular)
|
||||
// Timer: 15pt monospaced rounded (Telegram: Font.with(size: 15, design: .camera))
|
||||
if let descriptor = UIFont.systemFont(ofSize: 15, weight: .regular)
|
||||
.fontDescriptor.withDesign(.rounded) {
|
||||
timerLabel.font = UIFont(descriptor: descriptor, size: 15)
|
||||
} else {
|
||||
timerLabel.font = .monospacedDigitSystemFont(ofSize: 15, weight: .regular)
|
||||
}
|
||||
timerLabel.textColor = .white
|
||||
timerLabel.text = "0:00"
|
||||
addSubview(timerLabel)
|
||||
|
||||
// Arrow icon (template, white 30% alpha like panelControlColor on dark)
|
||||
let arrowConfig = UIImage.SymbolConfiguration(pointSize: 11, weight: .semibold)
|
||||
arrowIcon.image = UIImage(systemName: "chevron.left", withConfiguration: arrowConfig)
|
||||
arrowIcon.tintColor = UIColor.white.withAlphaComponent(0.4)
|
||||
// Arrow: exact Telegram SVG "AudioRecordingCancelArrow" (arrowleft.svg, 9×18pt)
|
||||
arrowIcon.image = Self.makeCancelArrowImage()
|
||||
arrowIcon.contentMode = .center
|
||||
cancelContainer.addSubview(arrowIcon)
|
||||
|
||||
// "Slide to cancel" label: 14pt regular
|
||||
// "Slide to cancel": 14pt regular, panelControlColor = #FFFFFF (dark theme)
|
||||
slideLabel.font = .systemFont(ofSize: 14, weight: .regular)
|
||||
slideLabel.textColor = UIColor.white.withAlphaComponent(0.4)
|
||||
slideLabel.textColor = .white
|
||||
slideLabel.text = "Slide to cancel"
|
||||
cancelContainer.addSubview(slideLabel)
|
||||
cancelContainer.isAccessibilityElement = true
|
||||
cancelContainer.accessibilityLabel = "Slide left to cancel recording"
|
||||
addSubview(cancelContainer)
|
||||
|
||||
// Cancel button (for locked state): 17pt
|
||||
@@ -100,6 +107,9 @@ final class VoiceRecordingPanel: UIView {
|
||||
cancelButton.setTitleColor(.white, for: .normal)
|
||||
cancelButton.titleLabel?.font = .systemFont(ofSize: 17, weight: .regular)
|
||||
cancelButton.addTarget(self, action: #selector(cancelTapped), for: .touchUpInside)
|
||||
cancelButton.isAccessibilityElement = true
|
||||
cancelButton.accessibilityLabel = "Cancel recording"
|
||||
cancelButton.accessibilityHint = "Discards the current recording."
|
||||
cancelButton.alpha = 0
|
||||
addSubview(cancelButton)
|
||||
}
|
||||
@@ -129,16 +139,19 @@ final class VoiceRecordingPanel: UIView {
|
||||
// Timer: at X=34
|
||||
timerLabel.frame = CGRect(x: timerX, y: timerY, width: timerSize.width + 4, height: timerSize.height)
|
||||
|
||||
// Cancel indicator: centered in available width
|
||||
// Cancel indicator: centered in full panel width
|
||||
// Telegram: frame.width = arrowSize.width + 12.0 + labelLayout.size.width
|
||||
let labelSize = slideLabel.sizeThatFits(CGSize(width: 200, height: h))
|
||||
let arrowW: CGFloat = 12
|
||||
let totalCancelW = arrowW + arrowLabelGap + labelSize.width
|
||||
let arrowW: CGFloat = 9 // Telegram SVG: 9pt wide
|
||||
let arrowH: CGFloat = 18 // Telegram SVG: 18pt tall
|
||||
let totalCancelW = arrowW + 12 + labelSize.width // Telegram: arrowWidth + 12 + labelWidth
|
||||
let cancelX = floor((w - totalCancelW) / 2)
|
||||
|
||||
cancelContainer.frame = CGRect(x: cancelX, y: 0, width: totalCancelW, height: h)
|
||||
arrowIcon.frame = CGRect(x: 0, y: floor((h - 12) / 2), width: arrowW, height: 12)
|
||||
arrowIcon.frame = CGRect(x: 0, y: floor((h - arrowH) / 2), width: arrowW, height: arrowH)
|
||||
// Telegram: label X = arrowSize.width + 6.0
|
||||
slideLabel.frame = CGRect(
|
||||
x: arrowW + arrowLabelGap,
|
||||
x: arrowW + 6,
|
||||
y: 1 + floor((h - labelSize.height) / 2),
|
||||
width: labelSize.width,
|
||||
height: labelSize.height
|
||||
@@ -163,16 +176,18 @@ final class VoiceRecordingPanel: UIView {
|
||||
/// Updates cancel indicator position based on horizontal drag.
|
||||
/// translation is negative (finger sliding left).
|
||||
func updateCancelTranslation(_ translation: CGFloat) {
|
||||
guard !isDisplayingCancel else { return }
|
||||
guard !isDisplayingCancel, isEntryAnimationComplete else { return }
|
||||
|
||||
// Telegram: indicatorTranslation = max(0, cancelTranslation - 8)
|
||||
let offset = max(0, abs(translation) - 8)
|
||||
// Only apply transform when actually dragging (threshold 8pt)
|
||||
let drag = abs(translation)
|
||||
guard drag > 8 else { return }
|
||||
|
||||
let offset = drag - 8
|
||||
cancelContainer.transform = CGAffineTransform(translationX: -offset * 0.5, y: 0)
|
||||
|
||||
// Telegram: alpha = max(0, min(1, (frameMinX - 100) / 10))
|
||||
let minX = cancelContainer.frame.minX - offset * 0.5
|
||||
let alpha = max(0, min(1, (minX - 100) / 10))
|
||||
cancelContainer.alpha = alpha
|
||||
// Fade: starts at 60% of cancel threshold (90pt drag), fully hidden at threshold
|
||||
let fadeProgress = max(0, min(1, (drag - 90) / 60))
|
||||
cancelContainer.alpha = 1 - fadeProgress
|
||||
}
|
||||
|
||||
/// Animate panel in. Called when recording begins.
|
||||
@@ -195,7 +210,7 @@ final class VoiceRecordingPanel: UIView {
|
||||
timerLabel.alpha = 0
|
||||
let timerStartX = timerLabel.frame.origin.x - 30
|
||||
timerLabel.transform = CGAffineTransform(translationX: -30, y: 0)
|
||||
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 0.65, initialSpringVelocity: 0, options: []) {
|
||||
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 0.55, initialSpringVelocity: 0, options: []) {
|
||||
self.timerLabel.alpha = 1
|
||||
self.timerLabel.transform = .identity
|
||||
}
|
||||
@@ -203,17 +218,18 @@ final class VoiceRecordingPanel: UIView {
|
||||
// Cancel indicator: slide in from right, spring 0.4s (Telegram exact)
|
||||
cancelContainer.alpha = 1
|
||||
cancelContainer.transform = CGAffineTransform(translationX: panelWidth * 0.3, y: 0)
|
||||
UIView.animate(withDuration: 0.4, delay: 0, usingSpringWithDamping: 0.65, initialSpringVelocity: 0, options: []) {
|
||||
UIView.animate(withDuration: 0.4, delay: 0, usingSpringWithDamping: 0.55, initialSpringVelocity: 0, options: []) {
|
||||
self.cancelContainer.transform = .identity
|
||||
}
|
||||
|
||||
// Start jiggle after cancel slides in (Telegram: 6pt, 1.0s, easeInOut, infinite)
|
||||
// Mark entry animation complete + start jiggle after spring settles
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
|
||||
self?.isEntryAnimationComplete = true
|
||||
self?.startCancelJiggle()
|
||||
}
|
||||
}
|
||||
|
||||
/// Animate panel out. Called when recording ends.
|
||||
/// Animate panel out. Called when recording ends normally (send).
|
||||
func animateOut(completion: (() -> Void)? = nil) {
|
||||
stopDotPulsing()
|
||||
stopCancelJiggle()
|
||||
@@ -231,6 +247,43 @@ final class VoiceRecordingPanel: UIView {
|
||||
})
|
||||
}
|
||||
|
||||
/// Cancel-specific dismiss: red dot "trash" animation + timer slide left.
|
||||
/// Telegram: MediaRecordingPanelComponent animateOut(dismissRecording:)
|
||||
func animateOutCancel(completion: (() -> Void)? = nil) {
|
||||
stopDotPulsing()
|
||||
stopCancelJiggle()
|
||||
|
||||
// Red dot: scale pulse 1→1.3→0, color red→gray
|
||||
UIView.animate(withDuration: 0.15, animations: {
|
||||
self.redDot.transform = CGAffineTransform(scaleX: 1.3, y: 1.3)
|
||||
self.redDot.backgroundColor = .gray
|
||||
}, completion: { _ in
|
||||
UIView.animate(withDuration: 0.15, animations: {
|
||||
self.redDot.transform = CGAffineTransform(scaleX: 0.01, y: 0.01)
|
||||
self.redDot.alpha = 0
|
||||
})
|
||||
})
|
||||
|
||||
// Timer: scale to 0, slide left
|
||||
UIView.animate(withDuration: 0.25) {
|
||||
self.timerLabel.transform = CGAffineTransform(translationX: -30, y: 0)
|
||||
.scaledBy(x: 0.001, y: 0.001)
|
||||
self.timerLabel.alpha = 0
|
||||
}
|
||||
|
||||
// Cancel indicator: fade out
|
||||
UIView.animate(withDuration: 0.25) {
|
||||
self.cancelContainer.alpha = 0
|
||||
self.cancelButton.alpha = 0
|
||||
}
|
||||
|
||||
// Remove after animation completes
|
||||
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
|
||||
self?.removeFromSuperview()
|
||||
completion?()
|
||||
}
|
||||
}
|
||||
|
||||
/// Transition cancel indicator to "Cancel" button (locked state).
|
||||
/// Telegram: arrow+label shrink up (-22pt, scale 0.25), button grows down.
|
||||
func showCancelButton() {
|
||||
@@ -291,4 +344,49 @@ final class VoiceRecordingPanel: UIView {
|
||||
@objc private func cancelTapped() {
|
||||
delegate?.recordingPanelDidTapCancel(self)
|
||||
}
|
||||
|
||||
// MARK: - Telegram Cancel Arrow (exact SVG from arrowleft.svg, 9×18pt)
|
||||
|
||||
private static func makeCancelArrowImage() -> UIImage {
|
||||
let size = CGSize(width: 9, height: 18)
|
||||
let renderer = UIGraphicsImageRenderer(size: size)
|
||||
return renderer.image { ctx in
|
||||
let path = UIBezierPath()
|
||||
// Exact path from Telegram's arrowleft.svg
|
||||
path.move(to: CGPoint(x: 8.438, y: 0.500))
|
||||
path.addCurve(
|
||||
to: CGPoint(x: 8.500, y: 1.438),
|
||||
controlPoint1: CGPoint(x: 8.714, y: 0.741),
|
||||
controlPoint2: CGPoint(x: 8.742, y: 1.161)
|
||||
)
|
||||
path.addLine(to: CGPoint(x: 1.884, y: 9.000))
|
||||
path.addLine(to: CGPoint(x: 8.500, y: 16.562))
|
||||
path.addCurve(
|
||||
to: CGPoint(x: 8.438, y: 17.500),
|
||||
controlPoint1: CGPoint(x: 8.742, y: 16.839),
|
||||
controlPoint2: CGPoint(x: 8.714, y: 17.259)
|
||||
)
|
||||
path.addCurve(
|
||||
to: CGPoint(x: 7.500, y: 17.438),
|
||||
controlPoint1: CGPoint(x: 8.161, y: 17.742),
|
||||
controlPoint2: CGPoint(x: 7.741, y: 17.714)
|
||||
)
|
||||
path.addLine(to: CGPoint(x: 0.499, y: 9.438))
|
||||
path.addCurve(
|
||||
to: CGPoint(x: 0.499, y: 8.562),
|
||||
controlPoint1: CGPoint(x: 0.280, y: 9.187),
|
||||
controlPoint2: CGPoint(x: 0.280, y: 8.813)
|
||||
)
|
||||
path.addLine(to: CGPoint(x: 7.500, y: 0.562))
|
||||
path.addCurve(
|
||||
to: CGPoint(x: 8.438, y: 0.500),
|
||||
controlPoint1: CGPoint(x: 7.741, y: 0.286),
|
||||
controlPoint2: CGPoint(x: 8.161, y: 0.258)
|
||||
)
|
||||
path.close()
|
||||
|
||||
UIColor.white.setFill()
|
||||
path.fill()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,7 +130,7 @@ struct MainTabView: View {
|
||||
.tag(RosettaTab.chats)
|
||||
.badge(cachedUnreadCount)
|
||||
|
||||
SettingsView(onLogout: onLogout, onAddAccount: handleAddAccount, isEditingProfile: $isSettingsEditPresented, isDetailPresented: $isSettingsDetailPresented)
|
||||
SettingsContainerView(onLogout: onLogout, onAddAccount: handleAddAccount, isEditingProfile: $isSettingsEditPresented, isDetailPresented: $isSettingsDetailPresented)
|
||||
.tabItem {
|
||||
Label(RosettaTab.settings.label, systemImage: RosettaTab.settings.icon)
|
||||
}
|
||||
@@ -216,7 +216,7 @@ struct MainTabView: View {
|
||||
CallsView()
|
||||
.callBarSafeAreaInset(callBarTopPadding)
|
||||
case .settings:
|
||||
SettingsView(onLogout: onLogout, onAddAccount: handleAddAccount, isEditingProfile: $isSettingsEditPresented, isDetailPresented: $isSettingsDetailPresented)
|
||||
SettingsContainerView(onLogout: onLogout, onAddAccount: handleAddAccount, isEditingProfile: $isSettingsEditPresented, isDetailPresented: $isSettingsDetailPresented)
|
||||
.callBarSafeAreaInset(callBarTopPadding)
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -35,7 +35,7 @@ struct SettingsProfileHeader: View {
|
||||
)
|
||||
.background(RosettaColors.Adaptive.background)
|
||||
.opacity(1 - scrollProgress)
|
||||
.blur(radius: scrollProgress * 10, opaque: true)
|
||||
.blur(radius: scrollProgress * 10)
|
||||
.clipShape(Circle())
|
||||
.anchorPreference(key: AnchorKey.self, value: .bounds) {
|
||||
["HEADER": $0]
|
||||
|
||||
@@ -148,10 +148,70 @@ struct SettingsView: View {
|
||||
}
|
||||
}
|
||||
|
||||
// Toolbar OUTSIDE NavigationStack — above hidden nav bar
|
||||
// Toolbar OUTSIDE NavigationStack — sits in safe area naturally
|
||||
if !isDetailPresented {
|
||||
settingsToolbarOverlay(safeArea: viewSafeArea)
|
||||
.ignoresSafeArea(.all, edges: .top)
|
||||
HStack {
|
||||
if isEditingProfile {
|
||||
Button {
|
||||
pendingAvatarPhoto = nil
|
||||
withAnimation(.easeInOut(duration: 0.2)) {
|
||||
isEditingProfile = false
|
||||
}
|
||||
} label: {
|
||||
Text("Cancel")
|
||||
.font(.system(size: 17, weight: .medium))
|
||||
.foregroundStyle(RosettaColors.Adaptive.text)
|
||||
.frame(height: 44)
|
||||
.padding(.horizontal, 10)
|
||||
}
|
||||
.buttonStyle(.plain)
|
||||
.glassCapsule()
|
||||
.disabled(isSaving)
|
||||
} else {
|
||||
DarkModeButton()
|
||||
.glassCircle()
|
||||
}
|
||||
|
||||
Spacer()
|
||||
|
||||
if isEditingProfile {
|
||||
Button {
|
||||
saveProfile()
|
||||
} label: {
|
||||
Text("Done")
|
||||
.font(.system(size: 17, weight: .semibold))
|
||||
.foregroundStyle(
|
||||
hasProfileChanges
|
||||
? RosettaColors.Adaptive.text
|
||||
: RosettaColors.Adaptive.text.opacity(0.4)
|
||||
)
|
||||
.frame(height: 44)
|
||||
.padding(.horizontal, 10)
|
||||
}
|
||||
.buttonStyle(.plain)
|
||||
.glassCapsule()
|
||||
} else {
|
||||
Button {
|
||||
editDisplayName = viewModel.displayName
|
||||
editUsername = viewModel.username
|
||||
displayNameError = nil
|
||||
usernameError = nil
|
||||
pendingAvatarPhoto = nil
|
||||
withAnimation(.easeInOut(duration: 0.2)) {
|
||||
isEditingProfile = true
|
||||
}
|
||||
} label: {
|
||||
Text("Edit")
|
||||
.font(.system(size: 17, weight: .medium))
|
||||
.foregroundStyle(RosettaColors.Adaptive.text)
|
||||
.frame(height: 44)
|
||||
.padding(.horizontal, 10)
|
||||
}
|
||||
.buttonStyle(.plain)
|
||||
.glassCapsule()
|
||||
}
|
||||
}
|
||||
.padding(.horizontal, 15)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -180,48 +240,6 @@ struct SettingsView: View {
|
||||
.transition(.opacity)
|
||||
}
|
||||
}
|
||||
.backgroundPreferenceValue(AnchorKey.self) { pref in
|
||||
GeometryReader { proxy in
|
||||
if let anchor = pref["HEADER"], isHavingNotch {
|
||||
let frameRect = proxy[anchor]
|
||||
let isHavingDynamicIsland = safeArea.top > 51
|
||||
let capsuleHeight: CGFloat = isHavingDynamicIsland ? 37 : (safeArea.top - 15)
|
||||
|
||||
Canvas { out, canvasSize in
|
||||
out.addFilter(.alphaThreshold(min: 0.5))
|
||||
out.addFilter(.blur(radius: 12))
|
||||
out.drawLayer { ctx in
|
||||
if let headerView = out.resolveSymbol(id: 0) {
|
||||
ctx.draw(headerView, in: frameRect)
|
||||
}
|
||||
if let dynamicIsland = out.resolveSymbol(id: 1) {
|
||||
let rect = CGRect(
|
||||
x: (canvasSize.width - 120) / 2,
|
||||
y: isHavingDynamicIsland ? 11 : 0,
|
||||
width: 120,
|
||||
height: capsuleHeight
|
||||
)
|
||||
ctx.draw(dynamicIsland, in: rect)
|
||||
}
|
||||
}
|
||||
} symbols: {
|
||||
Circle()
|
||||
.fill(.black)
|
||||
.frame(width: frameRect.width, height: frameRect.height)
|
||||
.tag(0).id(0)
|
||||
Capsule()
|
||||
.fill(.black)
|
||||
.frame(width: 120, height: capsuleHeight)
|
||||
.tag(1).id(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
.overlay(alignment: .top) {
|
||||
Rectangle()
|
||||
.fill(RosettaColors.Adaptive.background)
|
||||
.frame(height: 15)
|
||||
}
|
||||
}
|
||||
.coordinateSpace(name: "SETTINGS_SCROLL")
|
||||
}
|
||||
|
||||
|
||||
681
Rosetta/Features/Settings/SettingsViewController.swift
Normal file
681
Rosetta/Features/Settings/SettingsViewController.swift
Normal file
@@ -0,0 +1,681 @@
|
||||
import SwiftUI
|
||||
import UIKit
|
||||
|
||||
// MARK: - SettingsViewController
|
||||
|
||||
/// Pure UIKit Settings screen — replaces SwiftUI SettingsView.
|
||||
/// Frame-based layout (Telegram pattern), no Auto Layout.
|
||||
/// Sub-screens (Appearance, Updates, Safety, Backup) stay SwiftUI via UIHostingController.
|
||||
final class SettingsViewController: UIViewController, UIScrollViewDelegate {
|
||||
|
||||
// MARK: - Callbacks
|
||||
|
||||
var onLogout: (() -> Void)?
|
||||
var onAddAccount: ((AuthScreen) -> Void)?
|
||||
/// Reports editing/detail state to parent so tab bar hides when appropriate.
|
||||
var onEditingStateChanged: ((Bool) -> Void)?
|
||||
var onDetailStateChanged: ((Bool) -> Void)?
|
||||
|
||||
// MARK: - State
|
||||
|
||||
private let viewModel = SettingsViewModel()
|
||||
private var avatarImage: UIImage?
|
||||
private var isBiometricEnabled = false
|
||||
|
||||
// MARK: - Views
|
||||
|
||||
private let scrollView = UIScrollView()
|
||||
private let contentView = UIView()
|
||||
private let toolbarView = UIView()
|
||||
|
||||
// Profile header
|
||||
private let avatarContainer = UIView()
|
||||
private var avatarHosting: UIHostingController<AvatarView>?
|
||||
private let nameLabel = UILabel()
|
||||
private let usernameLabel = UILabel()
|
||||
private var publicKeyView: CopyableLabel?
|
||||
|
||||
// Cards
|
||||
private var accountCard: UIView?
|
||||
private var appearanceCard: UIView!
|
||||
private var updatesCard: UIView!
|
||||
private var biometricCard: UIView?
|
||||
private let biometricSwitch = UISwitch()
|
||||
private var safetyCard: UIView!
|
||||
private var footerView: UIView!
|
||||
|
||||
// Toolbar buttons (DarkModeButton is SwiftUI — uses circular reveal animation + AppStorage theme)
|
||||
private var darkModeHosting: UIHostingController<AnyView>?
|
||||
private var editHosting: UIHostingController<AnyView>?
|
||||
|
||||
// MARK: - Layout Constants
|
||||
|
||||
private let hPad: CGFloat = 16
|
||||
private let cardCornerRadius: CGFloat = 26
|
||||
private let rowHeight: CGFloat = 52
|
||||
private let iconSize: CGFloat = 26
|
||||
private let iconCornerRadius: CGFloat = 6
|
||||
private let cardFill = UIColor { traits in
|
||||
traits.userInterfaceStyle == .dark
|
||||
? UIColor(red: 28/255, green: 28/255, blue: 30/255, alpha: 1)
|
||||
: UIColor(red: 242/255, green: 242/255, blue: 247/255, alpha: 1)
|
||||
}
|
||||
|
||||
// MARK: - Lifecycle

/// One-time setup: hides the navigation bar (this screen draws its own
/// toolbar), builds the view hierarchy, loads initial state and subscribes
/// to profile-change notifications.
override func viewDidLoad() {
    super.viewDidLoad()
    navigationController?.setNavigationBarHidden(true, animated: false)
    view.backgroundColor = UIColor(RosettaColors.Adaptive.background)

    // Add order determines z-order: the toolbar is added after the scroll
    // view so it stays above the scrolling content.
    setupScrollView()
    setupToolbar()
    setupProfileHeader()
    setupCards()
    setupFooter()

    refresh()

    // No matching removeObserver: selector-based observers are removed
    // automatically on dealloc since iOS 9.
    NotificationCenter.default.addObserver(
        self, selector: #selector(profileDidUpdate),
        name: .profileDidUpdate, object: nil
    )
}

/// Re-hides the nav bar (a pushed detail screen may have shown it), clears
/// the detail-presented flag for the SwiftUI host, and refreshes the data.
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    navigationController?.setNavigationBarHidden(true, animated: animated)
    onDetailStateChanged?(false)
    refresh()
}

/// Manual (frame-based) layout: recompute everything on each layout pass.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    layoutAll()
}

/// Target of the `.profileDidUpdate` notification.
@objc private func profileDidUpdate() {
    refresh()
}

// MARK: - Refresh

/// Pulls fresh data from the view model and the avatar store, then
/// re-renders the biometric toggle and the profile header (in that order —
/// updateProfileHeader reads `isBiometricEnabled`).
private func refresh() {
    viewModel.refresh()
    avatarImage = AvatarRepository.shared.loadAvatar(publicKey: viewModel.publicKey)
    refreshBiometricState()
    updateProfileHeader()
}
// MARK: - Setup Scroll View

/// Installs the vertical scroll view and its transparent content container.
private func setupScrollView() {
    scrollView.backgroundColor = .clear
    scrollView.alwaysBounceVertical = true
    scrollView.showsVerticalScrollIndicator = false
    scrollView.delegate = self
    view.addSubview(scrollView)

    contentView.backgroundColor = .clear
    scrollView.addSubview(contentView)
}
// MARK: - Setup Toolbar

/// Builds the top toolbar and embeds the two SwiftUI controls in it.
private func setupToolbar() {
    toolbarView.backgroundColor = .clear
    view.addSubview(toolbarView)

    // Shared embedding dance for a hosting controller inside the toolbar.
    func embed(_ hosting: UIHostingController<AnyView>) {
        hosting.view.backgroundColor = .clear
        addChild(hosting)
        toolbarView.addSubview(hosting.view)
        hosting.didMove(toParent: self)
    }

    // Dark mode button — SwiftUI DarkModeButton (circular reveal animation + theme system)
    let darkMode = UIHostingController(rootView: AnyView(
        DarkModeButton()
            .glassCircle()
    ))
    embed(darkMode)
    darkModeHosting = darkMode

    // Edit button — SwiftUI for glass consistency
    let edit = UIHostingController(rootView: AnyView(
        SettingsEditButton { [weak self] in
            self?.editTapped()
        }
    ))
    embed(edit)
    editHosting = edit
}
// MARK: - Setup Profile Header

/// Builds the avatar container plus the name / username / public-key labels.
private func setupProfileHeader() {
    contentView.addSubview(avatarContainer)

    // Display name.
    nameLabel.textAlignment = .center
    nameLabel.font = .systemFont(ofSize: 22, weight: .bold)
    nameLabel.textColor = UIColor(RosettaColors.Adaptive.text)
    contentView.addSubview(nameLabel)

    // @username (hidden when the user has none — see updateProfileHeader).
    usernameLabel.textAlignment = .center
    usernameLabel.font = .systemFont(ofSize: 15, weight: .regular)
    usernameLabel.textColor = UIColor(RosettaColors.secondaryText)
    contentView.addSubview(usernameLabel)

    // Truncated public key; the full value is exposed for copying via
    // `textToCopy` in updateProfileHeader.
    let keyLabel = CopyableLabel()
    keyLabel.textAlignment = .center
    keyLabel.font = .monospacedSystemFont(ofSize: 12, weight: .regular)
    keyLabel.textColor = UIColor(RosettaColors.tertiaryText)
    contentView.addSubview(keyLabel)
    publicKeyView = keyLabel
}
// MARK: - Setup Cards

/// Creates the settings cards. The biometric card is only added when the
/// device reports biometrics as available.
private func setupCards() {
    let appearance = makeSettingsCard(
        icon: "paintbrush.fill", title: "Appearance", iconColor: .systemBlue,
        subtitle: "Customize theme, wallpaper and chat appearance.",
        action: #selector(appearanceTapped)
    )
    contentView.addSubview(appearance)
    appearanceCard = appearance

    let updates = makeSettingsCard(
        icon: "arrow.triangle.2.circlepath", title: "Updates", iconColor: .systemGreen,
        subtitle: "You can check for new versions of the app here. Updates may include security improvements and new features.",
        action: #selector(updatesTapped)
    )
    contentView.addSubview(updates)
    updatesCard = updates

    if BiometricAuthManager.shared.isBiometricAvailable {
        let bio = makeBiometricCard()
        contentView.addSubview(bio)
        biometricCard = bio
    }

    let safety = makeSettingsCard(
        icon: "shield.lefthalf.filled", title: "Safety", iconColor: .systemPurple,
        subtitle: "You can learn more about your safety on the safety page, please make sure you are viewing the screen alone before proceeding to the safety page.",
        action: #selector(safetyTapped)
    )
    contentView.addSubview(safety)
    safetyCard = safety
}
// MARK: - Setup Footer

/// Adds the small caption shown under the cards.
private func setupFooter() {
    let caption = UILabel()
    caption.text = "rosetta – powering freedom"
    caption.font = .systemFont(ofSize: 12)
    caption.textColor = UIColor(RosettaColors.tertiaryText)
    caption.textAlignment = .center
    caption.tag = 100   // looked up by tag when framing in layoutAll

    let container = UIView()
    container.addSubview(caption)
    contentView.addSubview(container)
    footerView = container
}
// MARK: - Layout

/// Recomputes every frame top-to-bottom — toolbar, profile header, cards,
/// footer — then the scroll content size. Called from viewDidLayoutSubviews
/// on every layout pass, so it must stay cheap and idempotent.
private func layoutAll() {
    let w = view.bounds.width
    let safeTop = view.safeAreaInsets.top
    let safeBottom = view.safeAreaInsets.bottom

    // Scroll view: full screen
    scrollView.frame = view.bounds

    // Toolbar: pinned at top in safe area
    let toolbarH: CGFloat = 44
    toolbarView.frame = CGRect(x: 0, y: safeTop, width: w, height: toolbarH)

    darkModeHosting?.view.frame = CGRect(x: hPad, y: 0, width: 44, height: 44)
    editHosting?.view.frame = CGRect(x: w - hPad - 80, y: 0, width: 80, height: 44)

    // Content layout — `y` is the running vertical cursor.
    var y: CGFloat = safeTop + toolbarH + 15

    // Avatar (100×100, centered)
    let avatarSize: CGFloat = 100
    let avatarX = floor((w - avatarSize) / 2)
    avatarContainer.frame = CGRect(x: avatarX, y: y, width: avatarSize, height: avatarSize)
    layoutAvatarHosting()
    y += avatarSize + 12

    // Name — fixed 28pt row. (Fix: removed a dead `nameLabel.sizeToFit()`
    // call; its result was immediately discarded by this full frame
    // assignment, so it only cost an extra text measurement per pass.)
    nameLabel.frame = CGRect(x: hPad, y: y, width: w - hPad * 2, height: 28)
    y += 28

    // Username
    if !usernameLabel.isHidden {
        usernameLabel.frame = CGRect(x: hPad, y: y, width: w - hPad * 2, height: 20)
        y += 24
    }

    // Public key
    if let pkView = publicKeyView {
        pkView.frame = CGRect(x: hPad, y: y, width: w - hPad * 2, height: 16)
        y += 28
    }

    // Cards — 16pt gap above each; layoutCard advances `y` by the card height.
    let cardW = w - hPad * 2

    y += 16
    layoutCard(appearanceCard, x: hPad, y: &y, width: cardW)
    y += 16
    layoutCard(updatesCard, x: hPad, y: &y, width: cardW)

    if let bioCard = biometricCard {
        y += 16
        layoutCard(bioCard, x: hPad, y: &y, width: cardW)
    }

    y += 16
    layoutCard(safetyCard, x: hPad, y: &y, width: cardW)

    // Footer
    y += 32
    footerView.frame = CGRect(x: hPad, y: y, width: cardW, height: 20)
    if let label = footerView.viewWithTag(100) as? UILabel {
        label.frame = CGRect(x: 0, y: 0, width: cardW, height: 20)
    }
    y += 20

    // Content size — 100pt of extra bottom slack beyond the safe area.
    let contentH = y + safeBottom + 100
    contentView.frame = CGRect(x: 0, y: 0, width: w, height: contentH)
    scrollView.contentSize = CGSize(width: w, height: contentH)
}
/// Frames one card in place and advances the vertical cursor.
/// A card is: a 52pt row (tag 200) over a rounded background (tag 199),
/// plus an optional wrapping subtitle (tag 201) hanging below the row.
/// - Parameters:
///   - card: Card container built by one of the factory methods.
///   - x: Left edge in the content view.
///   - y: Vertical cursor; advanced by the card's total height on return.
///   - width: Card width; subtitle wraps inside `width - 32`.
private func layoutCard(_ card: UIView, x: CGFloat, y: inout CGFloat, width: CGFloat) {
    var extraH: CGFloat = 0
    if let caption = card.viewWithTag(201) as? UILabel, caption.text?.isEmpty == false {
        let textW = width - 32
        let fitted = caption.sizeThatFits(CGSize(width: textW, height: .greatestFiniteMagnitude))
        caption.frame = CGRect(x: 16, y: rowHeight + 8, width: textW, height: fitted.height)
        extraH = fitted.height + 16
    }

    card.frame = CGRect(x: x, y: y, width: width, height: rowHeight + extraH)

    // The rounded background covers only the row, not the subtitle.
    let rowFrame = CGRect(x: 0, y: 0, width: width, height: rowHeight)
    card.viewWithTag(199)?.frame = rowFrame
    card.viewWithTag(200)?.frame = rowFrame

    y += rowHeight + extraH
}
// MARK: - Avatar

/// Keeps the SwiftUI avatar hosting view sized to its container.
private func layoutAvatarHosting() {
    let size = avatarContainer.bounds.size
    guard size.width > 0, let hostingView = avatarHosting?.view else { return }
    hostingView.frame = CGRect(origin: .zero, size: size)
}

/// Rebuilds the SwiftUI avatar from the current view-model state, reusing
/// the existing hosting controller when one is already embedded.
private func updateAvatarHosting() {
    let root = AvatarView(
        initials: viewModel.initials,
        colorIndex: viewModel.avatarColorIndex,
        size: 100,
        isSavedMessages: false,
        image: avatarImage
    )

    if let existing = avatarHosting {
        existing.rootView = root
    } else {
        let created = UIHostingController(rootView: root)
        created.view.backgroundColor = .clear
        addChild(created)
        avatarContainer.addSubview(created.view)
        created.didMove(toParent: self)
        avatarHosting = created
    }
    layoutAvatarHosting()
}
// MARK: - Update Profile Header

/// Pushes the current view-model values into the header labels, avatar and
/// biometric toggle, then schedules a layout pass.
private func updateProfileHeader() {
    nameLabel.text = viewModel.headerName

    let username = viewModel.username
    usernameLabel.isHidden = username.isEmpty
    usernameLabel.text = username.isEmpty ? nil : "@\(username)"

    // Show a middle-truncated key; keep the full key available for copying.
    let key = viewModel.publicKey
    let shortened: String
    if key.count > 16 {
        shortened = String(key.prefix(8)) + "..." + String(key.suffix(6))
    } else {
        shortened = key
    }
    publicKeyView?.text = shortened
    publicKeyView?.textToCopy = key

    updateAvatarHosting()
    biometricSwitch.isOn = isBiometricEnabled
    view.setNeedsLayout()
}
// MARK: - Card Factory

/// Builds a tappable settings card: a rounded background (tag 199), a row
/// with icon + title + chevron (row tag 200, chevron tag 202) wired to
/// `action`, and a wrapping subtitle (tag 201) below the row. All frames
/// except the fixed icon/title ones are finalized later in layoutCard and
/// viewWillLayoutSubviews.
private func makeSettingsCard(
    icon: String, title: String, iconColor: UIColor,
    subtitle: String, action: Selector
) -> UIView {
    let card = UIView()

    // Rounded backdrop behind the row.
    let backdrop = UIView()
    backdrop.tag = 199
    backdrop.backgroundColor = cardFill
    backdrop.layer.cornerRadius = cardCornerRadius
    backdrop.layer.cornerCurve = .continuous
    card.addSubview(backdrop)

    // Tappable row.
    let tapRow = UIControl()
    tapRow.tag = 200
    tapRow.addTarget(self, action: action, for: .touchUpInside)
    card.addSubview(tapRow)

    // Colored icon badge.
    let badge = UIView()
    badge.backgroundColor = iconColor
    badge.layer.cornerRadius = iconCornerRadius
    badge.frame = CGRect(x: 16, y: (rowHeight - iconSize) / 2, width: iconSize, height: iconSize)
    let glyph = UIImageView(image: UIImage(
        systemName: icon,
        withConfiguration: UIImage.SymbolConfiguration(pointSize: 14, weight: .medium)
    ))
    glyph.tintColor = .white
    glyph.contentMode = .center
    glyph.frame = badge.bounds
    badge.addSubview(glyph)
    tapRow.addSubview(badge)

    // Title.
    let heading = UILabel()
    heading.text = title
    heading.font = .systemFont(ofSize: 17, weight: .medium)
    heading.textColor = UIColor(RosettaColors.Adaptive.text)
    heading.frame = CGRect(x: 16 + iconSize + 16, y: 0, width: 200, height: rowHeight)
    tapRow.addSubview(heading)

    // Disclosure chevron — snapped to the right edge during layout.
    let disclosure = UIImageView(image: UIImage(
        systemName: "chevron.right",
        withConfiguration: UIImage.SymbolConfiguration(pointSize: 14, weight: .semibold)
    ))
    disclosure.tag = 202
    disclosure.tintColor = .tertiaryLabel
    disclosure.contentMode = .center
    disclosure.frame = CGRect(x: 0, y: 0, width: 20, height: rowHeight)
    disclosure.autoresizingMask = [.flexibleLeftMargin]
    tapRow.addSubview(disclosure)

    // Wrapping subtitle under the row; measured in layoutCard.
    let caption = UILabel()
    caption.tag = 201
    caption.text = subtitle
    caption.font = .systemFont(ofSize: 13)
    caption.textColor = .secondaryLabel
    caption.numberOfLines = 0
    card.addSubview(caption)

    return card
}
/// Builds the biometric card: same visual structure as makeSettingsCard,
/// but the row is a plain view hosting `biometricSwitch` instead of a
/// tappable control with a chevron.
private func makeBiometricCard() -> UIView {
    let biometric = BiometricAuthManager.shared
    let card = UIView()

    // Rounded backdrop (tag 199; sized in layoutCard).
    let backdrop = UIView()
    backdrop.tag = 199
    backdrop.backgroundColor = cardFill
    backdrop.layer.cornerRadius = cardCornerRadius
    backdrop.layer.cornerCurve = .continuous
    card.addSubview(backdrop)

    // Row container (tag 200).
    let row = UIView()
    row.tag = 200
    card.addSubview(row)

    // Icon badge using the manager-provided biometric symbol.
    let badge = UIView()
    badge.backgroundColor = .systemBlue
    badge.layer.cornerRadius = iconCornerRadius
    badge.frame = CGRect(x: 16, y: (rowHeight - iconSize) / 2, width: iconSize, height: iconSize)
    let glyph = UIImageView(image: UIImage(
        systemName: biometric.biometricIconName,
        withConfiguration: UIImage.SymbolConfiguration(pointSize: 14, weight: .medium)
    ))
    glyph.tintColor = .white
    glyph.contentMode = .center
    glyph.frame = badge.bounds
    badge.addSubview(glyph)
    row.addSubview(badge)

    // Title — the manager-provided biometric name.
    let heading = UILabel()
    heading.text = biometric.biometricName
    heading.font = .systemFont(ofSize: 17, weight: .medium)
    heading.textColor = UIColor(RosettaColors.Adaptive.text)
    heading.frame = CGRect(x: 16 + iconSize + 16, y: 0, width: 200, height: rowHeight)
    row.addSubview(heading)

    // Toggle — positioned at the right edge in viewWillLayoutSubviews.
    biometricSwitch.isOn = isBiometricEnabled
    biometricSwitch.onTintColor = UIColor(RosettaColors.primaryBlue)
    biometricSwitch.addTarget(self, action: #selector(biometricToggled), for: .valueChanged)
    row.addSubview(biometricSwitch)

    // Subtitle (tag 201; wrapped and measured in layoutCard).
    let caption = UILabel()
    caption.tag = 201
    caption.text = "Use \(biometric.biometricName) to unlock Rosetta instead of entering your password."
    caption.font = .systemFont(ofSize: 13)
    caption.textColor = .secondaryLabel
    caption.numberOfLines = 0
    card.addSubview(caption)

    return card
}
// MARK: - Layout Card Fixups

/// Pins the card chevrons and the biometric switch to the right edge.
/// Fix: the old code read `row.bounds.width`, but row frames are assigned
/// later in the same pass (viewDidLayoutSubviews → layoutAll), so after a
/// rotation/resize these controls were positioned with the PREVIOUS pass's
/// width — one frame stale. Compute the row width directly from the view
/// bounds instead; it is exactly what layoutAll will assign (`w - hPad*2`).
override func viewWillLayoutSubviews() {
    super.viewWillLayoutSubviews()
    let rowW = view.bounds.width - hPad * 2

    // Position chevrons at right edge
    for card in [appearanceCard, updatesCard, safetyCard].compactMap({ $0 }) {
        if let row = card.viewWithTag(200), let chevron = row.viewWithTag(202) as? UIImageView {
            _ = row // chevron only exists inside a row; keep the lookup as a guard
            chevron.frame = CGRect(x: rowW - 36, y: 0, width: 20, height: rowHeight)
        }
    }

    // Position biometric switch
    if let bioCard = biometricCard, bioCard.viewWithTag(200) != nil {
        let switchW: CGFloat = 51
        biometricSwitch.frame = CGRect(
            x: rowW - switchW - 16,
            y: (rowHeight - 31) / 2,
            width: switchW, height: 31
        )
    }
}
// MARK: - Actions

/// Pushes the SwiftUI profile editor and flags editing state for the host.
private func editTapped() {
    let editor = ProfileEditView(
        onAddAccount: onAddAccount,
        displayName: .constant(viewModel.displayName),
        username: .constant(viewModel.username),
        publicKey: viewModel.publicKey,
        displayNameError: .constant(nil),
        usernameError: .constant(nil),
        pendingPhoto: .constant(nil)
    )
    let host = UIHostingController(rootView: editor)
    host.view.backgroundColor = UIColor(RosettaColors.Adaptive.background)
    onEditingStateChanged?(true)
    navigationController?.pushViewController(host, animated: true)
}
/// Wraps a SwiftUI detail screen in a hosting controller, flags the
/// detail-presented state for the SwiftUI container, and pushes it.
/// Shared by the three card actions below, which were previously three
/// copy-pasted identical bodies.
private func pushDetail<Content: View>(_ root: Content) {
    let hosting = UIHostingController(rootView: root)
    hosting.view.backgroundColor = UIColor(RosettaColors.Adaptive.background)
    onDetailStateChanged?(true)
    navigationController?.pushViewController(hosting, animated: true)
}

/// Appearance card tap → theme/wallpaper settings.
@objc private func appearanceTapped() {
    pushDetail(AppearanceView())
}

/// Updates card tap → update checker.
@objc private func updatesTapped() {
    pushDetail(UpdatesView())
}

/// Safety card tap → safety page; receives the logout callback.
@objc private func safetyTapped() {
    pushDetail(SafetyView(onLogout: onLogout))
}
/// Switch handler: turning on requires password confirmation first;
/// turning off tears the stored credential down immediately.
@objc private func biometricToggled() {
    if biometricSwitch.isOn {
        showBiometricPasswordPrompt()
    } else {
        disableBiometric()
    }
}

// MARK: - Biometric

/// Reads the persisted biometric preference for the current account and
/// mirrors it into the switch.
private func refreshBiometricState() {
    let account = viewModel.publicKey
    guard !account.isEmpty else { return }
    isBiometricEnabled = BiometricAuthManager.shared.isBiometricEnabled(forAccount: account)
    biometricSwitch.isOn = isBiometricEnabled
}
/// Asks for the account password before enabling biometric unlock.
/// Cancel rolls the switch back off; Enable hands the entered password to
/// `enableBiometric(password:)` for verification.
private func showBiometricPasswordPrompt() {
    let biometric = BiometricAuthManager.shared
    let prompt = UIAlertController(
        title: "Enable \(biometric.biometricName)",
        message: "Enter your password to securely save it for \(biometric.biometricName) unlock.",
        preferredStyle: .alert
    )
    prompt.addTextField {
        $0.placeholder = "Password"
        $0.isSecureTextEntry = true
    }

    let cancel = UIAlertAction(title: "Cancel", style: .cancel) { [weak self] _ in
        guard let self else { return }
        self.biometricSwitch.setOn(false, animated: true)
        self.isBiometricEnabled = false
    }
    let enable = UIAlertAction(title: "Enable", style: .default) { [weak self] _ in
        self?.enableBiometric(password: prompt.textFields?.first?.text ?? "")
    }
    prompt.addAction(cancel)
    prompt.addAction(enable)
    present(prompt, animated: true)
}
/// Verifies `password` against the account, then stores it for biometric
/// unlock and flips the preference on. Any failure rolls the toggle back
/// and shows an alert.
/// - Parameter password: Password entered in the biometric prompt.
private func enableBiometric(password: String) {
    guard !password.isEmpty else {
        // Empty entry — treat like a cancel.
        biometricSwitch.setOn(false, animated: true)
        isBiometricEnabled = false
        return
    }

    let pk = viewModel.publicKey
    Task {
        // Step 1: confirm the password is actually correct.
        do {
            _ = try await AccountManager.shared.unlock(password: password)
        } catch {
            showBiometricEnableError("Wrong password")
            return
        }
        // Step 2: persist it for biometric unlock. Fix: a keychain failure
        // here used to be reported as "Wrong password", which was
        // misleading — it now gets its own message.
        do {
            let biometric = BiometricAuthManager.shared
            try biometric.savePassword(password, forAccount: pk)
            biometric.setBiometricEnabled(true, forAccount: pk)
            isBiometricEnabled = true
            biometricSwitch.setOn(true, animated: true)
        } catch {
            showBiometricEnableError("Could not save the password for biometric unlock")
        }
    }
}

/// Rolls the biometric toggle back off and presents an error alert.
private func showBiometricEnableError(_ message: String) {
    isBiometricEnabled = false
    biometricSwitch.setOn(false, animated: true)
    let alert = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "OK", style: .default))
    present(alert, animated: true)
}
/// Deletes the stored password and turns the biometric preference off
/// for the current account.
private func disableBiometric() {
    let manager = BiometricAuthManager.shared
    let account = viewModel.publicKey
    manager.deletePassword(forAccount: account)
    manager.setBiometricEnabled(false, forAccount: account)
    isBiometricEnabled = false
}
}
|
||||
|
||||
// MARK: - SwiftUI Edit Button (needed for @MainActor closure compatibility)

/// Small glass-capsule "Edit" button rendered in SwiftUI so it visually
/// matches the toolbar's other glass controls (e.g. DarkModeButton).
private struct SettingsEditButton: View {
    /// Tap handler; @MainActor so it can call straight into the hosting
    /// view controller.
    let action: @MainActor () -> Void

    var body: some View {
        Button(action: action) {
            Text("Edit")
                .font(.system(size: 17, weight: .medium))
                .foregroundStyle(RosettaColors.Adaptive.text)
                // Fixed 44pt hit height; horizontal padding before the
                // capsule so the glass hugs the padded label.
                .frame(height: 44)
                .padding(.horizontal, 10)
        }
        .buttonStyle(.plain)
        .glassCapsule()
    }
}
// MARK: - SwiftUI Bridge

/// Hosts the UIKit settings stack inside SwiftUI. State flows one way:
/// the view controller reports editing/detail transitions back through
/// the two bindings.
struct SettingsContainerView: UIViewControllerRepresentable {
    var onLogout: (() -> Void)?
    var onAddAccount: ((AuthScreen) -> Void)?
    @Binding var isEditingProfile: Bool
    @Binding var isDetailPresented: Bool

    func makeUIViewController(context: Context) -> UINavigationController {
        let settings = SettingsViewController()
        settings.onLogout = onLogout
        settings.onAddAccount = onAddAccount
        settings.onEditingStateChanged = { flag in
            // Hop to the next runloop tick so SwiftUI state is not mutated
            // mid-view-update.
            DispatchQueue.main.async { isEditingProfile = flag }
        }
        settings.onDetailStateChanged = { flag in
            DispatchQueue.main.async { isDetailPresented = flag }
        }

        let navigation = UINavigationController(rootViewController: settings)
        navigation.setNavigationBarHidden(true, animated: false)
        navigation.view.backgroundColor = UIColor(RosettaColors.Adaptive.background)
        return navigation
    }

    func updateUIViewController(_ nav: UINavigationController, context: Context) {
        // Intentionally empty — state flows UIKit → SwiftUI via the
        // callbacks wired above, not the other way.
    }
}
Reference in New Issue
Block a user