бейдж упоминаний в чат-листе, прямая навигация по @mention, тап на аватарку → профиль, RequestChats на UIKit

This commit is contained in:
2026-04-12 21:40:32 +05:00
parent 86a400b543
commit 30f333ef90
77 changed files with 6346 additions and 1362 deletions

View File

@@ -48,6 +48,8 @@ struct ChatDetailView: View {
@State private var forwardingMessage: ChatMessage?
@State private var pendingGroupInvite: String?
@State private var pendingGroupInviteTitle: String?
@State private var mentionChatRoute: ChatRoute?
@State private var avatarProfileRoute: ChatRoute?
@State private var messageToDelete: ChatMessage?
// Image viewer is presented via ImageViewerPresenter (UIKit overFullScreen),
// not via SwiftUI fullScreenCover, to avoid bottom-sheet slide-up animation.
@@ -55,8 +57,9 @@ struct ChatDetailView: View {
@State private var scrollToMessageId: String?
/// ID of message currently highlighted after scroll-to-reply navigation.
@State private var highlightedMessageId: String?
/// Triggers NativeMessageList to scroll to bottom (button tap).
@State private var scrollToBottomRequested = false
/// Triggers NativeMessageList to scroll to bottom (button tap). Increment-only counter
/// avoids @Binding write-back cycle (a true→false round-trip causes double updateUIViewController).
@State private var scrollToBottomTrigger: UInt = 0
// Multi-select
@State private var isMultiSelectMode = false
@@ -195,7 +198,9 @@ struct ChatDetailView: View {
@ViewBuilder
private var content: some View {
#if DEBUG
let _ = PerformanceLogger.shared.track("chatDetail.bodyEval")
#endif
// iOS 26+ and iOS < 26 use the same UIKit ComposerView bridge.
// #available branches stay explicit to keep platform separation intact.
Group {
@@ -269,6 +274,38 @@ struct ChatDetailView: View {
selectedMessageIds.insert(msgId)
}
}
cellActions.onMentionTap = { [self] username in
// @all → no action (desktop parity)
guard username.lowercased() != "all" else { return }
// Tap on own username → open Saved Messages
let myUsername = AccountManager.shared.currentAccount?.username?.lowercased() ?? ""
if !myUsername.isEmpty && username.lowercased() == myUsername {
if let saved = DialogRepository.shared.sortedDialogs.first(where: { $0.isSavedMessages }) {
mentionChatRoute = ChatRoute(dialog: saved)
} else if let myKey = AccountManager.shared.currentAccount?.publicKey {
mentionChatRoute = ChatRoute(publicKey: myKey, title: "Saved Messages", username: "", verified: 0)
}
return
}
// Find dialog by username → push directly (no flash to chat list)
if let dialog = DialogRepository.shared.sortedDialogs.first(where: {
$0.opponentUsername.lowercased() == username.lowercased()
}) {
mentionChatRoute = ChatRoute(dialog: dialog)
}
}
cellActions.onAvatarTap = { [self] senderKey in
// Build route from sender's public key → open profile
if let dialog = DialogRepository.shared.sortedDialogs.first(where: {
$0.opponentKey == senderKey
}) {
avatarProfileRoute = ChatRoute(dialog: dialog)
} else {
// User not in dialogs → build minimal route from key
let title = String(senderKey.prefix(8))
avatarProfileRoute = ChatRoute(publicKey: senderKey, title: title, username: "", verified: 0)
}
}
cellActions.onGroupInviteOpen = { dialogKey in
let title = DialogRepository.shared.dialogs[dialogKey]?.opponentTitle ?? "Group"
let route = ChatRoute(groupDialogKey: dialogKey, title: title)
@@ -356,6 +393,12 @@ struct ChatDetailView: View {
.navigationDestination(isPresented: $showGroupInfo) {
GroupInfoView(groupDialogKey: route.publicKey)
}
.navigationDestination(item: $mentionChatRoute) { chatRoute in
ChatDetailView(route: chatRoute)
}
.navigationDestination(item: $avatarProfileRoute) { profileRoute in
OpponentProfileView(route: profileRoute)
}
.sheet(isPresented: $showForwardPicker) {
ForwardChatPickerView { targetRoutes in
showForwardPicker = false
@@ -687,28 +730,11 @@ private extension ChatDetailView {
}
ToolbarItem(placement: .navigationBarTrailing) {
HStack(spacing: 8) {
if canStartCall {
Button { startVoiceCall() } label: {
Image(systemName: "phone.fill")
.font(.system(size: 14, weight: .semibold))
.foregroundStyle(RosettaColors.Adaptive.text)
.frame(width: 36, height: 36)
.contentShape(Circle())
.background {
glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text)
}
}
.buttonStyle(.plain)
.accessibilityLabel("Start Call")
}
Button { openProfile() } label: {
ChatDetailToolbarAvatar(route: route, size: 35)
.frame(width: 36, height: 36)
.contentShape(Circle())
.background { glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text) }
}
Button { openProfile() } label: {
ChatDetailToolbarAvatar(route: route, size: 35)
.frame(width: 36, height: 36)
.contentShape(Circle())
.background { glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text) }
}
.buttonStyle(.plain)
}
@@ -735,28 +761,11 @@ private extension ChatDetailView {
}
ToolbarItem(placement: .navigationBarTrailing) {
HStack(spacing: 8) {
if canStartCall {
Button { startVoiceCall() } label: {
Image(systemName: "phone.fill")
.font(.system(size: 15, weight: .semibold))
.foregroundStyle(RosettaColors.Adaptive.text)
.frame(width: 44, height: 44)
.contentShape(Circle())
.background {
glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text)
}
}
.buttonStyle(.plain)
.accessibilityLabel("Start Call")
}
Button { openProfile() } label: {
ChatDetailToolbarAvatar(route: route, size: 38)
.frame(width: 44, height: 44)
.contentShape(Circle())
.background { glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text) }
}
Button { openProfile() } label: {
ChatDetailToolbarAvatar(route: route, size: 38)
.frame(width: 44, height: 44)
.contentShape(Circle())
.background { glass(shape: .circle, strokeOpacity: 0.22, strokeColor: RosettaColors.Adaptive.text) }
}
.buttonStyle(.plain)
}
@@ -1012,10 +1021,8 @@ private extension ChatDetailView {
@ViewBuilder
func messagesList(maxBubbleWidth: CGFloat) -> some View {
if viewModel.isLoading && messages.isEmpty {
// Android parity: skeleton placeholder while loading from DB
ChatDetailSkeletonView(maxBubbleWidth: maxBubbleWidth)
} else if route.isSystemAccount && messages.isEmpty {
// Skeleton loading is now handled inside NativeMessageListController (UIKit)
if route.isSystemAccount && messages.isEmpty {
emptyStateView
} else {
messagesScrollView(maxBubbleWidth: maxBubbleWidth)
@@ -1100,7 +1107,7 @@ private extension ChatDetailView {
) : nil,
scrollToMessageId: scrollToMessageId,
shouldScrollToBottom: shouldScrollOnNextMessage,
scrollToBottomRequested: $scrollToBottomRequested,
scrollToBottomTrigger: scrollToBottomTrigger,
onAtBottomChange: { atBottom in
isAtBottom = atBottom
SessionManager.shared.resetIdleTimer()
@@ -1112,6 +1119,13 @@ private extension ChatDetailView {
onPaginate: {
Task { await viewModel.loadMore() }
},
onBottomPaginate: {
Task { await viewModel.loadNewer() }
},
onJumpToBottom: {
viewModel.jumpToBottom()
},
hasNewerMessagesFlag: viewModel.hasNewerMessages,
onTapBackground: {
isInputFocused = false
},

View File

@@ -19,6 +19,12 @@ final class ChatDetailViewModel: ObservableObject {
@Published private(set) var hasMoreMessages: Bool = true
/// Pagination: guard against concurrent loads.
@Published private(set) var isLoadingMore: Bool = false
/// Reverse pagination: true while newer messages exist below current window.
@Published private(set) var hasNewerMessages: Bool = false
/// True when user scrolled up past the initial window (not showing latest).
@Published private(set) var isDetachedFromBottom: Bool = false
/// Guard against concurrent downward loads.
private var isLoadingNewer: Bool = false
private var cancellables = Set<AnyCancellable>()
@@ -50,7 +56,13 @@ final class ChatDetailViewModel: ObservableObject {
}
.debounce(for: .milliseconds(50), scheduler: DispatchQueue.main)
.removeDuplicates { (lhs: [ChatMessage], rhs: [ChatMessage]) -> Bool in
// O(1) fast path for common cases (insert/delete).
guard lhs.count == rhs.count else { return false }
guard lhs.first?.id == rhs.first?.id,
lhs.last?.id == rhs.last?.id else { return false }
// O(n) scan for status/read changes needed to detect mid-array
// delivery ACK and read receipt updates. ~0.3ms for 3000 messages,
// runs at most once per 50ms debounce — negligible vs layout cost.
for i in lhs.indices {
if lhs[i].id != rhs[i].id ||
lhs[i].deliveryStatus != rhs[i].deliveryStatus ||
@@ -109,12 +121,13 @@ final class ChatDetailViewModel: ObservableObject {
}
/// Pagination: load older messages from SQLite when user scrolls to top.
/// Async: DB read + AES decryption run on background thread — zero main-thread freeze.
func loadMore() async {
guard !isLoadingMore, hasMoreMessages else { return }
guard let earliest = messages.first else { return }
isLoadingMore = true
let older = MessageRepository.shared.loadOlderMessages(
let older = await MessageRepository.shared.loadOlderMessagesAsync(
for: dialogKey,
beforeTimestamp: earliest.timestamp,
beforeMessageId: earliest.id,
@@ -124,10 +137,40 @@ final class ChatDetailViewModel: ObservableObject {
if older.count < MessageRepository.pageSize {
hasMoreMessages = false
}
// messages will update via Combine pipeline (repo already prepends to cache).
isLoadingMore = false
}
/// Reverse pagination: pulls the next page of newer messages below the
/// current window when the user scrolls back toward the bottom.
/// Async: DB read + AES decryption run on a background thread.
/// No-ops while a downward load is in flight or when the bottom is reached.
func loadNewer() async {
    if isLoadingNewer || !hasNewerMessages { return }
    guard let newest = messages.last else { return }
    isLoadingNewer = true
    let page = await MessageRepository.shared.loadNewerMessagesAsync(
        for: dialogKey,
        afterTimestamp: newest.timestamp,
        afterMessageId: newest.id,
        limit: MessageRepository.pageSize
    )
    // A short page means history below the window is exhausted:
    // we are back at the live bottom of the conversation.
    if page.count < MessageRepository.pageSize {
        hasNewerMessages = false
        isDetachedFromBottom = false
    }
    isLoadingNewer = false
}
/// Jump to latest messages when detached from bottom (scroll-to-bottom button).
/// No-op when the newest window is already what the list is showing.
func jumpToBottom() {
    guard isDetachedFromBottom else { return }
    MessageRepository.shared.reloadLatest(for: dialogKey)
    // Back at the live bottom: nothing newer below, upward pagination re-armed.
    hasNewerMessages = false
    isDetachedFromBottom = false
    hasMoreMessages = true
}
/// Ensures a target message is present in current dialog cache before scroll-to-message.
/// Returns true when the message is available to the UI list.
func ensureMessageLoaded(messageId: String) async -> Bool {

View File

@@ -29,6 +29,33 @@ protocol ComposerViewDelegate: AnyObject {
/// Frame-based layout (Telegram pattern), no Auto Layout inside.
final class ComposerView: UIView, UITextViewDelegate {
/// Injectable runtime preconditions checked before starting a voice recording.
/// Production code uses `.live`; tests can substitute stub closures.
struct VoiceRecordingRuntimeGuards {
    /// True when no call is in progress (recording must not start mid-call).
    var isCallIdle: () -> Bool
    /// Requests system microphone access; resolves with the grant result.
    var requestMicrophonePermission: () async -> Bool
    /// True when at least the given number of bytes is free on disk.
    var hasSufficientDiskSpace: (Int64) -> Bool

    /// Production wiring: CallManager, AudioRecorder, and the file system.
    static let live = VoiceRecordingRuntimeGuards(
        isCallIdle: { CallManager.shared.uiState.phase == .idle },
        requestMicrophonePermission: { await AudioRecorder.requestMicrophonePermission() },
        hasSufficientDiskSpace: { minBytes in
            Self.defaultHasSufficientDiskSpace(minBytes: minBytes)
        }
    )

    /// Free-space probe over the app's home volume. Takes the smaller of the
    /// "important usage" and generic capacities. Errs on the permissive side:
    /// when the volume query itself fails, recording is not blocked.
    private static func defaultHasSufficientDiskSpace(minBytes: Int64) -> Bool {
        let homeURL = URL(fileURLWithPath: NSHomeDirectory(), isDirectory: true)
        let capacityKeys: Set<URLResourceKey> = [
            .volumeAvailableCapacityForImportantUsageKey,
            .volumeAvailableCapacityKey
        ]
        guard let resources = try? homeURL.resourceValues(forKeys: capacityKeys) else {
            return true
        }
        let importantBytes = resources.volumeAvailableCapacityForImportantUsage ?? Int64.max
        let genericBytes = Int64(resources.volumeAvailableCapacity ?? Int.max)
        return min(importantBytes, genericBytes) >= minBytes
    }
}
weak var delegate: ComposerViewDelegate?
// MARK: - Public State
@@ -42,6 +69,23 @@ final class ComposerView: UIView, UITextViewDelegate {
/// from auto-restoring first responder on transition cancellation.
var isFocusBlocked = false
// MARK: - Mention Autocomplete
var isGroupChat: Bool = false
var groupDialogKey: String = ""
/// Autocomplete table added as subview of inputContainer (unified glass).
private var mentionTableView: UITableView?
private var mentionCandidates: [MentionCandidate] = []
private var mentionCandidatesCache: [MentionCandidate]?
/// Height of the autocomplete section inside inputContainer.
private var mentionAutocompleteHeight: CGFloat = 0
/// Resolved members from PacketSearch (not dependent on DialogRepository having a dialog).
private var resolvedMembers: [String: (username: String, title: String)] = [:]
private var mentionSearchHandlerId: UUID?
private let mentionRowHeight: CGFloat = 42
private let mentionMaxRows: CGFloat = 5
// MARK: - Subviews
// Attach button (glass circle, 42×42)
@@ -77,7 +121,7 @@ final class ComposerView: UIView, UITextViewDelegate {
private var attachIconLayer: CAShapeLayer?
private var emojiIconLayer: CAShapeLayer?
private var sendIconLayer: CAShapeLayer?
private var micIconLayer: CAShapeLayer?
private var micIconView: UIImageView?
// MARK: - Layout Constants
@@ -109,6 +153,7 @@ final class ComposerView: UIView, UITextViewDelegate {
private var recordingPreviewPanel: RecordingPreviewPanel?
private var recordingStartTask: Task<Void, Never>?
private var recordingSendAccessibilityButton: UIButton?
private var isPreviewReplacingInputRow = false
private(set) var isRecording = false
private(set) var isRecordingLocked = false
private(set) var recordingFlowState: VoiceRecordingFlowState = .idle
@@ -119,8 +164,19 @@ final class ComposerView: UIView, UITextViewDelegate {
private(set) var lastRecordedWaveform: [Float] = []
private(set) var lastVoiceSendTransitionSource: VoiceSendTransitionSource?
private let minVoiceDuration: TimeInterval = 0.5
private let minFreeDiskBytes: Int64 = 8 * 1024 * 1024
private let minVoiceDuration = VoiceRecordingParityConstants.minVoiceDuration
private let minFreeDiskBytes = VoiceRecordingParityConstants.minFreeDiskBytes
var runtimeGuards: VoiceRecordingRuntimeGuards = .live
/// How a voice-session teardown treats an already-recorded draft
/// (names suggest: keep the file for later sending vs. delete it).
private enum VoiceSessionCleanupMode {
case preserveRecordedDraft
case discardRecording
}
/// Visual flavor of the session dismissal (normal finish vs. user cancel).
private enum VoiceSessionDismissStyle {
case standard
case cancel
}
// MARK: - Init
@@ -207,6 +263,7 @@ final class ComposerView: UIView, UITextViewDelegate {
textView.placeholderLabel.text = "Message"
textView.placeholderLabel.font = .systemFont(ofSize: 17, weight: .regular)
textView.placeholderLabel.textColor = .placeholderText
textView.accessibilityIdentifier = "voice.composer.textView"
textView.trackingView.onHeightChange = { [weak self] height in
guard let self else { return }
@@ -251,19 +308,18 @@ final class ComposerView: UIView, UITextViewDelegate {
micGlass.isCircle = true
micGlass.isUserInteractionEnabled = false
micButton.addSubview(micGlass)
let micIcon = makeIconLayer(
pathData: TelegramIconPath.microphone,
viewBox: CGSize(width: 18, height: 24),
targetSize: CGSize(width: 18, height: 24),
color: .label
)
micButton.layer.addSublayer(micIcon)
micIconLayer = micIcon
let micIconView = UIImageView(image: VoiceRecordingAssets.image(.iconMicrophone, templated: true))
micIconView.contentMode = .center
micIconView.tintColor = .label
micIconView.isUserInteractionEnabled = false
micButton.addSubview(micIconView)
self.micIconView = micIconView
micButton.tag = 4
micButton.recordingDelegate = self
micButton.isAccessibilityElement = true
micButton.accessibilityLabel = "Voice message"
micButton.accessibilityHint = "Hold to record voice message. Slide left to cancel or up to lock."
micButton.accessibilityIdentifier = "voice.mic.button"
addSubview(micButton)
updateThemeColors()
@@ -340,6 +396,65 @@ final class ComposerView: UIView, UITextViewDelegate {
return lastVoiceSendTransitionSource
}
#if DEBUG
/// Debug-only: toggles the recording-stop accessibility hit area directly.
func debugSetRecordingStopAccessibilityAreaEnabled(_ isEnabled: Bool) {
updateRecordingSendAccessibilityArea(isEnabled: isEnabled)
}
/// Debug-only: fabricates a finished-recording draft preview without running
/// a live recording session, mirroring the production stop path (panel placed
/// over the input container, flow state moved to `.draftPreview`).
func debugShowPreviewReplacingInputRow(fileURL: URL, duration: TimeInterval, waveform: [Float]) {
// Seed the draft state that the send path reads later.
lastRecordedURL = fileURL
lastRecordedDuration = duration
lastRecordedWaveform = waveform
setPreviewRowReplacement(true)
// Drop any stale panel before installing the new one.
recordingPreviewPanel?.removeFromSuperview()
let preview = RecordingPreviewPanel(
frame: inputContainer.bounds,
fileURL: fileURL,
duration: duration,
waveform: waveform
)
preview.delegate = self
inputContainer.addSubview(preview)
recordingPreviewPanel = preview
isRecording = false
isRecordingLocked = false
setRecordingFlowState(.draftPreview)
}
/// Debug-only: the preview panel's frame converted to this view's coordinates,
/// or nil when no panel is mounted.
var debugPreviewPanelFrame: CGRect? {
guard let recordingPreviewPanel else { return nil }
return recordingPreviewPanel.convert(recordingPreviewPanel.bounds, to: self)
}
/// Debug-only: exposes whether the preview panel currently replaces the input row.
var debugIsPreviewReplacingInputRow: Bool {
isPreviewReplacingInputRow
}
/// Debug-only: forces the voice-session flow/mic state machines into a given
/// configuration so tests can assert transitions without real recording.
func debugForceVoiceSessionState(flow: VoiceRecordingFlowState, mic: VoiceRecordingState) {
setRecordingFlowState(flow)
isRecording = flow != .idle
// Locked for every state past the initial hold-to-record phase.
isRecordingLocked = flow == .recordingLocked || flow == .waitingForPreview || flow == .draftPreview
micButton.debugSetRecordingState(mic)
}
/// Debug-only: tears down every recording-related overlay/panel and resets
/// session state. `skipAudioCleanup: true` leaves the recorded file untouched.
func debugFinalizeVoiceSession(skipAudioCleanup: Bool = true) {
resetVoiceSessionState(skipAudioCleanup: skipAudioCleanup)
recordingOverlay?.dismiss()
recordingOverlay = nil
recordingLockView?.removeFromSuperview()
recordingLockView = nil
recordingPanel?.removeFromSuperview()
recordingPanel = nil
recordingPreviewPanel?.removeFromSuperview()
recordingPreviewPanel = nil
}
/// Debug-only: the mic button's current recording state.
var debugMicRecordingState: VoiceRecordingState {
micButton.recordingState
}
#endif
/// Single internal write point for `recordingFlowState`
/// (the property is `private(set)` for outside readers).
private func setRecordingFlowState(_ state: VoiceRecordingFlowState) {
recordingFlowState = state
}
@@ -357,8 +472,8 @@ final class ComposerView: UIView, UITextViewDelegate {
// Text row height = textViewHeight (clamped)
let textRowH = textViewHeight
// Input container inner height = padding + reply + text row + padding
let inputInnerH = innerPadding + replyH + textRowH + innerPadding
// Input container inner height = mention autocomplete + padding + reply + text row + padding
let inputInnerH = mentionAutocompleteHeight + innerPadding + replyH + textRowH + innerPadding
let inputContainerH = max(minInputContainerHeight, inputInnerH)
// Main bar height
@@ -377,15 +492,23 @@ final class ComposerView: UIView, UITextViewDelegate {
centerIconLayer(in: attachButton, iconSize: CGSize(width: 21, height: 24))
// Mic button
let showMic = !isSendVisible
let showMic = !isSendVisible && !isPreviewReplacingInputRow
let micX = w - horizontalPadding - buttonSize
let micY = topPadding + mainBarH - buttonSize
micButton.frame = CGRect(x: micX, y: micY, width: buttonSize, height: buttonSize)
micGlass.frame = micButton.bounds
centerIconLayer(in: micButton, iconSize: CGSize(width: 18, height: 24))
if let micIconView {
let iconSize = micIconView.image?.size ?? CGSize(width: 30, height: 30)
micIconView.frame = CGRect(
x: floor((micButton.bounds.width - iconSize.width) / 2.0),
y: floor((micButton.bounds.height - iconSize.height) / 2.0),
width: iconSize.width,
height: iconSize.height
)
}
// Input container
let inputX = attachX + buttonSize + innerSpacing
let inputX = isPreviewReplacingInputRow ? horizontalPadding : (attachX + buttonSize + innerSpacing)
let micWidth: CGFloat = showMic ? (buttonSize + innerSpacing) : 0
let inputW = w - inputX - horizontalPadding - micWidth
let inputY = topPadding + mainBarH - inputContainerH
@@ -398,15 +521,21 @@ final class ComposerView: UIView, UITextViewDelegate {
inputGlass.fixedCornerRadius = cornerRadius
inputGlass.applyCornerRadius()
// Reply bar inside input container
// Mention autocomplete table (inside inputContainer, at the top)
if let tv = mentionTableView {
tv.frame = CGRect(x: 0, y: 0, width: inputW, height: mentionAutocompleteHeight)
tv.isHidden = mentionAutocompleteHeight < 1
}
// Reply bar inside input container (shifted down by autocomplete height)
let replyX: CGFloat = 6
let replyW = inputW - replyX - 4
replyBar.frame = CGRect(x: replyX, y: innerPadding, width: replyW, height: replyH)
replyBar.frame = CGRect(x: replyX, y: mentionAutocompleteHeight + innerPadding, width: replyW, height: replyH)
layoutReplyBar(width: replyW, height: replyH)
// Text view inside input container
// Text view inside input container (shifted down by autocomplete height)
let textX: CGFloat = innerPadding + 6
let textY = innerPadding + replyH
let textY = mentionAutocompleteHeight + innerPadding + replyH
let sendExtraW: CGFloat = isSendVisible ? sendButtonWidth : 0
let emojiW: CGFloat = 20
let emojiTrailing: CGFloat = 8 + sendExtraW
@@ -430,6 +559,9 @@ final class ComposerView: UIView, UITextViewDelegate {
if recordingSendAccessibilityButton != nil {
updateRecordingSendAccessibilityArea(isEnabled: true)
}
if let recordingPreviewPanel {
recordingPreviewPanel.frame = inputContainer.bounds
}
// Report height
if abs(totalH - currentHeight) > 0.5 {
@@ -493,7 +625,7 @@ final class ComposerView: UIView, UITextViewDelegate {
attachIconLayer?.fillColor = UIColor.label.cgColor
emojiIconLayer?.fillColor = UIColor.secondaryLabel.cgColor
sendIconLayer?.fillColor = UIColor.white.cgColor
micIconLayer?.fillColor = UIColor.label.cgColor
micIconView?.tintColor = .label
replyTitleLabel.textColor = .label
replyPreviewLabel.textColor = .label
@@ -541,7 +673,7 @@ final class ComposerView: UIView, UITextViewDelegate {
private func reportHeightIfChanged() {
let replyH: CGFloat = isReplyVisible ? 46 : 0
let inputInnerH = innerPadding + replyH + textViewHeight + innerPadding
let inputInnerH = mentionAutocompleteHeight + innerPadding + replyH + textViewHeight + innerPadding
let inputContainerH = max(minInputContainerHeight, inputInnerH)
let mainBarH = max(buttonSize, inputContainerH)
let totalH = topPadding + mainBarH + bottomPadding
@@ -610,6 +742,7 @@ final class ComposerView: UIView, UITextViewDelegate {
delegate?.composerTextDidChange(self, text: textView.text ?? "")
recalculateTextHeight()
updateSendMicVisibility(animated: true)
updateMentionAutocomplete()
}
// MARK: - Actions
@@ -629,6 +762,277 @@ final class ComposerView: UIView, UITextViewDelegate {
}
}
// MARK: - Mention Autocomplete Logic
/// Recomputes mention-autocomplete visibility from the current text and caret.
/// Shows the list only in group chats, when an active "@query" sits at the
/// cursor and at least one candidate matches; hides it otherwise.
private func updateMentionAutocomplete() {
    guard isGroupChat, let query = extractMentionQuery() else {
        hideMentionAutocomplete()
        return
    }
    let matches = loadAndFilterCandidates(query: query)
    guard !matches.isEmpty else {
        hideMentionAutocomplete()
        return
    }
    showMentionAutocomplete(candidates: matches)
}
/// Extracts the @query at the cursor. Returns nil if no active mention trigger.
///
/// Scans backward from the caret for an "@" that starts a word (beginning of
/// text or preceded by whitespace/newline). Returns "" when the user has typed
/// just "@" (show all candidates).
private func extractMentionQuery() -> String? {
    let text = textView.text ?? ""
    guard !text.isEmpty else { return nil }
    let nsText = text as NSString
    let cursorLocation = textView.selectedRange.location
    guard cursorLocation > 0, cursorLocation <= nsText.length else { return nil }
    // Whitespace test over a raw UTF-16 unit. Surrogate halves (emoji etc.)
    // have no Unicode.Scalar — treat them as non-whitespace instead of
    // force-unwrapping: the previous `Unicode.Scalar(char)!` crashed when
    // the backward scan crossed a non-BMP character.
    func isWhitespace(_ unit: unichar) -> Bool {
        guard let scalar = Unicode.Scalar(unit) else { return false }
        return CharacterSet.whitespacesAndNewlines.contains(scalar)
    }
    let atSign = unichar(Character("@").asciiValue!) // "@" is ASCII; unwrap is safe
    // Scan backward from cursor to find "@"
    var atIndex: Int?
    for i in stride(from: cursorLocation - 1, through: 0, by: -1) {
        let char = nsText.character(at: i)
        if char == atSign {
            // "@" must be at start or preceded by whitespace/newline
            if i == 0 || isWhitespace(nsText.character(at: i - 1)) {
                atIndex = i
            }
            break
        }
        // Stop if we hit whitespace before finding "@"
        if isWhitespace(char) { break }
    }
    guard let idx = atIndex else { return nil }
    let queryStart = idx + 1 // after "@"
    let queryLength = cursorLocation - queryStart
    if queryLength < 0 { return nil }
    if queryLength == 0 { return "" } // just "@" typed, show all
    return nsText.substring(with: NSRange(location: queryStart, length: queryLength))
}
/// Returns group members filtered by `query` — case-insensitive prefix match
/// on username or title. The full candidate list is built lazily and cached.
private func loadAndFilterCandidates(query: String) -> [MentionCandidate] {
    let all: [MentionCandidate]
    if let cached = mentionCandidatesCache {
        all = cached
    } else {
        let resolved = resolveMentionCandidates()
        mentionCandidatesCache = resolved
        all = resolved
    }
    let needle = query.lowercased()
    var filtered: [MentionCandidate] = []
    // Synthetic @all entry leads the list when it matches the query.
    if needle.isEmpty || "all".hasPrefix(needle) {
        filtered.append(.allMembers())
    }
    filtered.append(contentsOf: all.filter { candidate in
        needle.isEmpty
            || candidate.username.lowercased().hasPrefix(needle)
            || candidate.title.lowercased().hasPrefix(needle)
    })
    return filtered
}
/// Resolves the group's member list into mention candidates, sorted by
/// lowercased username. Per-member lookup order: DialogRepository (users with
/// a prior direct chat), then the `resolvedMembers` cache (PacketSearch
/// responses). Members with no locally known username are fetched in the
/// background via PacketSearch with a dedicated handler — this does not rely
/// on DialogRepository, whose updateUserInfo silently drops unknown users.
private func resolveMentionCandidates() -> [MentionCandidate] {
    let myKey = SessionManager.shared.currentPublicKey
    guard let cached = GroupRepository.shared.cachedMembers(
        account: myKey, groupDialogKey: groupDialogKey
    ) else { return [] }
    var known: [MentionCandidate] = []
    var pendingKeys: [String] = []
    for memberKey in cached.memberKeys where memberKey != myKey {
        // 1. Users with a prior direct chat.
        let dialog = DialogRepository.shared.dialogs[memberKey]
        let dialogUsername = dialog?.opponentUsername ?? ""
        if !dialogUsername.isEmpty {
            let dialogTitle = dialog?.opponentTitle ?? ""
            known.append(MentionCandidate(
                username: dialogUsername,
                title: dialogTitle.isEmpty ? dialogUsername : dialogTitle,
                publicKey: memberKey, isAll: false
            ))
            continue
        }
        // 2. Names captured earlier from PacketSearch responses.
        if let resolved = resolvedMembers[memberKey], !resolved.username.isEmpty {
            known.append(MentionCandidate(
                username: resolved.username,
                title: resolved.title.isEmpty ? resolved.username : resolved.title,
                publicKey: memberKey, isAll: false
            ))
            continue
        }
        pendingKeys.append(memberKey)
    }
    if !pendingKeys.isEmpty {
        fetchUnknownMembers(pendingKeys)
    }
    return known.sorted { $0.username.lowercased() < $1.username.lowercased() }
}
/// Sends PacketSearch for unknown members using a dedicated search channel.
/// Registers its own result handler to capture responses directly — bypasses
/// DialogRepository (which silently drops users without existing dialog
/// entries). When the whole batch has answered, the candidate cache is
/// invalidated and the autocomplete recomputed.
private func fetchUnknownMembers(_ keys: [String]) {
    guard let privKeyHash = SessionManager.shared.privateKeyHash else { return }
    // Bug fix: a previous batch may still be in flight. Overwriting
    // `mentionSearchHandlerId` below without removal leaked the old handler
    // inside ProtocolManager forever; deregister it first.
    if let staleId = mentionSearchHandlerId {
        ProtocolManager.shared.removeSearchResultHandler(staleId)
        mentionSearchHandlerId = nil
    }
    // Register one-shot handler per batch.
    let searchId = UUID()
    var remaining = keys.count
    let handlerId = ProtocolManager.shared.addSearchResultHandler(channel: .ui(searchId)) { [weak self] packet in
        Task { @MainActor [weak self] in
            guard let self else { return }
            for user in packet.users where !user.username.isEmpty {
                self.resolvedMembers[user.publicKey] = (username: user.username, title: user.title)
            }
            remaining -= 1
            if remaining <= 0 {
                // All responses received — rebuild cache and refresh the UI.
                if let hid = self.mentionSearchHandlerId {
                    ProtocolManager.shared.removeSearchResultHandler(hid)
                    self.mentionSearchHandlerId = nil
                }
                self.mentionCandidatesCache = nil
                self.updateMentionAutocomplete()
            }
        }
    }
    mentionSearchHandlerId = handlerId
    for key in keys {
        var packet = PacketSearch()
        packet.search = key
        packet.privateKey = privKeyHash
        ProtocolManager.shared.sendSearchPacket(packet, channel: .ui(searchId))
    }
}
/// Lazily creates the autocomplete table and mounts it inside `inputContainer`
/// so it shares the unified glass background. Idempotent — returns immediately
/// once the table exists.
private func ensureMentionTableView() {
    if mentionTableView != nil { return }
    let table = UITableView(frame: .zero, style: .plain)
    table.backgroundColor = .clear
    table.separatorStyle = .none
    table.showsVerticalScrollIndicator = false
    table.rowHeight = mentionRowHeight
    table.dataSource = self
    table.delegate = self
    table.register(MentionCell.self, forCellReuseIdentifier: MentionCell.reuseID)
    // Insert below text views but above glass
    inputContainer.insertSubview(table, aboveSubview: inputGlass)
    mentionTableView = table
}
/// Shows/refreshes the autocomplete table with `candidates`, growing the input
/// container by up to `mentionMaxRows` rows. Animates only when the section
/// height actually changes.
private func showMentionAutocomplete(candidates: [MentionCandidate]) {
    ensureMentionTableView()
    mentionCandidates = candidates
    let newHeight = min(CGFloat(candidates.count) * mentionRowHeight, mentionMaxRows * mentionRowHeight)
    let changed = abs(mentionAutocompleteHeight - newHeight) > 0.5
    mentionAutocompleteHeight = newHeight
    mentionTableView?.reloadData()
    if changed {
        // Trigger re-layout — inputContainer will grow upward to include autocomplete rows
        UIView.animate(withDuration: 0.3, delay: 0, usingSpringWithDamping: 0.85, initialSpringVelocity: 0) {
            self.recalculateTextHeight()
            // Bug fix: layoutSubviews() must never be invoked directly (UIKit
            // contract). Mark dirty and force a synchronous pass so the frame
            // changes still animate inside this block.
            self.setNeedsLayout()
            self.layoutIfNeeded()
            self.delegate?.composerHeightDidChange(self, height: self.currentHeight)
        }
    }
}
/// Collapses the autocomplete section with a short animation.
/// No-op when the section is already hidden.
private func hideMentionAutocomplete() {
    guard mentionAutocompleteHeight > 0 else { return }
    mentionAutocompleteHeight = 0
    mentionCandidates = []
    UIView.animate(withDuration: 0.2) {
        self.recalculateTextHeight()
        // Bug fix: request layout instead of calling layoutSubviews() directly
        // (UIKit forbids direct invocation).
        self.setNeedsLayout()
        self.layoutIfNeeded()
        self.delegate?.composerHeightDidChange(self, height: self.currentHeight)
    }
}
/// Inserts the selected mention into the text view: replaces the active
/// "@query" with "@username " and places the caret after the trailing space.
///
/// Bug fix: NSRange / `selectedRange` are UTF-16 based, but the original used
/// `String.count` (grapheme clusters) for offsets — the replace range and the
/// caret drifted whenever the query or username contained non-BMP characters.
/// All offsets are now computed in UTF-16 code units.
func insertMention(username: String) {
    guard let query = extractMentionQuery() else { return }
    let text = textView.text ?? ""
    let nsText = text as NSString
    let cursorLocation = textView.selectedRange.location
    let queryLength = (query as NSString).length
    let atPosition = cursorLocation - queryLength - 1 // position of the "@"
    guard atPosition >= 0 else { return }
    let replaceRange = NSRange(location: atPosition, length: queryLength + 1)
    let replacement = "@\(username) "
    let newText = nsText.replacingCharacters(in: replaceRange, with: replacement)
    textView.text = newText
    textView.selectedRange = NSRange(location: atPosition + (replacement as NSString).length, length: 0)
    textViewDidChange(textView)
}
/// Preloads member info via PacketSearch for group members whose username is
/// not yet known locally, so names are resolved by the time the user types
/// "@". Called eagerly when entering a group chat.
func preloadMentionMembers() {
    guard isGroupChat, !groupDialogKey.isEmpty else { return }
    let myKey = SessionManager.shared.currentPublicKey
    guard let members = GroupRepository.shared.cachedMembers(
        account: myKey, groupDialogKey: groupDialogKey
    ) else { return }
    var pending: [String] = []
    for key in members.memberKeys where key != myKey {
        let knownInDialogs = !(DialogRepository.shared.dialogs[key]?.opponentUsername ?? "").isEmpty
        if !knownInDialogs && resolvedMembers[key] == nil {
            pending.append(key)
        }
    }
    if !pending.isEmpty {
        fetchUnknownMembers(pending)
    }
}
/// Drops all mention state: cached candidates, resolved member names, the
/// pending search handler, and any visible autocomplete UI.
func clearMentionCache() {
    mentionCandidatesCache = nil
    resolvedMembers.removeAll()
    if let handlerId = mentionSearchHandlerId {
        ProtocolManager.shared.removeSearchResultHandler(handlerId)
        mentionSearchHandlerId = nil
    }
    hideMentionAutocomplete()
}
/// Forwards the reply-bar cancel tap to the delegate.
@objc private func replyCancelTapped() {
delegate?.composerDidCancelReply(self)
}
@@ -652,16 +1056,16 @@ extension ComposerView: RecordingMicButtonDelegate {
recordingStartTask?.cancel()
recordingStartTask = Task { @MainActor [weak self] in
guard let self else { return }
guard CallManager.shared.uiState.phase == .idle else {
guard self.runtimeGuards.isCallIdle() else {
self.failRecordingStart(for: button)
return
}
guard self.hasSufficientDiskSpaceForRecording() else {
guard self.runtimeGuards.hasSufficientDiskSpace(self.minFreeDiskBytes) else {
self.failRecordingStart(for: button)
return
}
let granted = await AudioRecorder.requestMicrophonePermission()
let granted = await self.runtimeGuards.requestMicrophonePermission()
guard !Task.isCancelled else { return }
guard granted else {
self.failRecordingStart(for: button)
@@ -705,6 +1109,7 @@ extension ComposerView: RecordingMicButtonDelegate {
recordingPanel?.showCancelButton()
recordingLockView?.showStopButton { [weak self] in
self?.showRecordingPreview()
self?.micButton.resetState()
}
recordingOverlay?.transitionToLocked(onTapStop: { [weak self] in
self?.showRecordingPreview()
@@ -718,7 +1123,7 @@ extension ComposerView: RecordingMicButtonDelegate {
func micButtonDragUpdate(_ button: RecordingMicButton, distanceX: CGFloat, distanceY: CGFloat) {
recordingOverlay?.applyDragTransform(distanceX: distanceX, distanceY: distanceY)
recordingPanel?.updateCancelTranslation(distanceX)
let lockness = min(1, max(0, abs(distanceY) / 105))
let lockness = VoiceRecordingParityMath.lockness(distanceY: distanceY)
recordingLockView?.updateLockness(lockness)
}
@@ -736,7 +1141,7 @@ extension ComposerView: RecordingMicButtonDelegate {
lastRecordedDuration = snapshot.duration
lastRecordedWaveform = snapshot.waveform
if snapshot.duration < minVoiceDuration {
if VoiceRecordingParityMath.shouldDiscard(duration: snapshot.duration) {
dismissOverlayAndRestore()
return
}
@@ -750,22 +1155,20 @@ extension ComposerView: RecordingMicButtonDelegate {
}
updateRecordingSendAccessibilityArea(isEnabled: false)
guard let url = lastRecordedURL else { return }
let panelX = horizontalPadding
let panelW = micButton.frame.minX - innerSpacing - horizontalPadding
guard let url = lastRecordedURL else {
dismissOverlayAndRestore(skipAudioCleanup: true)
return
}
setPreviewRowReplacement(true)
micButton.resetState()
let preview = RecordingPreviewPanel(
frame: CGRect(
x: panelX,
y: inputContainer.frame.origin.y,
width: panelW,
height: inputContainer.frame.height
),
frame: inputContainer.bounds,
fileURL: url,
duration: lastRecordedDuration,
waveform: lastRecordedWaveform
)
preview.delegate = self
addSubview(preview)
inputContainer.addSubview(preview)
preview.animateIn()
recordingPreviewPanel = preview
isRecording = false
@@ -782,8 +1185,8 @@ extension ComposerView: RecordingMicButtonDelegate {
audioRecorder.onLevelUpdate = nil
audioRecorder.stopRecording()
guard lastRecordedDuration >= minVoiceDuration else {
dismissOverlayAndRestore(skipAudioCleanup: true)
guard !VoiceRecordingParityMath.shouldDiscard(duration: lastRecordedDuration) else {
dismissOverlayAndRestore()
return
}
@@ -839,8 +1242,9 @@ extension ComposerView: RecordingMicButtonDelegate {
UIView.animate(withDuration: 0.15) {
self.inputContainer.alpha = 0
self.attachButton.alpha = 0
self.micButton.alpha = 0
self.micGlass.alpha = 0
self.micIconLayer?.opacity = 0
self.micIconView?.alpha = 0
}
}
@@ -848,12 +1252,70 @@ extension ComposerView: RecordingMicButtonDelegate {
UIView.animate(withDuration: 0.15) {
self.inputContainer.alpha = 1
self.attachButton.alpha = 1
self.micButton.alpha = self.isSendVisible ? 0 : 1
self.micButton.transform = self.isSendVisible ? CGAffineTransform(scaleX: 0.42, y: 0.78) : .identity
self.micButton.isUserInteractionEnabled = true
self.micGlass.alpha = 1
self.micIconLayer?.opacity = 1
self.micIconView?.alpha = 1
}
updateSendMicVisibility(animated: false)
}
/// Switches the input row between normal composing chrome and the
/// voice-preview replacement state, where the preview panel occupies the
/// input container. Idempotent — no-ops when already in the requested state.
private func setPreviewRowReplacement(_ enabled: Bool) {
    guard isPreviewReplacingInputRow != enabled else { return }
    isPreviewReplacingInputRow = enabled
    // Hide AND disable interaction together so hidden controls cannot be
    // reached through accessibility focus or stale in-flight touches.
    textView.isHidden = enabled
    textView.isUserInteractionEnabled = !enabled
    emojiButton.isHidden = enabled
    emojiButton.isUserInteractionEnabled = !enabled
    sendButton.isHidden = enabled
    sendButton.isUserInteractionEnabled = !enabled
    replyBar.isHidden = enabled ? true : !isReplyVisible
    inputGlass.alpha = enabled ? 0 : 1
    micButton.isUserInteractionEnabled = !enabled
    if enabled {
        // Preview mode: keep the container visible, fade out mic/attach chrome.
        inputContainer.alpha = 1
        attachButton.alpha = 0
        micButton.alpha = 0
        micGlass.alpha = 0
        micIconView?.alpha = 0
        bringSubviewToFront(inputContainer)
    } else {
        replyBar.alpha = isReplyVisible ? 1 : 0
        bringSubviewToFront(micButton)
    }
    // Force a synchronous layout pass so the preview panel measures final frames.
    setNeedsLayout()
    layoutIfNeeded()
}
/// Bool-flavored convenience over `resetVoiceSessionState(cleanup:)`:
/// `true` preserves the recorded draft file, `false` discards the recording.
private func resetVoiceSessionState(skipAudioCleanup: Bool) {
    if skipAudioCleanup {
        resetVoiceSessionState(cleanup: .preserveRecordedDraft)
    } else {
        resetVoiceSessionState(cleanup: .discardRecording)
    }
}
/// Returns the voice-recording subsystem to idle state.
/// - Parameter cleanup: `.discardRecording` cancels the recorder and drops the
///   capture; `.preserveRecordedDraft` keeps the captured file URL for the send
///   transition while still resetting the recorder to idle.
private func resetVoiceSessionState(cleanup: VoiceSessionCleanupMode) {
    isRecording = false
    isRecordingLocked = false
    setRecordingFlowState(.idle)
    // Cancel a possibly still-pending permission/disk-space start task.
    recordingStartTask?.cancel()
    recordingStartTask = nil
    audioRecorder.onLevelUpdate = nil
    switch cleanup {
    case .discardRecording:
        audioRecorder.cancelRecording()
    case .preserveRecordedDraft:
        // Keep already captured file URL for send transition, but always
        // return recorder FSM to idle to allow immediate next recording.
        audioRecorder.reset()
    }
    updateRecordingSendAccessibilityArea(isEnabled: false)
    setPreviewRowReplacement(false)
    micButton.resetState()
}
private func failRecordingStart(for button: RecordingMicButton) {
let feedback = UINotificationFeedbackGenerator()
feedback.notificationOccurred(.warning)
@@ -861,19 +1323,6 @@ extension ComposerView: RecordingMicButtonDelegate {
button.resetState()
}
private func hasSufficientDiskSpaceForRecording() -> Bool {
let home = URL(fileURLWithPath: NSHomeDirectory(), isDirectory: true)
let keys: Set<URLResourceKey> = [
.volumeAvailableCapacityForImportantUsageKey,
.volumeAvailableCapacityKey
]
guard let values = try? home.resourceValues(forKeys: keys) else { return true }
let important = values.volumeAvailableCapacityForImportantUsage ?? Int64.max
let generic = Int64(values.volumeAvailableCapacity ?? Int.max)
let available = min(important, generic)
return available >= minFreeDiskBytes
}
private func updateRecordingSendAccessibilityArea(isEnabled: Bool) {
if !isEnabled {
recordingSendAccessibilityButton?.removeFromSuperview()
@@ -890,69 +1339,77 @@ extension ComposerView: RecordingMicButtonDelegate {
button.isAccessibilityElement = true
button.accessibilityLabel = "Stop recording"
button.accessibilityHint = "Stops recording and opens voice preview."
button.accessibilityIdentifier = "voice.recording.stopArea"
button.addTarget(self, action: #selector(accessibilityStopRecordingTapped), for: .touchUpInside)
recordingSendAccessibilityButton = button
window.addSubview(button)
}
let micCenter = convert(micButton.center, to: window)
button.frame = CGRect(x: micCenter.x - 60, y: micCenter.y - 60, width: 120, height: 120)
let hitSize = VoiceRecordingParityConstants.sendAccessibilityHitSize
button.frame = CGRect(
x: micCenter.x - hitSize / 2.0,
y: micCenter.y - hitSize / 2.0,
width: hitSize,
height: hitSize
)
}
/// Accessibility stop-area handler: opens the voice preview and returns the
/// mic button to its idle visual state.
@objc private func accessibilityStopRecordingTapped() {
    showRecordingPreview()
    micButton.resetState()
}
/// Tears down the full voice-recording UI (overlay, lock view, panel,
/// preview) after resetting state via `resetVoiceSessionState(cleanup:)`.
/// - Parameter dismissStyle: `.standard` dismisses in place; `.cancel` plays
///   the cancel animation before restoring the composer chrome.
private func finalizeVoiceSession(cleanup: VoiceSessionCleanupMode, dismissStyle: VoiceSessionDismissStyle) {
    resetVoiceSessionState(cleanup: cleanup)
    switch dismissStyle {
    case .standard:
        recordingOverlay?.dismiss()
        recordingPanel?.animateOut { [weak self] in
            self?.recordingPanel = nil
        }
        restoreComposerChrome()
    case .cancel:
        recordingOverlay?.dismissCancel()
        recordingPanel?.animateOutCancel { [weak self] in
            self?.recordingPanel = nil
            self?.restoreComposerChrome()
        }
        // No panel means its completion handler never fires — restore now.
        if recordingPanel == nil {
            restoreComposerChrome()
        }
    }
    recordingOverlay = nil
    recordingLockView?.dismiss()
    recordingLockView = nil
    recordingPreviewPanel?.animateOut { [weak self] in
        self?.recordingPreviewPanel = nil
    }
}
/// Deletes the last recorded voice draft file (if any) and clears all draft
/// bookkeeping so the next recording starts from a clean slate.
private func clearLastRecordedDraftFile() {
    if let url = lastRecordedURL {
        try? FileManager.default.removeItem(at: url)
    }
    lastRecordedURL = nil
    lastRecordedDuration = 0
    lastRecordedWaveform = []
    lastVoiceSendTransitionSource = nil
}
private func cancelRecordingWithDismissAnimation() {
isRecording = false
isRecordingLocked = false
setRecordingFlowState(.idle)
audioRecorder.onLevelUpdate = nil
audioRecorder.cancelRecording()
recordingOverlay?.dismissCancel()
recordingOverlay = nil
recordingLockView?.dismiss()
recordingLockView = nil
recordingPanel?.animateOutCancel { [weak self] in
self?.recordingPanel = nil
}
recordingPreviewPanel?.animateOut { [weak self] in
self?.recordingPreviewPanel = nil
}
updateRecordingSendAccessibilityArea(isEnabled: false)
restoreComposerChrome()
clearLastRecordedDraftFile()
finalizeVoiceSession(cleanup: .discardRecording, dismissStyle: .cancel)
}
private func dismissOverlayAndRestore(skipAudioCleanup: Bool = false) {
isRecording = false
isRecordingLocked = false
setRecordingFlowState(.idle)
recordingStartTask?.cancel()
recordingStartTask = nil
audioRecorder.onLevelUpdate = nil
if !skipAudioCleanup {
audioRecorder.cancelRecording()
let cleanupMode: VoiceSessionCleanupMode = skipAudioCleanup ? .preserveRecordedDraft : .discardRecording
if cleanupMode == .discardRecording {
clearLastRecordedDraftFile()
}
recordingOverlay?.dismiss()
recordingOverlay = nil
recordingLockView?.dismiss()
recordingLockView = nil
recordingPanel?.animateOut { [weak self] in
self?.recordingPanel = nil
}
recordingPreviewPanel?.animateOut { [weak self] in
self?.recordingPreviewPanel = nil
}
updateRecordingSendAccessibilityArea(isEnabled: false)
restoreComposerChrome()
finalizeVoiceSession(cleanup: cleanupMode, dismissStyle: .standard)
}
private func captureVoiceSendTransition(from sourceView: UIView?) -> VoiceSendTransitionSource? {
@@ -971,6 +1428,8 @@ extension ComposerView: RecordingMicButtonDelegate {
}
private func resumeRecordingFromPreview() {
setPreviewRowReplacement(false)
micButton.resetState()
guard audioRecorder.resumeRecording() else {
dismissOverlayAndRestore()
return
@@ -987,9 +1446,7 @@ extension ComposerView: RecordingMicButtonDelegate {
}
private func clampTrimRange(_ trimRange: ClosedRange<TimeInterval>, duration: TimeInterval) -> ClosedRange<TimeInterval> {
let lower = max(0, min(trimRange.lowerBound, duration))
let upper = max(lower, min(trimRange.upperBound, duration))
return lower...upper
VoiceRecordingParityMath.clampTrimRange(trimRange, duration: duration)
}
private func trimWaveform(
@@ -997,11 +1454,14 @@ extension ComposerView: RecordingMicButtonDelegate {
totalDuration: TimeInterval,
trimRange: ClosedRange<TimeInterval>
) -> [Float] {
guard !waveform.isEmpty, totalDuration > 0 else { return waveform }
let startIndex = max(0, Int(floor((trimRange.lowerBound / totalDuration) * Double(waveform.count))))
let endIndex = min(waveform.count, Int(ceil((trimRange.upperBound / totalDuration) * Double(waveform.count))))
guard startIndex < endIndex else { return waveform }
return Array(waveform[startIndex..<endIndex])
guard let sliceRange = VoiceRecordingParityMath.waveformSliceRange(
sampleCount: waveform.count,
totalDuration: totalDuration,
trimRange: trimRange
) else {
return waveform
}
return Array(waveform[sliceRange])
}
private func exportTrimmedAudio(url: URL, trimRange: ClosedRange<TimeInterval>) async -> URL? {
@@ -1074,9 +1534,9 @@ extension ComposerView: RecordingMicButtonDelegate {
try? FileManager.default.removeItem(at: url)
}
guard finalDuration >= minVoiceDuration else {
guard !VoiceRecordingParityMath.shouldDiscard(duration: finalDuration) else {
try? FileManager.default.removeItem(at: finalURL)
dismissOverlayAndRestore(skipAudioCleanup: true)
dismissOverlayAndRestore()
return
}
@@ -1109,13 +1569,7 @@ extension ComposerView: RecordingPreviewPanelDelegate {
func previewPanelDidTapDelete(_ panel: RecordingPreviewPanel) {
audioRecorder.cancelRecording()
if let url = lastRecordedURL {
try? FileManager.default.removeItem(at: url)
}
lastRecordedURL = nil
lastRecordedDuration = 0
lastRecordedWaveform = []
clearLastRecordedDraftFile()
dismissOverlayAndRestore(skipAudioCleanup: true)
delegate?.composerDidCancelRecording(self)
}
@@ -1124,3 +1578,26 @@ extension ComposerView: RecordingPreviewPanelDelegate {
resumeRecordingFromPreview()
}
}
// MARK: - Mention Table DataSource & Delegate
// NOTE(review): this duplicates the dataSource/delegate logic inside
// MentionAutocompleteView — confirm which table actually drives the panel
// and consider consolidating into one implementation.
extension ComposerView: UITableViewDataSource, UITableViewDelegate {
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        mentionCandidates.count
    }
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        // Force-cast is safe here: only MentionCell is registered for this reuse ID.
        let cell = tableView.dequeueReusableCell(withIdentifier: MentionCell.reuseID, for: indexPath) as! MentionCell
        let candidate = mentionCandidates[indexPath.row]
        // Last row hides its separator so the panel's bottom edge stays clean.
        let isLast = indexPath.row == mentionCandidates.count - 1
        cell.configure(candidate: candidate, showSeparator: !isLast)
        return cell
    }
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        tableView.deselectRow(at: indexPath, animated: true)
        let candidate = mentionCandidates[indexPath.row]
        // Insert "@username " into the composer text, then close the panel.
        insertMention(username: candidate.username)
        hideMentionAutocomplete()
    }
}

View File

@@ -16,7 +16,7 @@ import CoreText
/// Two-phase pattern (matches Telegram asyncLayout):
/// 1. `CoreTextTextLayout.calculate()` runs on ANY thread (background-safe)
/// 2. `CoreTextLabel.draw()` runs on main thread, renders pre-calculated lines
final class CoreTextTextLayout {
final class CoreTextTextLayout: @unchecked Sendable {
// MARK: - Line
@@ -39,6 +39,22 @@ final class CoreTextTextLayout {
var rects: [CGRect]
}
// MARK: - Mention Detection
/// A detected @username with bounding rects for hit testing.
struct MentionInfo {
let username: String // without the "@" prefix
let range: NSRange // range of "@username" in the original string
var rects: [CGRect] // populated during layout
}
/// Cached mention regex (compilation is expensive).
private static let mentionDetector: NSRegularExpression? = {
// Match @username at start of string or after whitespace.
// Username: 1-32 alphanumeric or underscore characters.
try? NSRegularExpression(pattern: "(?:^|(?<=\\s))@([A-Za-z0-9_]{1,32})", options: [])
}()
/// TLD whitelist desktop parity (desktop/app/constants.ts lines 38-63).
static let allowedTLDs: Set<String> = [
"com", "ru", "ua", "org", "net", "edu", "gov", "io", "tech", "info",
@@ -63,6 +79,7 @@ final class CoreTextTextLayout {
let lastLineHasBlockQuote: Bool
let textColor: UIColor
let links: [LinkInfo]
let mentions: [MentionInfo]
private init(
lines: [Line],
@@ -71,7 +88,8 @@ final class CoreTextTextLayout {
lastLineHasRTL: Bool,
lastLineHasBlockQuote: Bool,
textColor: UIColor,
links: [LinkInfo] = []
links: [LinkInfo] = [],
mentions: [MentionInfo] = []
) {
self.lines = lines
self.size = size
@@ -80,6 +98,7 @@ final class CoreTextTextLayout {
self.lastLineHasBlockQuote = lastLineHasBlockQuote
self.textColor = textColor
self.links = links
self.mentions = mentions
}
/// Returns the URL at the given point, or nil if no link at that position.
@@ -94,6 +113,18 @@ final class CoreTextTextLayout {
return nil
}
/// Returns the username at the given point, or nil if no mention at that position.
func mentionAt(point: CGPoint) -> String? {
    // Hit rects are expanded by 4pt on every side to make taps forgiving.
    let hit = mentions.first { info in
        info.rects.contains { $0.insetBy(dx: -4, dy: -4).contains(point) }
    }
    return hit?.username
}
// MARK: - Telegram Line Spacing
/// Telegram default: 12% of font line height.
@@ -163,6 +194,29 @@ final class CoreTextTextLayout {
}
}
// Mention detection (@username desktop/Android parity)
var detectedMentions: [(username: String, range: NSRange)] = []
if let mentionRegex = mentionDetector {
let fullRange = NSRange(location: 0, length: stringLength)
mentionRegex.enumerateMatches(in: text, options: [], range: fullRange) { result, _, _ in
guard let result else { return }
let matchRange = result.range // full "@username"
let usernameRange = result.range(at: 1) // "username" capture group
guard usernameRange.location != NSNotFound else { return }
// Skip if overlaps with a detected link
let overlapsLink = detectedLinks.contains { link in
NSIntersectionRange(link.range, matchRange).length > 0
}
guard !overlapsLink else { return }
let username = (text as NSString).substring(with: usernameRange)
// Blue color + underline (Telegram parity)
attrString.addAttribute(.foregroundColor, value: linkColor as UIColor, range: matchRange)
attrString.addAttribute(.underlineStyle, value: NSUnderlineStyle.single.rawValue, range: matchRange)
attrString.addAttribute(.underlineColor, value: linkColor as UIColor, range: matchRange)
detectedMentions.append((username: username, range: matchRange))
}
}
// Typesetter (Telegram: InteractiveTextComponent line 1481)
let typesetter = CTTypesetterCreateWithAttributedString(attrString as CFAttributedString)
@@ -269,6 +323,28 @@ final class CoreTextTextLayout {
}
}
// Compute mention bounding rects (same algorithm as links)
var mentionInfos: [MentionInfo] = []
for detected in detectedMentions {
var rects: [CGRect] = []
for line in resultLines {
let overlap = NSIntersectionRange(line.stringRange, detected.range)
guard overlap.length > 0 else { continue }
var xStart = CGFloat(CTLineGetOffsetForStringIndex(
line.ctLine, overlap.location, nil
))
var xEnd = CGFloat(CTLineGetOffsetForStringIndex(
line.ctLine, overlap.location + overlap.length, nil
))
if xEnd < xStart { swap(&xStart, &xEnd) }
let lineH = line.ascent + line.descent
rects.append(CGRect(x: xStart, y: line.origin.y, width: xEnd - xStart, height: lineH))
}
if !rects.isEmpty {
mentionInfos.append(MentionInfo(username: detected.username, range: detected.range, rects: rects))
}
}
return CoreTextTextLayout(
lines: resultLines,
size: CGSize(width: ceil(maxLineWidth), height: ceil(currentY)),
@@ -276,7 +352,8 @@ final class CoreTextTextLayout {
lastLineHasRTL: lastLineHasRTL,
lastLineHasBlockQuote: lastLineHasBlockQuote,
textColor: textColor,
links: linkInfos
links: linkInfos,
mentions: mentionInfos
)
}

View File

@@ -0,0 +1,280 @@
import SwiftUI
import UIKit
// MARK: - Data Model
/// One row in the mention autocomplete list.
struct MentionCandidate {
    let username: String   // handle without the leading "@"
    let title: String      // display name shown in the row
    let publicKey: String  // empty for the synthetic @all row
    let isAll: Bool        // true only for the "All Members" row
    /// Factory for the synthetic "@all" row.
    static func allMembers() -> MentionCandidate {
        MentionCandidate(username: "all", title: "All Members", publicKey: "", isAll: true)
    }
}
// MARK: - Delegate
@MainActor
protocol MentionAutocompleteDelegate: AnyObject {
    /// Notifies that the user selected a candidate row in the panel.
    func mentionAutocomplete(_ view: MentionAutocompleteView, didSelect candidate: MentionCandidate)
}
// MARK: - MentionAutocompleteView
/// UIKit mention autocomplete panel Telegram-exact dimensions.
/// Appears above the composer when user types "@" in a group chat.
///
/// Reference: MentionChatInputContextPanelNode.swift + MentionChatInputPanelItem.swift
/// Row: 42pt | Avatar: 30x30 at x=12 | Text at x=55 | Font: 14pt medium/regular
/// Background: UIVisualEffectView (.ultraThinMaterial), corner radius 20pt
final class MentionAutocompleteView: UIView, UITableViewDataSource, UITableViewDelegate {
    weak var mentionDelegate: MentionAutocompleteDelegate?
    /// Corner radius matching inputContainer — passed from ComposerView.
    var panelCornerRadius: CGFloat = 21
    private let glassBackground = TelegramGlassUIView()
    private let tableView = UITableView(frame: .zero, style: .plain)
    private var candidates: [MentionCandidate] = []
    // Telegram-exact constants
    private let rowHeight: CGFloat = 42
    override init(frame: CGRect) {
        super.init(frame: frame)
        setupViews()
    }
    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }
    /// Builds the view hierarchy: glass backdrop behind a transparent table.
    private func setupViews() {
        // Glass background (TelegramGlassUIView — same as input container)
        glassBackground.isUserInteractionEnabled = false
        addSubview(glassBackground)
        tableView.dataSource = self
        tableView.delegate = self
        tableView.backgroundColor = .clear
        tableView.separatorStyle = .none
        tableView.showsVerticalScrollIndicator = false
        tableView.register(MentionCell.self, forCellReuseIdentifier: MentionCell.reuseID)
        tableView.rowHeight = rowHeight
        addSubview(tableView)
        clipsToBounds = true
    }
    override func layoutSubviews() {
        super.layoutSubviews()
        glassBackground.frame = bounds
        tableView.frame = bounds
        // Top-only rounding via maskedCorners (not layer.mask — masks break the glass backdrop).
        // Matches inputContainer radius so panel + input look like ONE unified glass element.
        layer.cornerRadius = panelCornerRadius
        layer.cornerCurve = .continuous
        layer.maskedCorners = [.layerMinXMinYCorner, .layerMaxXMinYCorner]
        glassBackground.fixedCornerRadius = panelCornerRadius
        glassBackground.applyCornerRadius()
        glassBackground.layer.maskedCorners = [.layerMinXMinYCorner, .layerMaxXMinYCorner]
    }
    // MARK: - Public API
    /// Replaces the candidate list and reloads all rows.
    func update(candidates: [MentionCandidate]) {
        self.candidates = candidates
        tableView.reloadData()
    }
    /// Panel height: one 42pt row per candidate, capped at 5 visible rows
    /// (longer lists scroll).
    var preferredHeight: CGFloat {
        min(CGFloat(candidates.count) * rowHeight, 5 * rowHeight)
    }
    // MARK: - UITableViewDataSource
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        candidates.count
    }
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        // Force-cast is safe: only MentionCell is registered for this reuse ID.
        let cell = tableView.dequeueReusableCell(withIdentifier: MentionCell.reuseID, for: indexPath) as! MentionCell
        let candidate = candidates[indexPath.row]
        // Last row hides its separator so the panel's bottom edge stays clean.
        let isLast = indexPath.row == candidates.count - 1
        cell.configure(candidate: candidate, showSeparator: !isLast)
        return cell
    }
    // MARK: - UITableViewDelegate
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        tableView.deselectRow(at: indexPath, animated: true)
        let candidate = candidates[indexPath.row]
        mentionDelegate?.mentionAutocomplete(self, didSelect: candidate)
    }
}
// MARK: - MentionCell
/// Single row of the mention autocomplete list (Telegram parity: 42pt row,
/// 30pt avatar at x=12, text at x=55, 14pt medium/regular fonts).
///
/// Content is populated in `configure(candidate:showSeparator:)`; geometry is
/// resolved in `layoutSubviews()` from the cell's actual bounds. The previous
/// implementation laid out once inside `configure` against
/// `UIScreen.main.bounds.width` and a hard-coded 42pt height, which is wrong
/// whenever the table is narrower than the screen (iPad, split view, rotation).
final class MentionCell: UITableViewCell {
    static let reuseID = "MentionCell"

    // Telegram-exact constants
    private let avatarSize: CGFloat = 30
    private let avatarLeftInset: CGFloat = 12
    private let textLeftOffset: CGFloat = 55
    private let rightPadding: CGFloat = 10

    private let avatarCircle = UIView()
    private let avatarImageView = UIImageView()
    private let avatarInitialLabel = UILabel()
    private let allLabel = UILabel()
    private let nameLabel = UILabel()
    private let usernameLabel = UILabel()
    private let separatorLine = UIView()

    override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
        super.init(style: style, reuseIdentifier: reuseIdentifier)
        backgroundColor = .clear
        contentView.backgroundColor = .clear
        selectionStyle = .none
        avatarCircle.clipsToBounds = true
        contentView.addSubview(avatarCircle)
        avatarImageView.contentMode = .scaleAspectFill
        avatarImageView.clipsToBounds = true
        avatarCircle.addSubview(avatarImageView)
        avatarInitialLabel.textAlignment = .center
        avatarInitialLabel.font = UIFont.systemFont(ofSize: 12, weight: .bold).rounded()
        avatarCircle.addSubview(avatarInitialLabel)
        allLabel.text = "@"
        allLabel.textAlignment = .center
        allLabel.textColor = .white
        allLabel.font = .systemFont(ofSize: 16, weight: .bold)
        allLabel.isHidden = true
        avatarCircle.addSubview(allLabel)
        nameLabel.font = .systemFont(ofSize: 14, weight: .medium)
        nameLabel.textColor = .white
        contentView.addSubview(nameLabel)
        usernameLabel.font = .systemFont(ofSize: 14, weight: .regular)
        usernameLabel.textColor = UIColor(white: 1, alpha: 0.55)
        contentView.addSubview(usernameLabel)
        separatorLine.backgroundColor = UIColor.white.withAlphaComponent(0.08)
        contentView.addSubview(separatorLine)
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    /// Populates row content (avatar / labels / separator visibility).
    /// Frames are computed later in `layoutSubviews()`.
    func configure(candidate: MentionCandidate, showSeparator: Bool) {
        separatorLine.isHidden = !showSeparator
        if candidate.isAll {
            // "@all" row: blue circle with a bold "@" glyph.
            avatarCircle.backgroundColor = UIColor(RosettaColors.primaryBlue)
            avatarImageView.isHidden = true
            avatarInitialLabel.isHidden = true
            allLabel.isHidden = false
        } else {
            allLabel.isHidden = true
            if let avatar = AvatarRepository.shared.loadAvatar(publicKey: candidate.publicKey) {
                avatarImageView.image = avatar
                avatarImageView.isHidden = false
                avatarInitialLabel.isHidden = true
                avatarCircle.backgroundColor = .clear
            } else {
                // No avatar — fall back to a colored initial circle.
                avatarImageView.image = nil
                avatarImageView.isHidden = true
                avatarInitialLabel.isHidden = false
                avatarInitialLabel.text = String(candidate.title.prefix(1)).uppercased()
                let colorIndex = RosettaColors.avatarColorIndex(for: candidate.title, publicKey: candidate.publicKey)
                // Mantine "light" variant: dark base + tint at 15% opacity (dark mode)
                let isDark = traitCollection.userInterfaceStyle == .dark
                let tintColor = RosettaColors.avatarColor(for: colorIndex)
                let baseColor = isDark ? UIColor(red: 0.102, green: 0.106, blue: 0.118, alpha: 1) : .white // #1A1B1E
                let overlayAlpha: CGFloat = isDark ? 0.15 : 0.10
                avatarCircle.backgroundColor = baseColor.blended(with: tintColor, alpha: overlayAlpha)
                // Text: shade-3 (text) in dark, shade-6 (tint) in light
                avatarInitialLabel.textColor = isDark
                    ? RosettaColors.avatarTextColor(for: colorIndex)
                    : RosettaColors.avatarColor(for: colorIndex)
            }
        }
        nameLabel.text = candidate.title
        usernameLabel.text = " @\(candidate.username)"
        setNeedsLayout()
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        let h = contentView.bounds.height
        let w = contentView.bounds.width
        // Avatar
        let avatarY = (h - avatarSize) / 2
        avatarCircle.frame = CGRect(x: avatarLeftInset, y: avatarY, width: avatarSize, height: avatarSize)
        avatarCircle.layer.cornerRadius = avatarSize / 2
        avatarImageView.frame = avatarCircle.bounds
        avatarInitialLabel.frame = avatarCircle.bounds
        allLabel.frame = avatarCircle.bounds
        // Text: name takes at most 60% of available width, username fills the rest.
        let nameSize = nameLabel.sizeThatFits(CGSize(width: w, height: h))
        let maxTextW = w - textLeftOffset - rightPadding
        let textY = (h - nameSize.height) / 2
        let nameW = min(nameSize.width, maxTextW * 0.6)
        nameLabel.frame = CGRect(x: textLeftOffset, y: textY, width: nameW, height: nameSize.height)
        let usernameSize = usernameLabel.sizeThatFits(CGSize(width: w, height: h))
        let remainingW = maxTextW - nameW
        usernameLabel.frame = CGRect(
            x: textLeftOffset + nameW, y: textY,
            width: min(usernameSize.width, remainingW), height: usernameSize.height
        )
        // Separator (1px, Telegram: itemPlainSeparatorColor)
        let sepH: CGFloat = 1.0 / UIScreen.main.scale
        separatorLine.frame = CGRect(x: textLeftOffset, y: h - sepH, width: w - textLeftOffset, height: sepH)
    }

    override func setHighlighted(_ highlighted: Bool, animated: Bool) {
        super.setHighlighted(highlighted, animated: animated)
        // Instant highlight on touch-down, 0.4s fade-out on release.
        UIView.animate(withDuration: highlighted ? 0 : 0.4) {
            self.contentView.backgroundColor = highlighted
                ? UIColor.white.withAlphaComponent(0.08)
                : .clear
        }
    }
}
// MARK: - Helpers
private extension UIFont {
    /// Returns the same font with the SF Rounded design, or `self` when the
    /// rounded descriptor is unavailable for this font.
    func rounded() -> UIFont {
        if let roundedDescriptor = fontDescriptor.withDesign(.rounded) {
            // Size 0 keeps the original point size.
            return UIFont(descriptor: roundedDescriptor, size: 0)
        }
        return self
    }
}
private extension UIColor {
    /// Alpha-composites `color` over the receiver and returns the resulting
    /// opaque color: `self * (1 - alpha) + color * alpha` per RGB channel.
    /// Returns the receiver unchanged when either color cannot be converted
    /// to RGB components.
    func blended(with color: UIColor, alpha: CGFloat) -> UIColor {
        var r1: CGFloat = 0, g1: CGFloat = 0, b1: CGFloat = 0, a1: CGFloat = 0
        var r2: CGFloat = 0, g2: CGFloat = 0, b2: CGFloat = 0, a2: CGFloat = 0
        // getRed(_:green:blue:alpha:) returns false for colors that cannot be
        // expressed in a compatible RGB space; ignoring that (as the previous
        // version did) silently blends against black.
        guard getRed(&r1, green: &g1, blue: &b1, alpha: &a1),
              color.getRed(&r2, green: &g2, blue: &b2, alpha: &a2) else {
            return self
        }
        // Clamp the blend factor to [0, 1] so out-of-range inputs cannot
        // produce invalid component values.
        let t = min(max(alpha, 0), 1)
        return UIColor(
            red: r1 * (1 - t) + r2 * t,
            green: g1 * (1 - t) + g2 * t,
            blue: b1 * (1 - t) + b2 * t,
            alpha: 1
        )
    }
}

View File

@@ -17,6 +17,8 @@ final class MessageCellActions {
var onCall: (String) -> Void = { _ in } // peer public key
var onGroupInviteTap: (String) -> Void = { _ in } // invite string
var onGroupInviteOpen: (String) -> Void = { _ in } // group dialog key navigate
var onMentionTap: (String) -> Void = { _ in } // username (without @)
var onAvatarTap: (String) -> Void = { _ in } // sender public key (group chats)
// Multi-select
var onEnterSelection: (ChatMessage) -> Void = { _ in }

View File

@@ -26,7 +26,9 @@ struct MessageCellView: View, Equatable {
}
var body: some View {
#if DEBUG
let _ = PerformanceLogger.shared.track("chatDetail.rowEval")
#endif
let outgoing = message.isFromMe(myPublicKey: currentPublicKey)
let hasTail = position == .single || position == .bottom

View File

@@ -559,6 +559,9 @@ final class NativeMessageCell: UICollectionViewCell {
senderAvatarContainer.layer.cornerRadius = 18 // 36pt circle
senderAvatarContainer.clipsToBounds = true
senderAvatarContainer.isHidden = true
senderAvatarContainer.isUserInteractionEnabled = true
let avatarTap = UITapGestureRecognizer(target: self, action: #selector(handleAvatarTap))
senderAvatarContainer.addGestureRecognizer(avatarTap)
contentView.addSubview(senderAvatarContainer)
// Match AvatarView: size * 0.38, bold, rounded design
@@ -874,13 +877,29 @@ final class NativeMessageCell: UICollectionViewCell {
duration: previewParts.duration,
isOutgoing: layout.isOutgoing
)
let voiceId = voiceAtt.id
let voiceFileName = voiceAtt.preview.components(separatedBy: "::").last ?? ""
let isCurrentVoice = VoiceMessagePlayer.shared.currentMessageId == message.id
voiceView.updatePlaybackState(
isPlaying: isCurrentVoice && VoiceMessagePlayer.shared.isPlaying,
progress: isCurrentVoice ? CGFloat(VoiceMessagePlayer.shared.progress) : 0
)
let voiceAttachment = voiceAtt
let storedPassword = message.attachmentPassword
let playbackDuration = previewParts.duration
let playbackMessageId = message.id
voiceView.onPlayTapped = { [weak self] in
guard let self else { return }
let fileName = "voice_\(Int(previewParts.duration))s.m4a"
if let url = AttachmentCache.shared.fileURL(forAttachmentId: voiceId, fileName: fileName) {
VoiceMessagePlayer.shared.play(messageId: message.id, fileURL: url)
Task.detached(priority: .userInitiated) {
guard let playableURL = await Self.resolvePlayableVoiceURL(
attachment: voiceAttachment,
duration: playbackDuration,
storedPassword: storedPassword
) else {
return
}
await MainActor.run {
guard self.message?.id == playbackMessageId else { return }
VoiceMessagePlayer.shared.play(messageId: playbackMessageId, fileURL: playableURL)
}
}
}
fileIconView.isHidden = true
@@ -1504,6 +1523,100 @@ final class NativeMessageCell: UICollectionViewCell {
return (0, preview)
}
/// Resolves a locally playable file URL for a voice attachment: serves from
/// the attachment cache when present, otherwise downloads + decrypts via the
/// transport layer, persists the bytes to cache, and materializes a temp file.
/// Returns nil when the data cannot be obtained or decrypted.
private static func resolvePlayableVoiceURL(
    attachment: MessageAttachment,
    duration: TimeInterval,
    storedPassword: String?
) async -> URL? {
    // Cache key mirrors the "voice_<seconds>s.m4a" naming used elsewhere.
    let fileName = "voice_\(Int(duration))s.m4a"
    if let cached = playableVoiceURLFromCache(attachmentId: attachment.id, fileName: fileName) {
        return cached
    }
    guard let downloaded = await downloadVoiceData(attachment: attachment, storedPassword: storedPassword) else {
        return nil
    }
    // Persist decrypted bytes so subsequent plays skip the network.
    _ = AttachmentCache.shared.saveFile(downloaded, forAttachmentId: attachment.id, fileName: fileName)
    return writePlayableVoiceTempFile(
        data: downloaded,
        attachmentId: attachment.id,
        fileName: fileName
    )
}
/// Serves a playable temp URL from the attachment cache, or nil when the
/// attachment's decrypted bytes are not cached yet.
private static func playableVoiceURLFromCache(attachmentId: String, fileName: String) -> URL? {
    guard let decrypted = AttachmentCache.shared.loadFileData(
        forAttachmentId: attachmentId,
        fileName: fileName
    ) else {
        return nil
    }
    // The player consumes a file URL, so materialize cached bytes on disk.
    return writePlayableVoiceTempFile(data: decrypted, attachmentId: attachmentId, fileName: fileName)
}
/// Writes voice data to a deterministic temp-file path for playback,
/// replacing any previous file at that path.
/// - Returns: the temp URL, or nil when the write fails (e.g. disk full).
private static func writePlayableVoiceTempFile(data: Data, attachmentId: String, fileName: String) -> URL? {
    // Sanitize BOTH interpolated values: the previous version only sanitized
    // fileName, but a "/" in attachmentId would equally turn the single temp
    // path component into a non-existent nested directory and fail the write.
    let safeFileName = fileName.replacingOccurrences(of: "/", with: "_")
    let safeAttachmentId = attachmentId.replacingOccurrences(of: "/", with: "_")
    let tempURL = FileManager.default.temporaryDirectory
        .appendingPathComponent("voice_play_\(safeAttachmentId)_\(safeFileName)")
    // Remove any stale file so the atomic write starts clean.
    try? FileManager.default.removeItem(at: tempURL)
    do {
        try data.write(to: tempURL, options: .atomic)
        return tempURL
    } catch {
        return nil
    }
}
/// Downloads and decrypts a voice attachment's bytes via the transport layer.
/// Returns nil when the attachment has no download tag, no stored password,
/// the download fails, or no password candidate decrypts the payload.
private static func downloadVoiceData(attachment: MessageAttachment, storedPassword: String?) async -> Data? {
    let tag = attachment.effectiveDownloadTag
    guard !tag.isEmpty else { return nil }
    guard let storedPassword, !storedPassword.isEmpty else { return nil }
    do {
        let encryptedData = try await TransportManager.shared.downloadFile(
            tag: tag,
            server: attachment.transportServer
        )
        // Payload travels as an encrypted string blob; decode lossily from UTF-8.
        let encryptedString = String(decoding: encryptedData, as: UTF8.self)
        let passwords = MessageCrypto.attachmentPasswordCandidates(from: storedPassword)
        guard let decrypted = decryptAttachmentData(encryptedString: encryptedString, passwords: passwords) else {
            return nil
        }
        // Strip a possible data-URL wrapper before handing bytes to playback.
        return parseAttachmentFileData(decrypted)
    } catch {
        // Best-effort: any transport error degrades to "not playable".
        return nil
    }
}
/// Tries every password candidate against the encrypted blob in two passes —
/// first requiring the compressed format, then with the default (relaxed)
/// path — and returns the first successful plaintext, or nil.
private static func decryptAttachmentData(encryptedString: String, passwords: [String]) -> Data? {
    let crypto = CryptoManager.shared
    // Pass 1: compression required (preferred format). `lazy` preserves the
    // original short-circuit: stop at the first password that works.
    let strict = passwords.lazy.compactMap { password in
        try? crypto.decryptWithPassword(
            encryptedString,
            password: password,
            requireCompression: true
        )
    }.first
    if let strict { return strict }
    // Pass 2: relaxed decryption with the same candidate order.
    return passwords.lazy.compactMap { password in
        try? crypto.decryptWithPassword(encryptedString, password: password)
    }.first
}
/// Unwraps a data-URL payload ("data:<mime>;base64,<payload>") into raw
/// bytes; any other input is returned untouched.
private static func parseAttachmentFileData(_ data: Data) -> Data {
    guard let text = String(data: data, encoding: .utf8),
          text.hasPrefix("data:"),
          let commaIndex = text.firstIndex(of: ",") else {
        return data
    }
    let base64Payload = String(text[text.index(after: commaIndex)...])
    // Fall back to the original bytes when the payload is not valid base64.
    return Data(base64Encoded: base64Payload) ?? data
}
private static func fileIcon(for fileName: String) -> String {
let ext = (fileName as NSString).pathExtension.lowercased()
switch ext {
@@ -1640,12 +1753,25 @@ final class NativeMessageCell: UICollectionViewCell {
return
}
let pointInText = gesture.location(in: textLabel)
guard let url = textLabel.textLayout?.linkAt(point: pointInText) else { return }
var finalURL = url
if finalURL.scheme == nil || finalURL.scheme?.isEmpty == true {
finalURL = URL(string: "https://\(url.absoluteString)") ?? url
// Check links first
if let url = textLabel.textLayout?.linkAt(point: pointInText) {
var finalURL = url
if finalURL.scheme == nil || finalURL.scheme?.isEmpty == true {
finalURL = URL(string: "https://\(url.absoluteString)") ?? url
}
UIApplication.shared.open(finalURL)
return
}
UIApplication.shared.open(finalURL)
// Then check @mentions
if let username = textLabel.textLayout?.mentionAt(point: pointInText) {
actions?.onMentionTap(username)
return
}
}
/// Avatar tap in a group chat: routes the sender's public key to the
/// profile-navigation action. Ignored when the sender key is absent/empty.
@objc private func handleAvatarTap() {
    if let senderKey = message?.fromPublicKey, !senderKey.isEmpty {
        actions?.onAvatarTap(senderKey)
    }
}
// MARK: - Context Menu (Telegram-style)
@@ -2363,7 +2489,7 @@ final class NativeMessageCell: UICollectionViewCell {
}
photoBlurHashTasks[attachmentId] = Task { [weak self] in
let decoded = await Task.detached(priority: .utility) {
let decoded = await Task.detached(priority: .background) {
UIImage.fromBlurHash(hash, width: 48, height: 48)
}.value
guard !Task.isCancelled else { return }

View File

@@ -1,6 +1,15 @@
import SwiftUI
import UIKit
/// Cached reply/forward data parsed from JSON blob (immutable per message).
struct ReplyDataCacheEntry {
    /// Display name of the quoted message's sender (nil when the entry describes a forward, or when parsing failed).
    let replyName: String?
    /// Quoted message text with emoji shortcodes already expanded (nil for forwards).
    let replyText: String?
    /// ID of the quoted message, used for scroll-to-reply navigation (nil for forwards).
    let replyMessageId: String?
    /// Display name of the original sender when the message is a forward (nil for replies).
    let forwardSenderName: String?
    /// Public key of the original sender when the message is a forward (nil for replies).
    let forwardSenderKey: String?
}
// MARK: - NativeMessageListController
/// UICollectionView-based message list with inverted scroll (newest at bottom).
@@ -19,6 +28,7 @@ final class NativeMessageListController: UIViewController {
static let scrollButtonIconCanvas: CGFloat = 38
static let scrollButtonBaseTrailing: CGFloat = 17
static let scrollButtonCompactExtraTrailing: CGFloat = 18
static let recordingScrollLift: CGFloat = 76
}
@@ -45,6 +55,8 @@ final class NativeMessageListController: UIViewController {
var onScrollToBottomVisibilityChange: ((Bool) -> Void)?
var onPaginationTrigger: (() -> Void)?
var onBottomPaginationTrigger: (() -> Void)?
var hasNewerMessages: Bool = false
var onTapBackground: (() -> Void)?
var onComposerHeightChange: ((CGFloat) -> Void)?
var onKeyboardDidHide: (() -> Void)?
@@ -139,6 +151,9 @@ final class NativeMessageListController: UIViewController {
// MARK: - Layout Cache (Telegram asyncLayout pattern)
/// Generation counter for discarding stale async layout results.
private var layoutGeneration: UInt64 = 0
/// Cache: messageId pre-calculated layout from background thread.
/// All frame rects computed once, applied on main thread (just sets frames).
private var layoutCache: [String: MessageCellLayout] = [:]
@@ -147,6 +162,15 @@ final class NativeMessageListController: UIViewController {
/// Eliminates double CoreText computation (measure + render measure once, render from cache).
private var textLayoutCache: [String: CoreTextTextLayout] = [:]
/// Cache: messageId parsed reply/forward data. Reply blobs are immutable after creation,
/// so JSON decode only happens once per message instead of every cell configure.
private var replyDataCache: [String: ReplyDataCacheEntry] = [:]
// MARK: - Skeleton Loading
private var skeletonView: NativeSkeletonView?
private var isShowingSkeleton = false
// MARK: - Init
init(config: Config) {
@@ -200,10 +224,20 @@ final class NativeMessageListController: UIViewController {
let members = try? await GroupService.shared.requestMembers(
groupDialogKey: self.config.opponentPublicKey
)
if let adminKey = members?.first, !adminKey.isEmpty {
self.config.groupAdminKey = adminKey
self.calculateLayouts()
self.collectionView.reloadData()
if let members, !members.isEmpty {
// Cache members for mention autocomplete
GroupRepository.shared.updateMemberCache(
account: account,
groupDialogKey: self.config.opponentPublicKey,
memberKeys: members
)
if let adminKey = members.first, !adminKey.isEmpty {
self.config.groupAdminKey = adminKey
self.calculateLayouts()
self.collectionView.reloadData()
}
// Trigger mention preload now that cache is populated
self.composerView?.preloadMentionMembers()
}
}
}
@@ -218,6 +252,36 @@ final class NativeMessageListController: UIViewController {
self.calculateLayouts()
self.refreshAllMessageCells()
}
// Show skeleton placeholder while messages load from DB
if messages.isEmpty {
showSkeleton()
}
}
// MARK: - Skeleton
/// Installs a full-bounds skeleton placeholder over the message list.
/// Idempotent: does nothing if a skeleton is already showing.
private func showSkeleton() {
    guard skeletonView == nil else { return }
    let placeholder = NativeSkeletonView(frame: view.bounds)
    placeholder.autoresizingMask = [.flexibleWidth, .flexibleHeight]
    view.addSubview(placeholder)
    skeletonView = placeholder
    isShowingSkeleton = true
}
/// Fades the skeleton out and detaches it. State is cleared immediately so a
/// concurrent showSkeleton() can install a fresh placeholder.
private func hideSkeletonAnimated() {
    guard let placeholder = skeletonView else { return }
    isShowingSkeleton = false
    skeletonView = nil
    placeholder.animateOut {
        placeholder.removeFromSuperview()
    }
    // Safety net: force-detach after 1s in case the fade never completes.
    DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak placeholder] in
        placeholder?.removeFromSuperview()
    }
}
@objc private func handleAvatarDidUpdate() {
@@ -234,6 +298,8 @@ final class NativeMessageListController: UIViewController {
composerView?.isFocusBlocked = true
view.endEditing(true)
onComposerFocusChange?(false)
dateHideTimer?.invalidate()
dateHideTimer = nil
}
override func viewDidAppear(_ animated: Bool) {
@@ -286,6 +352,7 @@ final class NativeMessageListController: UIViewController {
collectionView.translatesAutoresizingMaskIntoConstraints = false
collectionView.backgroundColor = .clear
collectionView.delegate = self
collectionView.prefetchDataSource = self
collectionView.keyboardDismissMode = .interactive
collectionView.showsVerticalScrollIndicator = false
collectionView.showsHorizontalScrollIndicator = false
@@ -334,45 +401,41 @@ final class NativeMessageListController: UIViewController {
cell.apply(layout: layout)
}
// Parse reply data for quote display
let replyAtt = msg.attachments.first { $0.type == .messages }
var replyName: String?
var replyText: String?
var replyMessageId: String?
var forwardSenderName: String?
var forwardSenderKey: String?
if let att = replyAtt {
if let data = att.blob.data(using: .utf8),
let replies = try? JSONDecoder().decode([ReplyMessageData].self, from: data),
let first = replies.first {
let senderKey = first.publicKey
let name: String
if senderKey == self.config.currentPublicKey {
name = "You"
} else if senderKey == self.config.opponentPublicKey {
name = self.config.opponentTitle.isEmpty
? String(senderKey.prefix(8)) + ""
: self.config.opponentTitle
} else {
name = DialogRepository.shared.dialogs[senderKey]?.opponentTitle
?? String(senderKey.prefix(8)) + ""
}
let displayText = MessageCellLayout.isGarbageOrEncrypted(msg.text) ? "" : msg.text
if displayText.isEmpty {
// Forward
forwardSenderName = name
forwardSenderKey = senderKey
} else {
// Reply quote
replyName = name
let rawReplyMsg = first.message.isEmpty ? "Photo" : first.message
replyText = EmojiParser.replaceShortcodes(in: rawReplyMsg)
replyMessageId = first.message_id
}
// Parse reply data for quote display (cached JSON decode once per message)
let replyEntry: ReplyDataCacheEntry? = {
guard msg.attachments.contains(where: { $0.type == .messages }) else { return nil }
if let cached = self.replyDataCache[msg.id] { return cached }
guard let att = msg.attachments.first(where: { $0.type == .messages }),
let data = att.blob.data(using: .utf8),
let replies = try? JSONDecoder().decode([ReplyMessageData].self, from: data),
let first = replies.first else {
let empty = ReplyDataCacheEntry(replyName: nil, replyText: nil, replyMessageId: nil, forwardSenderName: nil, forwardSenderKey: nil)
self.replyDataCache[msg.id] = empty
return empty
}
}
let senderKey = first.publicKey
let name: String
if senderKey == self.config.currentPublicKey {
name = "You"
} else if senderKey == self.config.opponentPublicKey {
name = self.config.opponentTitle.isEmpty
? String(senderKey.prefix(8)) + ""
: self.config.opponentTitle
} else {
name = DialogRepository.shared.dialogs[senderKey]?.opponentTitle
?? String(senderKey.prefix(8)) + ""
}
let displayText = MessageCellLayout.isGarbageOrEncrypted(msg.text) ? "" : msg.text
let entry: ReplyDataCacheEntry
if displayText.isEmpty {
entry = ReplyDataCacheEntry(replyName: nil, replyText: nil, replyMessageId: nil, forwardSenderName: name, forwardSenderKey: senderKey)
} else {
let rawReplyMsg = first.message.isEmpty ? "Photo" : first.message
entry = ReplyDataCacheEntry(replyName: name, replyText: EmojiParser.replaceShortcodes(in: rawReplyMsg), replyMessageId: first.message_id, forwardSenderName: nil, forwardSenderKey: nil)
}
self.replyDataCache[msg.id] = entry
return entry
}()
cell.isSavedMessages = self.config.isSavedMessages
cell.isSystemAccount = self.config.isSystemAccount
@@ -381,11 +444,11 @@ final class NativeMessageListController: UIViewController {
timestamp: self.formatTimestamp(msg.timestamp),
textLayout: self.textLayoutCache[msg.id],
actions: self.config.actions,
replyName: replyName,
replyText: replyText,
replyMessageId: replyMessageId,
forwardSenderName: forwardSenderName,
forwardSenderKey: forwardSenderKey
replyName: replyEntry?.replyName,
replyText: replyEntry?.replyText,
replyMessageId: replyEntry?.replyMessageId,
forwardSenderName: replyEntry?.forwardSenderName,
forwardSenderKey: replyEntry?.forwardSenderKey
)
// Multi-select: apply selection state on cell (re)configuration
@@ -444,6 +507,11 @@ final class NativeMessageListController: UIViewController {
private func performSetupComposer() {
let composer = ComposerView(frame: .zero)
composer.delegate = self
composer.isGroupChat = config.isGroupChat
composer.groupDialogKey = config.opponentPublicKey
if config.isGroupChat {
composer.preloadMentionMembers()
}
composer.translatesAutoresizingMaskIntoConstraints = false
view.addSubview(composer)
@@ -550,7 +618,14 @@ final class NativeMessageListController: UIViewController {
updateScrollToBottomBadge()
}
var onJumpToBottom: (() -> Void)?
/// Handler for the floating scroll-to-bottom button.
@objc private func scrollToBottomTapped() {
    // If detached from bottom (sliding window trimmed newest), reload latest first
    if hasNewerMessages {
        onJumpToBottom?()
        return
    }
    scrollToBottom(animated: true)
    // NOTE(review): passes `true` (= button visible) immediately after
    // scrolling to the bottom, which looks inverted — confirm intended
    // semantics against setScrollToBottomVisible / onScrollToBottomVisibilityChange.
    onScrollToBottomVisibilityChange?(true)
}
@@ -579,7 +654,17 @@ final class NativeMessageListController: UIViewController {
let safeBottom = view.safeAreaInsets.bottom
let compactShift = safeBottom <= 32 ? UIConstants.scrollButtonCompactExtraTrailing : 0
scrollToBottomTrailingConstraint?.constant = -(UIConstants.scrollButtonBaseTrailing + compactShift)
scrollToBottomBottomConstraint?.constant = -(lastComposerHeight + 4)
scrollToBottomBottomConstraint?.constant = -(lastComposerHeight + 4 + recordingAwareScrollLift())
}
/// Extra bottom offset for the scroll-to-bottom button while the composer is
/// in a locked/preview voice-recording state, so the button clears that UI.
/// Returns 0 when no composer exists or recording UI is not expanded.
private func recordingAwareScrollLift() -> CGFloat {
    guard let state = composerView?.recordingFlowState else { return 0 }
    switch state {
    case .recordingLocked, .waitingForPreview, .draftPreview:
        return UIConstants.recordingScrollLift
    default:
        return 0
    }
}
private func updateScrollToBottomBadge() {
@@ -947,6 +1032,11 @@ final class NativeMessageListController: UIViewController {
/// Called from SwiftUI when messages array changes.
func update(messages: [ChatMessage], animated: Bool = false) {
// Hide skeleton on first message arrival
if isShowingSkeleton && !messages.isEmpty {
hideSkeletonAnimated()
}
let oldIds = Set(self.messages.map(\.id))
let oldNewestId = self.messages.last?.id
@@ -975,21 +1065,58 @@ final class NativeMessageListController: UIViewController {
self.messages = messages
// Recalculate ALL layouts BubblePosition depends on neighbors in the FULL
// array, so inserting one message changes the previous message's position/tail.
// CoreText measurement is ~0.1ms per message; 50 msgs 5ms well under 16ms.
calculateLayouts()
// Evict caches for messages no longer in the sliding window
if !layoutCache.isEmpty {
let currentIds = Set(messages.map(\.id))
layoutCache = layoutCache.filter { currentIds.contains($0.key) }
textLayoutCache = textLayoutCache.filter { currentIds.contains($0.key) }
replyDataCache = replyDataCache.filter { currentIds.contains($0.key) }
}
// Layout calculation: sync for first load, async for subsequent updates.
if layoutCache.isEmpty {
// First load: synchronous to avoid blank cells
calculateLayouts()
} else if !newIds.isEmpty && newIds.count <= 20 {
// Incremental: only new messages + neighbors, on background
var dirtyIds = newIds
for i in messages.indices where newIds.contains(messages[i].id) {
if i > 0 { dirtyIds.insert(messages[i - 1].id) }
if i < messages.count - 1 { dirtyIds.insert(messages[i + 1].id) }
}
calculateLayoutsAsync(dirtyIds: dirtyIds)
} else {
// Bulk update (pagination, sync): async full recalculation
calculateLayoutsAsync()
}
var snapshot = NSDiffableDataSourceSnapshot<Int, String>()
snapshot.appendSections([0])
let itemIds = messages.reversed().map(\.id)
snapshot.appendItems(itemIds)
// Reconfigure existing cells whose BubblePosition/tail may have changed.
// Without this, DiffableDataSource reuses stale cells (wrong corners/tail).
let existingItems = itemIds.filter { oldIds.contains($0) }
if !existingItems.isEmpty {
snapshot.reconfigureItems(existingItems)
// Reconfigure only neighbor cells whose BubblePosition/tail may have changed.
// BubblePosition depends only on immediate predecessor/successor (shouldMerge),
// so we only need to reconfigure messages adjacent to each new/changed message.
if !newIds.isEmpty && newIds.count <= 20 {
var neighborsToReconfigure = Set<String>()
for i in messages.indices {
if newIds.contains(messages[i].id) {
if i > 0 { neighborsToReconfigure.insert(messages[i - 1].id) }
if i < messages.count - 1 { neighborsToReconfigure.insert(messages[i + 1].id) }
}
}
// Only reconfigure old cells that were already in the snapshot
let toReconfigure = neighborsToReconfigure.filter { oldIds.contains($0) }
if !toReconfigure.isEmpty {
snapshot.reconfigureItems(Array(toReconfigure))
}
} else {
// Bulk update (pagination, sync): reconfigure all existing cells
let existingItems = itemIds.filter { oldIds.contains($0) }
if !existingItems.isEmpty {
snapshot.reconfigureItems(existingItems)
}
}
dataSource.apply(snapshot, animatingDifferences: false)
@@ -1088,10 +1215,9 @@ final class NativeMessageListController: UIViewController {
// MARK: - Layout Calculation (Telegram asyncLayout pattern)
/// Recalculate layouts for ALL messages using the full array.
/// BubblePosition is computed from neighbors partial recalculation produces
/// stale positions (wrong corners, missing tails on live insertion).
private func calculateLayouts() {
/// Recalculate layouts for messages. When `dirtyIds` is provided, only those
/// messages are recalculated (incremental mode). Otherwise recalculates all.
private func calculateLayouts(dirtyIds: Set<String>? = nil) {
guard !messages.isEmpty else {
layoutCache.removeAll()
textLayoutCache.removeAll()
@@ -1107,12 +1233,50 @@ final class NativeMessageListController: UIViewController {
opponentTitle: config.opponentTitle,
isGroupChat: config.isGroupChat,
groupAdminKey: config.groupAdminKey,
isDarkMode: isDark
isDarkMode: isDark,
dirtyIds: dirtyIds,
existingLayouts: dirtyIds != nil ? layoutCache : nil,
existingTextLayouts: dirtyIds != nil ? textLayoutCache : nil
)
layoutCache = layouts
textLayoutCache = textLayouts
}
/// Async layout calculation on background thread via LayoutEngine actor.
/// Results applied on main thread; stale results discarded via generation counter.
/// - Parameter dirtyIds: When non-nil, only these message IDs are recalculated
///   (incremental mode) and the current layout/text caches are handed to the
///   engine for reuse. When nil, every message is recalculated from scratch.
private func calculateLayoutsAsync(dirtyIds: Set<String>? = nil) {
    guard !messages.isEmpty else {
        // Nothing to lay out: drop caches so stale entries cannot linger.
        layoutCache.removeAll()
        textLayoutCache.removeAll()
        return
    }
    // Theme is baked into layouts (colors), so sample it per request.
    let themeMode = UserDefaults.standard.string(forKey: "rosetta_theme_mode") ?? "dark"
    let isDark = themeMode != "light"
    let request = LayoutEngine.LayoutRequest(
        messages: messages,
        maxBubbleWidth: config.maxBubbleWidth,
        currentPublicKey: config.currentPublicKey,
        opponentPublicKey: config.opponentPublicKey,
        opponentTitle: config.opponentTitle,
        isGroupChat: config.isGroupChat,
        groupAdminKey: config.groupAdminKey,
        isDarkMode: isDark,
        dirtyIds: dirtyIds,
        existingLayouts: dirtyIds != nil ? layoutCache : nil,
        existingTextLayouts: dirtyIds != nil ? textLayoutCache : nil
    )
    Task { @MainActor [weak self] in
        let result = await LayoutEngine.shared.calculate(request)
        // Drop results older than the newest applied generation.
        // NOTE(review): `>=` also accepts an *equal* generation — confirm the
        // engine increments the counter per request so equality cannot
        // re-apply a result that was already superseded.
        guard let self, result.generation >= self.layoutGeneration else { return }
        self.layoutGeneration = result.generation
        self.layoutCache = result.layouts
        self.textLayoutCache = result.textLayouts
        self.reconfigureVisibleCells()
    }
}
// MARK: - Inset Management
/// Update content insets for composer overlap + keyboard.
@@ -1370,14 +1534,50 @@ extension NativeMessageListController: UICollectionViewDelegate {
setScrollToBottomVisible(!isAtBottom)
}
// Top pagination (older messages) inverted scroll: visual top = content bottom
// Telegram parity: prefetch well ahead of the edge (~4 cells at 500pt).
let offsetFromTop = scrollView.contentSize.height - scrollView.contentOffset.y - scrollView.bounds.height
if offsetFromTop < 200, hasMoreMessages {
if offsetFromTop < 500, hasMoreMessages {
onPaginationTrigger?()
}
// Bottom pagination (newer messages) reuse offsetFromBottom from above
if offsetFromBottom < 500, hasNewerMessages {
onBottomPaginationTrigger?()
}
}
}
// MARK: - UICollectionViewDataSourcePrefetching
extension NativeMessageListController: UICollectionViewDataSourcePrefetching {
    /// Telegram parity: proactively warm the encrypted disk → memory image
    /// cache for upcoming cells, eliminating the placeholder→image flash
    /// while scrolling.
    func collectionView(_ collectionView: UICollectionView,
                        prefetchItemsAt indexPaths: [IndexPath]) {
        for path in indexPaths {
            guard let messageId = dataSource.itemIdentifier(for: path),
                  let message = messages.first(where: { $0.id == messageId }) else { continue }
            let imageAttachments = message.attachments.filter { $0.type == .image }
            for attachment in imageAttachments {
                let attachmentId = attachment.id
                // Already resident in memory — nothing to warm.
                guard AttachmentCache.shared.cachedImage(forAttachmentId: attachmentId) == nil else { continue }
                Task.detached(priority: .utility) {
                    _ = AttachmentCache.shared.loadImage(forAttachmentId: attachmentId)
                }
            }
        }
    }

    func collectionView(_ collectionView: UICollectionView,
                        cancelPrefetchingForItemsAt indexPaths: [IndexPath]) {
        // Intentionally empty: a disk-to-memory load is cheap enough to let
        // finish, so in-flight prefetches are never cancelled.
    }
}
// MARK: - ComposerViewDelegate
extension NativeMessageListController: ComposerViewDelegate {
@@ -1430,9 +1630,14 @@ extension NativeMessageListController: ComposerViewDelegate {
func composerDidStartRecording(_ composer: ComposerView) {
    // Recording itself is handled by ComposerView internally; here we only
    // re-anchor the scroll-to-bottom button around the recording UI.
    updateScrollToBottomButtonConstraints()
    view.layoutIfNeeded()
}
func composerDidFinishRecording(_ composer: ComposerView, sendImmediately: Bool) {
updateScrollToBottomButtonConstraints()
view.layoutIfNeeded()
guard sendImmediately,
let url = composer.lastRecordedURL,
let data = try? Data(contentsOf: url) else { return }
@@ -1461,10 +1666,14 @@ extension NativeMessageListController: ComposerViewDelegate {
func composerDidCancelRecording(_ composer: ComposerView) {
    // Recording cancelled — nothing is sent; just re-anchor the
    // scroll-to-bottom button now that the recording UI state changed.
    updateScrollToBottomButtonConstraints()
    view.layoutIfNeeded()
}
func composerDidLockRecording(_ composer: ComposerView) {
    // Hands-free (locked) recording UI is handled by ComposerView; the
    // button constraints account for the locked state via recordingAwareScrollLift().
    updateScrollToBottomButtonConstraints()
    view.layoutIfNeeded()
}
private func animateVoiceSendTransition(source: VoiceSendTransitionSource, messageId: String) {
@@ -1597,9 +1806,12 @@ struct NativeMessageListView: UIViewControllerRepresentable {
var emptyChatInfo: EmptyChatInfo?
var scrollToMessageId: String?
var shouldScrollToBottom: Bool = false
@Binding var scrollToBottomRequested: Bool
var scrollToBottomTrigger: UInt = 0
var onAtBottomChange: ((Bool) -> Void)?
var onPaginate: (() -> Void)?
var onBottomPaginate: (() -> Void)?
var onJumpToBottom: (() -> Void)?
var hasNewerMessagesFlag: Bool = false
var onTapBackground: (() -> Void)?
var onNewMessageAutoScroll: (() -> Void)?
var onComposerHeightChange: ((CGFloat) -> Void)?
@@ -1675,6 +1887,7 @@ struct NativeMessageListView: UIViewControllerRepresentable {
}
controller.hasMoreMessages = hasMoreMessages
controller.hasNewerMessages = hasNewerMessagesFlag
wireCallbacks(controller, context: context)
@@ -1719,11 +1932,11 @@ struct NativeMessageListView: UIViewControllerRepresentable {
controller.reconfigureVisibleCells()
}
// Scroll-to-bottom button request
if scrollToBottomRequested {
// Scroll-to-bottom button request (counter avoids Binding write-back cycle)
if scrollToBottomTrigger != coordinator.lastScrollTrigger {
coordinator.lastScrollTrigger = scrollToBottomTrigger
DispatchQueue.main.async {
controller.scrollToBottom(animated: true)
scrollToBottomRequested = false
}
}
@@ -1754,6 +1967,8 @@ struct NativeMessageListView: UIViewControllerRepresentable {
DispatchQueue.main.async { onAtBottomChange?(isAtBottom) }
}
controller.onPaginationTrigger = { onPaginate?() }
controller.onBottomPaginationTrigger = { onBottomPaginate?() }
controller.onJumpToBottom = { onJumpToBottom?() }
controller.onTapBackground = { onTapBackground?() }
controller.onComposerHeightChange = { h in
DispatchQueue.main.async { onComposerHeightChange?(h) }
@@ -1809,6 +2024,7 @@ struct NativeMessageListView: UIViewControllerRepresentable {
var lastMessageFingerprint: String = ""
var lastNewestMessageId: String?
var lastScrollTargetId: String?
var lastScrollTrigger: UInt = 0
var isAtBottom: Bool = true
}
}

View File

@@ -0,0 +1,283 @@
import UIKit
// MARK: - NativeSkeletonView
/// Telegram-quality skeleton loading for chat message list.
/// Shows 14 incoming bubble placeholders stacked from bottom up with shimmer animation.
/// Telegram parity: ChatLoadingNode.swift 14 bubbles, shimmer via screenBlendMode.
final class NativeSkeletonView: UIView {
    // MARK: - Telegram-Exact Dimensions
    private static let shortHeight: CGFloat = 71
    private static let tallHeight: CGFloat = 93
    private static let avatarSize: CGFloat = 38
    private static let avatarLeftInset: CGFloat = 8
    private static let bubbleLeftInset: CGFloat = 54 // avatar + spacing
    private static let verticalGap: CGFloat = 4 // Small gap between skeleton bubbles
    private static let initialBottomOffset: CGFloat = 5

    /// Telegram-exact width fractions and heights for 14 skeleton bubbles.
    private static let bubbleSpecs: [(widthFrac: CGFloat, height: CGFloat)] = [
        (0.47, tallHeight), (0.58, tallHeight), (0.69, tallHeight), (0.47, tallHeight),
        (0.58, shortHeight), (0.36, tallHeight), (0.47, tallHeight), (0.36, shortHeight),
        (0.58, tallHeight), (0.69, tallHeight), (0.58, tallHeight), (0.36, shortHeight),
        (0.47, tallHeight), (0.58, tallHeight),
    ]

    // MARK: - Shimmer Parameters (Telegram-exact)
    private static let shimmerDuration: CFTimeInterval = 1.6
    private static let shimmerEffectSize: CGFloat = 280
    private static let shimmerOpacity: CGFloat = 0.14
    private static let borderShimmerEffectSize: CGFloat = 320
    private static let borderShimmerOpacity: CGFloat = 0.35

    // MARK: - Subviews
    private let containerView = UIView()
    private var bubbleLayers: [CAShapeLayer] = []
    private var avatarLayers: [CAShapeLayer] = []
    private var shimmerLayer: CALayer?
    private var borderShimmerLayer: CALayer?
    /// Size the layers were last built for. Rebuilds happen only when the size
    /// changes, so unrelated layout passes do not restart the shimmer animation.
    private var lastBuiltSize: CGSize = .zero

    // MARK: - Init
    override init(frame: CGRect) {
        super.init(frame: frame)
        setup()
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    // MARK: - Setup
    private func setup() {
        backgroundColor = .clear
        isUserInteractionEnabled = false
        containerView.frame = bounds
        containerView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        addSubview(containerView)
    }

    // MARK: - Layout
    override func layoutSubviews() {
        super.layoutSubviews()
        containerView.frame = bounds
        // FIX: previously rebuildBubbles() ran on *every* layout pass, tearing
        // down all layers and restarting the shimmer animation (visible as a
        // shimmer "jump" on keyboard/safe-area churn). Only rebuild when the
        // size actually changed.
        guard bounds.size != lastBuiltSize else { return }
        lastBuiltSize = bounds.size
        rebuildBubbles()
    }

    /// Tears down and recreates every bubble/avatar layer plus both shimmer
    /// layers for the current bounds and theme.
    private func rebuildBubbles() {
        // Remove old layers
        bubbleLayers.forEach { $0.removeFromSuperlayer() }
        avatarLayers.forEach { $0.removeFromSuperlayer() }
        bubbleLayers.removeAll()
        avatarLayers.removeAll()
        shimmerLayer?.removeFromSuperlayer()
        borderShimmerLayer?.removeFromSuperlayer()

        let width = bounds.width
        let height = bounds.height
        guard width > 0, height > 0 else { return }

        // Theme is sampled at build time; a mid-display theme switch is picked
        // up on the next size change (skeletons are short-lived, so acceptable).
        let themeMode = UserDefaults.standard.string(forKey: "rosetta_theme_mode") ?? "dark"
        let isDark = themeMode != "light"
        let bubbleColor = isDark
            ? UIColor.gray.withAlphaComponent(0.08)
            : UIColor.gray.withAlphaComponent(0.10)
        let avatarColor = isDark
            ? UIColor.gray.withAlphaComponent(0.06)
            : UIColor.gray.withAlphaComponent(0.08)

        // Combined masks clip the shimmer layers to bubble/avatar shapes only.
        let combinedMaskPath = CGMutablePath()
        let combinedBorderMaskPath = CGMutablePath()

        // Stack bubbles from the bottom up, like an inverted chat list.
        var y = height - Self.initialBottomOffset
        let metrics = BubbleMetrics.telegram()
        for spec in Self.bubbleSpecs {
            let bubbleWidth = floor(spec.widthFrac * width)
            let bubbleHeight = spec.height
            y -= bubbleHeight
            guard y > -bubbleHeight else { break } // Fully off screen — stop.

            let bubbleFrame = CGRect(
                x: Self.bubbleLeftInset,
                y: y,
                width: bubbleWidth,
                height: bubbleHeight
            )

            // Bubble shape layer (visible fill)
            let bubblePath = BubbleGeometryEngine.makeBezierPath(
                in: CGRect(origin: .zero, size: bubbleFrame.size),
                mergeType: .none,
                outgoing: false,
                metrics: metrics
            )
            let bubbleLayer = CAShapeLayer()
            bubbleLayer.frame = bubbleFrame
            bubbleLayer.path = bubblePath.cgPath
            bubbleLayer.fillColor = bubbleColor.cgColor
            containerView.layer.addSublayer(bubbleLayer)
            bubbleLayers.append(bubbleLayer)

            // Add to combined mask for shimmer clipping (path is in bubble-local
            // coordinates, so translate into the container's space).
            var translateTransform = CGAffineTransform(translationX: bubbleFrame.minX, y: bubbleFrame.minY)
            if let translatedPath = bubblePath.cgPath.copy(using: &translateTransform) {
                combinedMaskPath.addPath(translatedPath)
            }

            // Border mask (stroke outline only, for the brighter edge shimmer)
            let borderStrokePath = bubblePath.cgPath.copy(
                strokingWithWidth: 2,
                lineCap: CGLineCap.round,
                lineJoin: CGLineJoin.round,
                miterLimit: 10
            )
            if let translatedBorderPath = borderStrokePath.copy(using: &translateTransform) {
                combinedBorderMaskPath.addPath(translatedBorderPath)
            }

            // Avatar circle, bottom-aligned with its bubble
            let avatarFrame = CGRect(
                x: Self.avatarLeftInset,
                y: y + bubbleHeight - Self.avatarSize,
                width: Self.avatarSize,
                height: Self.avatarSize
            )
            let avatarLayer = CAShapeLayer()
            avatarLayer.frame = avatarFrame
            avatarLayer.path = UIBezierPath(ovalIn: CGRect(origin: .zero, size: avatarFrame.size)).cgPath
            avatarLayer.fillColor = avatarColor.cgColor
            avatarLayer.strokeColor = bubbleColor.cgColor
            avatarLayer.lineWidth = 1
            containerView.layer.addSublayer(avatarLayer)
            avatarLayers.append(avatarLayer)

            // Avatar participates in the content shimmer mask too
            let avatarCircle = CGPath(ellipseIn: avatarFrame, transform: nil)
            combinedMaskPath.addPath(avatarCircle)

            y -= Self.verticalGap
        }

        // Content shimmer layer clipped to all bubble + avatar shapes
        let contentShimmer = makeShimmerLayer(
            size: bounds.size,
            effectSize: Self.shimmerEffectSize,
            color: UIColor.white.withAlphaComponent(Self.shimmerOpacity),
            duration: Self.shimmerDuration
        )
        let contentMask = CAShapeLayer()
        contentMask.path = combinedMaskPath
        contentShimmer.mask = contentMask
        containerView.layer.addSublayer(contentShimmer)
        shimmerLayer = contentShimmer

        // Border shimmer layer clipped to the stroked bubble outlines
        let borderShimmer = makeShimmerLayer(
            size: bounds.size,
            effectSize: Self.borderShimmerEffectSize,
            color: UIColor.white.withAlphaComponent(Self.borderShimmerOpacity),
            duration: Self.shimmerDuration
        )
        let borderMask = CAShapeLayer()
        borderMask.path = combinedBorderMaskPath
        borderShimmer.mask = borderMask
        containerView.layer.addSublayer(borderShimmer)
        borderShimmerLayer = borderShimmer
    }

    // MARK: - Shimmer Layer Factory

    /// Builds a horizontally sweeping highlight layer.
    /// - Parameters:
    ///   - size: Full canvas the shimmer travels across.
    ///   - effectSize: Width of the moving gradient band, in points.
    ///   - color: Peak highlight color (band edges fade to transparent).
    ///   - duration: One full sweep; repeats indefinitely.
    private func makeShimmerLayer(
        size: CGSize,
        effectSize: CGFloat,
        color: UIColor,
        duration: CFTimeInterval
    ) -> CALayer {
        let container = CALayer()
        container.frame = CGRect(origin: .zero, size: size)
        container.compositingFilter = "screenBlendMode"

        // Horizontal gradient: transparent -> color -> transparent
        let gradientLayer = CAGradientLayer()
        gradientLayer.colors = [
            color.withAlphaComponent(0).cgColor,
            color.cgColor,
            color.withAlphaComponent(0).cgColor,
        ]
        gradientLayer.locations = [0.0, 0.5, 1.0]
        gradientLayer.startPoint = CGPoint(x: 0, y: 0.5)
        gradientLayer.endPoint = CGPoint(x: 1, y: 0.5)
        gradientLayer.frame = CGRect(x: -effectSize, y: 0, width: effectSize, height: size.height)
        container.addSublayer(gradientLayer)

        // Sweep position.x across the canvas plus half a band on each side so
        // the highlight fully enters and exits the visible area.
        let animation = CABasicAnimation(keyPath: "position.x")
        animation.fromValue = -effectSize / 2
        animation.toValue = size.width + effectSize / 2
        animation.duration = duration
        animation.timingFunction = CAMediaTimingFunction(name: .easeOut)
        animation.repeatCount = .infinity
        gradientLayer.add(animation, forKey: "shimmer")

        return container
    }

    // MARK: - Staggered Fade Out

    /// Fades the skeleton out bubble-by-bubble (bottom = index 0 fades first)
    /// and invokes `completion` once the last fade has finished.
    func animateOut(completion: @escaping () -> Void) {
        let totalLayers = bubbleLayers.count + avatarLayers.count
        guard totalLayers > 0 else {
            completion()
            return
        }

        // Fade out shimmer first (CALayer — use CATransaction, not UIView.animate)
        CATransaction.begin()
        CATransaction.setAnimationDuration(0.15)
        shimmerLayer?.opacity = 0
        borderShimmerLayer?.opacity = 0
        CATransaction.commit()

        // Staggered fade-out for bubbles and their avatars
        for (i, layer) in bubbleLayers.enumerated() {
            addFadeOut(to: layer, delay: Double(i) * 0.02)
        }
        for (i, layer) in avatarLayers.enumerated() {
            addFadeOut(to: layer, delay: Double(i) * 0.02)
        }

        // Call completion after the longest-delayed fade completes
        let totalDuration = Double(bubbleLayers.count) * 0.02 + 0.2
        DispatchQueue.main.asyncAfter(deadline: .now() + totalDuration) {
            completion()
        }
    }

    /// Adds a persistent 0.2s opacity fade to `layer`, starting after `delay`.
    /// `fillMode = .forwards` keeps the layer invisible after the animation,
    /// since the model opacity is never written (the view is removed shortly after).
    private func addFadeOut(to layer: CALayer, delay: CFTimeInterval) {
        let fade = CABasicAnimation(keyPath: "opacity")
        fade.fromValue = 1.0
        fade.toValue = 0.0
        fade.duration = 0.2
        fade.beginTime = CACurrentMediaTime() + delay
        fade.fillMode = .forwards
        fade.isRemovedOnCompletion = false
        layer.add(fade, forKey: "fadeOut")
    }
}

View File

@@ -1,39 +1,69 @@
import QuartzCore
import Lottie
import UIKit
// MARK: - RecordingLockView
/// Lock indicator shown above mic button during voice recording.
/// Telegram parity from TGModernConversationInputMicButton.m:
/// - Frame: 40×72pt, positioned 122pt above mic center
/// - Padlock icon (CAShapeLayer) + upward arrow
/// - Spring entry: damping 0.55, duration 0.5s
/// - Lockness progress: arrow fades, panel shrinks
/// Telegram parity contract:
/// - Wrapper frame: 40x72
/// - Lock morph: y = 40 * lockness, height = 72 - 32 * lockness
/// - Lock icon shift: -11 * lockness, arrow shift: -39 * lockness
/// - Stop mode: clean 40x40 circle, delayed fade-in (0.56s)
final class RecordingLockView: UIView {
private enum VisualState {
case lock
case stop
}
// MARK: - Layout Constants (Telegram exact)
private let panelWidth: CGFloat = 40
private let panelFullHeight: CGFloat = 72
private let panelLockedHeight: CGFloat = 40 // 72 - 32
private let verticalOffset: CGFloat = 122 // above mic center
private let cornerRadius: CGFloat = 20
private let panelLockedHeight: CGFloat = 40
private let verticalOffset: CGFloat = 122
// MARK: - Subviews
private let backgroundView = UIView()
private let lockIcon = CAShapeLayer()
private let arrowLayer = CAShapeLayer()
private let stopButton = UIButton(type: .system)
private let panelGlassView = TelegramGlassUIView(frame: .zero)
private let panelBorderView = UIView()
private let lockAnimationContainer = UIView()
private let idleLockView = LottieAnimationView()
private let lockingView = LottieAnimationView()
private let lockFallbackGlyphView = UIImageView()
private let lockArrowView = UIImageView()
private let stopButton = UIButton(type: .custom)
private let stopGlassView = TelegramGlassUIView(frame: .zero)
private let stopBorderView = UIView()
private let stopGlyphView = UIImageView()
private var onStopTap: (() -> Void)?
// MARK: - State
private var currentLockness: CGFloat = 0
private var visualState: VisualState = .lock
private var usesComponentVisuals: Bool {
if #available(iOS 26.0, *) {
return true
}
return false
}
private var isShowingStopButton: Bool {
visualState == .stop
}
// MARK: - Init
override init(frame: CGRect) {
super.init(frame: frame)
isUserInteractionEnabled = false
setupBackground()
setupLockIcon()
clipsToBounds = false
isUserInteractionEnabled = true
setupPanel()
setupLockAnimations()
setupArrow()
setupStopButton()
}
@@ -43,115 +73,199 @@ final class RecordingLockView: UIView {
// MARK: - Setup
private func setupBackground() {
backgroundView.backgroundColor = UIColor(white: 0.15, alpha: 0.9)
backgroundView.layer.cornerRadius = cornerRadius
backgroundView.layer.cornerCurve = .continuous
backgroundView.layer.borderWidth = 1.0 / UIScreen.main.scale
backgroundView.layer.borderColor = UIColor(white: 0.3, alpha: 0.5).cgColor
addSubview(backgroundView)
private func setupPanel() {
panelGlassView.isUserInteractionEnabled = false
panelGlassView.fixedCornerRadius = panelFullHeight / 2.0
addSubview(panelGlassView)
panelBorderView.isUserInteractionEnabled = false
panelBorderView.backgroundColor = .clear
panelBorderView.layer.cornerCurve = .continuous
panelBorderView.layer.borderWidth = 1.0 / UIScreen.main.scale
addSubview(panelBorderView)
}
private func setupLockIcon() {
// Simple padlock: body (rounded rect) + shackle (arc)
let path = UIBezierPath()
private func setupLockAnimations() {
lockAnimationContainer.isUserInteractionEnabled = false
addSubview(lockAnimationContainer)
// Shackle (arc above body)
let shackleW: CGFloat = 10
let shackleH: CGFloat = 8
let bodyTop: CGFloat = 10
let centerX: CGFloat = panelWidth / 2
path.move(to: CGPoint(x: centerX - shackleW / 2, y: bodyTop))
path.addLine(to: CGPoint(x: centerX - shackleW / 2, y: bodyTop - shackleH + 3))
path.addCurve(
to: CGPoint(x: centerX + shackleW / 2, y: bodyTop - shackleH + 3),
controlPoint1: CGPoint(x: centerX - shackleW / 2, y: bodyTop - shackleH - 2),
controlPoint2: CGPoint(x: centerX + shackleW / 2, y: bodyTop - shackleH - 2)
)
path.addLine(to: CGPoint(x: centerX + shackleW / 2, y: bodyTop))
if let animation = LottieAnimation.named(VoiceRecordingLottieAsset.lockWait.rawValue) {
idleLockView.animation = animation
}
idleLockView.backgroundBehavior = .pauseAndRestore
idleLockView.loopMode = .autoReverse
idleLockView.contentMode = .scaleAspectFit
idleLockView.isUserInteractionEnabled = false
lockAnimationContainer.addSubview(idleLockView)
lockIcon.path = path.cgPath
lockIcon.strokeColor = UIColor.white.cgColor
lockIcon.fillColor = UIColor.clear.cgColor
lockIcon.lineWidth = 1.5
lockIcon.lineCap = .round
if let animation = LottieAnimation.named(VoiceRecordingLottieAsset.lock.rawValue) {
lockingView.animation = animation
}
lockingView.backgroundBehavior = .pauseAndRestore
lockingView.loopMode = .playOnce
lockingView.contentMode = .scaleAspectFit
lockingView.currentProgress = 0
lockingView.isHidden = true
lockingView.isUserInteractionEnabled = false
lockAnimationContainer.addSubview(lockingView)
// Body (rounded rect below shackle)
let bodyW: CGFloat = 14
let bodyH: CGFloat = 10
let bodyPath = UIBezierPath(
roundedRect: CGRect(
x: centerX - bodyW / 2,
y: bodyTop,
width: bodyW,
height: bodyH
),
cornerRadius: 2
)
let bodyLayer = CAShapeLayer()
bodyLayer.path = bodyPath.cgPath
bodyLayer.fillColor = UIColor.white.cgColor
layer.addSublayer(bodyLayer)
layer.addSublayer(lockIcon)
let lockConfig = UIImage.SymbolConfiguration(pointSize: 24, weight: .regular)
lockFallbackGlyphView.image = UIImage(systemName: "lock", withConfiguration: lockConfig)
lockFallbackGlyphView.contentMode = .center
lockFallbackGlyphView.isHidden = true
lockFallbackGlyphView.isUserInteractionEnabled = false
lockAnimationContainer.addSubview(lockFallbackGlyphView)
}
private func setupArrow() {
// Upward chevron arrow below the lock
let arrowPath = UIBezierPath()
let centerX = panelWidth / 2
let arrowY: CGFloat = 30
arrowPath.move(to: CGPoint(x: centerX - 5, y: arrowY + 5))
arrowPath.addLine(to: CGPoint(x: centerX, y: arrowY))
arrowPath.addLine(to: CGPoint(x: centerX + 5, y: arrowY + 5))
arrowLayer.path = arrowPath.cgPath
arrowLayer.strokeColor = UIColor.white.withAlphaComponent(0.6).cgColor
arrowLayer.fillColor = UIColor.clear.cgColor
arrowLayer.lineWidth = 1.5
arrowLayer.lineCap = .round
arrowLayer.lineJoin = .round
layer.addSublayer(arrowLayer)
lockArrowView.image = VoiceRecordingAssets.image(.videoRecordArrow, templated: true)
lockArrowView.contentMode = .center
lockArrowView.isUserInteractionEnabled = false
addSubview(lockArrowView)
}
private func setupStopButton() {
stopButton.isHidden = true
stopButton.alpha = 0
stopButton.backgroundColor = UIColor(red: 1, green: 45/255.0, blue: 85/255.0, alpha: 1)
stopButton.tintColor = .white
stopButton.layer.cornerRadius = 14
stopButton.clipsToBounds = true
let iconConfig = UIImage.SymbolConfiguration(pointSize: 12, weight: .bold)
stopButton.setImage(UIImage(systemName: "stop.fill", withConfiguration: iconConfig), for: .normal)
stopButton.addTarget(self, action: #selector(stopTapped), for: .touchUpInside)
stopButton.isAccessibilityElement = true
stopButton.clipsToBounds = false
stopButton.backgroundColor = .clear
stopButton.isUserInteractionEnabled = false
stopButton.accessibilityIdentifier = "voice.recording.stop"
stopButton.accessibilityLabel = "Stop recording"
stopButton.accessibilityHint = "Stops voice recording and opens preview."
stopButton.addTarget(self, action: #selector(stopTapped), for: .touchUpInside)
stopGlassView.isUserInteractionEnabled = false
stopGlassView.fixedCornerRadius = 20
stopButton.addSubview(stopGlassView)
stopBorderView.isUserInteractionEnabled = false
stopBorderView.backgroundColor = .clear
stopBorderView.layer.cornerCurve = .continuous
stopBorderView.layer.borderWidth = 1.0 / UIScreen.main.scale
stopBorderView.isHidden = true
stopButton.addSubview(stopBorderView)
stopGlyphView.isUserInteractionEnabled = false
stopGlyphView.contentMode = .center
stopGlyphView.image = VoiceRecordingAssets.image(.pause, templated: true)
?? Self.makeStopGlyphFallback()
stopButton.addSubview(stopGlyphView)
addSubview(stopButton)
}
/// Recomputes the panel/lock/stop-button geometry on every layout pass.
override func layoutSubviews() {
super.layoutSubviews()
updatePanelGeometry()
}
/// Refreshes theme colors when light/dark appearance flips.
/// Guarded so unrelated trait changes (size class, etc.) skip the work.
override func traitCollectionDidChange(_ previousTraitCollection: UITraitCollection?) {
super.traitCollectionDidChange(previousTraitCollection)
guard previousTraitCollection?.userInterfaceStyle != traitCollection.userInterfaceStyle else { return }
updateAppearance()
}
/// Lays out the lock panel, lock-animation stack, arrow, and stop button
/// for the current `visualState` and lock progress (`currentLockness`).
private func updatePanelGeometry() {
    let progress = min(max(currentLockness, 0), 1)

    // Panel shrinks from full height toward the locked height as progress
    // grows; in the stop state it is pinned to the collapsed geometry.
    let panelHeight: CGFloat
    let panelY: CGFloat
    if visualState == .stop {
        panelHeight = panelLockedHeight
        panelY = 32.0
    } else {
        panelHeight = panelFullHeight - (panelFullHeight - panelLockedHeight) * progress
        panelY = 40.0 * progress
    }

    let panelFrame = CGRect(x: 0, y: panelY, width: panelWidth, height: panelHeight)
    panelGlassView.frame = panelFrame
    panelGlassView.fixedCornerRadius = panelHeight / 2.0
    panelGlassView.applyCornerRadius()
    panelBorderView.frame = panelFrame
    panelBorderView.layer.cornerRadius = panelHeight / 2.0

    // Lock animations share one container; the fallback glyph is inset.
    lockAnimationContainer.frame = CGRect(x: 0, y: 6.0, width: 40.0, height: 60.0)
    let lockBounds = lockAnimationContainer.bounds
    idleLockView.frame = lockBounds
    lockingView.frame = lockBounds
    lockFallbackGlyphView.frame = CGRect(x: 8.0, y: 8.0, width: 24.0, height: 24.0)

    // Arrow is horizontally centered; size falls back when the asset is missing.
    let arrowSize = lockArrowView.image?.size ?? CGSize(width: 18, height: 9)
    lockArrowView.frame = CGRect(
        x: floor((panelWidth - arrowSize.width) / 2.0),
        y: 54.0,
        width: arrowSize.width,
        height: arrowSize.height
    )

    stopButton.frame = CGRect(x: 0, y: 32.0, width: 40.0, height: 40.0)
    let stopBounds = stopButton.bounds
    stopGlassView.frame = stopBounds
    stopGlassView.fixedCornerRadius = stopBounds.height / 2.0
    stopGlassView.applyCornerRadius()
    stopBorderView.frame = stopBounds
    stopBorderView.layer.cornerRadius = stopBounds.height / 2.0
    stopGlyphView.frame = stopBounds
}
// MARK: - Present
/// Position above anchor (mic button) and animate in with spring.
func present(anchorCenter: CGPoint, in parent: UIView) {
frame = CGRect(
x: floor(anchorCenter.x - panelWidth / 2),
y: floor(anchorCenter.y - verticalOffset - panelFullHeight / 2),
x: floor(anchorCenter.x - panelWidth / 2.0),
y: floor(anchorCenter.y - verticalOffset - panelFullHeight / 2.0),
width: panelWidth,
height: panelFullHeight
)
backgroundView.frame = bounds
stopButton.frame = CGRect(x: floor((panelWidth - 28) / 2), y: panelFullHeight - 34, width: 28, height: 28)
currentLockness = 0
visualState = .lock
onStopTap = nil
lockAnimationContainer.transform = .identity
lockAnimationContainer.alpha = 1
lockAnimationContainer.isHidden = false
lockAnimationContainer.isUserInteractionEnabled = false
lockArrowView.transform = .identity
lockArrowView.alpha = 1
lockArrowView.isHidden = false
stopButton.isHidden = true
stopButton.alpha = 0
stopButton.transform = .identity
stopButton.isUserInteractionEnabled = false
lockingView.isHidden = true
lockingView.currentProgress = 0
lockFallbackGlyphView.isHidden = true
if idleLockView.animation != nil {
idleLockView.isHidden = false
if !idleLockView.isAnimationPlaying {
idleLockView.play()
}
} else {
idleLockView.isHidden = true
lockFallbackGlyphView.isHidden = false
}
updatePanelGeometry()
updateAppearance()
parent.addSubview(self)
// Start offscreen below
transform = CGAffineTransform(translationX: 0, y: 100)
alpha = 0
UIView.animate(
withDuration: 0.5, delay: 0,
withDuration: 0.5,
delay: 0,
usingSpringWithDamping: 0.55,
initialSpringVelocity: 0, options: []
initialSpringVelocity: 0,
options: [.beginFromCurrentState]
) {
self.transform = .identity
self.alpha = 1
@@ -161,51 +275,89 @@ final class RecordingLockView: UIView {
// MARK: - Lockness Update
/// Update lock progress (0 = idle, 1 = locked).
/// Telegram: arrow alpha = max(0, 1 - lockness * 1.6)
func updateLockness(_ lockness: CGFloat) {
CATransaction.begin()
CATransaction.setDisableActions(true)
arrowLayer.opacity = Float(max(0, 1 - lockness * 1.6))
CATransaction.commit()
guard visualState == .lock else { return }
// Lock icon shifts up slightly
let yOffset = -16 * lockness
CATransaction.begin()
CATransaction.setDisableActions(true)
lockIcon.transform = CATransform3DMakeTranslation(0, yOffset, 0)
CATransaction.commit()
}
currentLockness = max(0, min(1, lockness))
updatePanelGeometry()
// MARK: - Animate Lock Complete
if currentLockness > 0 {
if idleLockView.isAnimationPlaying {
idleLockView.stop()
}
idleLockView.isHidden = true
/// Shrink and dismiss the lock panel after lock is committed.
/// Telegram: panel height 7240, then slides down off-screen.
func animateLockComplete() {
UIView.animate(withDuration: 0.2) {
self.arrowLayer.opacity = 0
self.lockIcon.transform = CATransform3DMakeTranslation(0, -16, 0)
if lockingView.animation != nil {
lockingView.isHidden = false
lockingView.currentProgress = currentLockness
lockFallbackGlyphView.isHidden = true
} else {
lockingView.isHidden = true
lockFallbackGlyphView.isHidden = false
lockFallbackGlyphView.alpha = 0.85 + 0.15 * currentLockness
lockFallbackGlyphView.transform = CGAffineTransform(
scaleX: 0.94 + 0.06 * currentLockness,
y: 0.94 + 0.06 * currentLockness
)
}
} else {
lockingView.isHidden = true
lockFallbackGlyphView.transform = .identity
if idleLockView.animation != nil {
idleLockView.isHidden = false
lockFallbackGlyphView.isHidden = true
if !idleLockView.isAnimationPlaying {
idleLockView.play()
}
} else {
idleLockView.isHidden = true
lockFallbackGlyphView.isHidden = false
}
}
// Slide down and fade after 0.45s
UIView.animate(withDuration: 0.2, delay: 0.45, options: []) {
self.transform = CGAffineTransform(translationX: 0, y: 120)
} completion: { _ in
self.alpha = 0
self.removeFromSuperview()
}
lockAnimationContainer.transform = CGAffineTransform(translationX: 0, y: -11.0 * currentLockness)
lockArrowView.transform = CGAffineTransform(translationX: 0, y: -39.0 * currentLockness)
lockArrowView.alpha = max(0, 1 - currentLockness * 1.6)
}
func showStopButton(onTap: @escaping () -> Void) {
onStopTap = onTap
stopButton.isHidden = false
stopButton.transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
visualState = .stop
currentLockness = 1.0
UIView.animate(withDuration: 0.2) {
self.arrowLayer.opacity = 0
self.lockIcon.transform = CATransform3DMakeTranslation(0, -16, 0)
if idleLockView.isAnimationPlaying {
idleLockView.stop()
}
idleLockView.isHidden = true
lockingView.isHidden = true
lockFallbackGlyphView.isHidden = true
UIView.animate(withDuration: 0.2, delay: 0.02, options: [.curveEaseOut]) {
lockAnimationContainer.layer.removeAllAnimations()
lockAnimationContainer.alpha = 0
lockAnimationContainer.isHidden = true
lockAnimationContainer.isUserInteractionEnabled = false
lockArrowView.layer.removeAllAnimations()
lockArrowView.alpha = 0
lockArrowView.isHidden = true
lockArrowView.isUserInteractionEnabled = false
updatePanelGeometry()
updateAppearance()
// Hide panel glass so it doesn't stack under stopGlassView (double glass fix)
panelGlassView.alpha = 0
panelBorderView.alpha = 0
stopButton.isHidden = false
stopButton.isUserInteractionEnabled = true
stopButton.transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
stopButton.alpha = 0
stopButton.layer.zPosition = 100
bringSubviewToFront(stopButton)
UIView.animate(withDuration: 0.25, delay: 0.56, options: [.curveEaseOut]) {
self.stopButton.alpha = 1
self.stopButton.transform = .identity
}
@@ -214,6 +366,11 @@ final class RecordingLockView: UIView {
// MARK: - Dismiss
func dismiss() {
visualState = .lock
if idleLockView.isAnimationPlaying {
idleLockView.stop()
}
UIView.animate(withDuration: 0.18) {
self.alpha = 0
self.transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
@@ -222,7 +379,77 @@ final class RecordingLockView: UIView {
}
}
/// Expanded hit region: while the stop button is showing only an inflated
/// area around it accepts touches; otherwise the whole (slightly inflated)
/// panel does.
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
    guard isShowingStopButton else {
        return bounds.insetBy(dx: -6, dy: -6).contains(point)
    }
    return stopButton.frame.insetBy(dx: -10, dy: -10).contains(point)
}
/// Routes touches: invisible views swallow nothing; in the stop state only
/// the stop button is hit-testable, everything else passes through.
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
    let isVisible = !isHidden && alpha > 0.01
    guard isVisible, self.point(inside: point, with: event) else { return nil }
    guard isShowingStopButton else {
        return super.hitTest(point, with: event)
    }
    let localPoint = convert(point, to: stopButton)
    return stopButton.point(inside: localPoint, with: event) ? stopButton : nil
}
/// Applies theme-dependent colors to the border views, glyphs, and arrow.
/// With component (glass) visuals the hand-drawn panel border is hidden
/// and near-black/near-white tints are used; otherwise fixed gray tones.
private func updateAppearance() {
    let isDark = traitCollection.userInterfaceStyle == .dark

    let palette: (icon: UIColor, border: UIColor)
    if usesComponentVisuals {
        palette = (
            icon: isDark ? UIColor(white: 0.95, alpha: 0.92) : UIColor(white: 0.05, alpha: 0.92),
            border: isDark ? UIColor(white: 1.0, alpha: 0.22) : UIColor(white: 0.0, alpha: 0.14)
        )
    } else {
        palette = (
            icon: UIColor(white: 0.58, alpha: 1.0),
            border: UIColor(white: 0.7, alpha: 0.55)
        )
    }

    panelBorderView.layer.borderColor = palette.border.cgColor
    panelBorderView.isHidden = usesComponentVisuals
    panelBorderView.alpha = visualState == .lock ? 1.0 : 0.0
    stopBorderView.layer.borderColor = palette.border.cgColor

    for glyph in [stopGlyphView, lockArrowView, lockFallbackGlyphView] {
        glyph.tintColor = palette.icon
    }
}
// MARK: - Stop Action
/// Stop-button tap handler: blocks repeat taps, fades the button out,
/// and notifies the registered callback immediately.
@objc private func stopTapped() {
    stopButton.isUserInteractionEnabled = false
    UIView.animate(withDuration: 0.2, animations: {
        self.stopButton.alpha = 0
    })
    onStopTap?()
}
// MARK: - Images
/// Draws a fallback glyph — two rounded vertical bars (a pause symbol) —
/// used when the bundled pause asset cannot be loaded.
private static func makeStopGlyphFallback() -> UIImage? {
    let canvasSize = CGSize(width: 30, height: 30)
    let renderer = UIGraphicsImageRenderer(size: canvasSize)
    return renderer.image { ctx in
        ctx.cgContext.setFillColor(UIColor.black.cgColor)
        // Two identical bars, mirrored around the canvas center.
        let barXs: [CGFloat] = [7.5, 17.5]
        for x in barXs {
            UIBezierPath(
                roundedRect: CGRect(x: x, y: 8.0, width: 5.0, height: 14.0),
                cornerRadius: 1.0
            ).fill()
        }
    }
}
}

View File

@@ -58,13 +58,13 @@ final class RecordingMicButton: UIControl {
// MARK: - Gesture Thresholds (Telegram parity)
private let holdThreshold: TimeInterval = 0.19
private let cancelDistanceThreshold: CGFloat = -150
private let cancelHapticThreshold: CGFloat = -100
private let lockDistanceThreshold: CGFloat = -110
private let lockHapticThreshold: CGFloat = -60
private let velocityGate: CGFloat = -400
private let preHoldCancelDistance: CGFloat = 10
private let holdThreshold: TimeInterval = VoiceRecordingParityConstants.holdThreshold
private let cancelDistanceThreshold: CGFloat = VoiceRecordingParityConstants.cancelDistanceThreshold
private let cancelHapticThreshold: CGFloat = VoiceRecordingParityConstants.cancelHapticThreshold
private let lockDistanceThreshold: CGFloat = VoiceRecordingParityConstants.lockDistanceThreshold
private let lockHapticThreshold: CGFloat = VoiceRecordingParityConstants.lockHapticThreshold
private let velocityGate: CGFloat = VoiceRecordingParityConstants.velocityGate
private let preHoldCancelDistance: CGFloat = VoiceRecordingParityConstants.preHoldCancelDistance
// MARK: - Tracking State
@@ -191,28 +191,32 @@ final class RecordingMicButton: UIControl {
}
if recordingState == .recording {
// Telegram velocity gate: fast flick left/up commits immediately.
if velocityX < velocityGate {
commitCancel()
return
}
if velocityY < velocityGate {
commitLock()
return
}
// Fallback to distance thresholds on release.
if let touch {
let location = touch.location(in: window)
let distanceX = location.x - touchStartLocation.x
let distanceY = location.y - touchStartLocation.y
if distanceX < cancelDistanceThreshold {
var distanceX = min(0, location.x - touchStartLocation.x)
var distanceY = min(0, location.y - touchStartLocation.y)
// Telegram parity: keep only dominant direction on release.
(distanceX, distanceY) = VoiceRecordingParityMath.dominantAxisDistances(
distanceX: distanceX,
distanceY: distanceY
)
switch VoiceRecordingParityMath.releaseDecision(
velocityX: velocityX,
velocityY: velocityY,
distanceX: distanceX,
distanceY: distanceY
) {
case .cancel:
commitCancel()
return
}
if distanceY < lockDistanceThreshold {
case .lock:
commitLock()
return
case .finish:
break
}
}
@@ -223,8 +227,12 @@ final class RecordingMicButton: UIControl {
override func cancelTracking(with event: UIEvent?) {
if recordingState == .recording {
// Touch cancelled (e.g. system gesture) lock instead of cancel
commitLock()
// Telegram parity: delayed lock after cancelTracking.
targetLockTranslation = 0
DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak self] in
guard let self, self.recordingState == .recording else { return }
self.commitLock()
}
} else {
cancelHoldTimer()
recordingState = .idle
@@ -234,7 +242,7 @@ final class RecordingMicButton: UIControl {
}
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
bounds.insetBy(dx: -10, dy: 0).contains(point)
bounds.insetBy(dx: VoiceRecordingParityConstants.micHitInsetX, dy: 0).contains(point)
}
// MARK: - State Transitions
@@ -286,11 +294,18 @@ final class RecordingMicButton: UIControl {
currentLockTranslation = 0
}
#if DEBUG
/// Test-only hook: forces the internal recording state so debug builds /
/// UI tests can exercise recording flows without a real touch sequence.
func debugSetRecordingState(_ state: VoiceRecordingState) {
recordingState = state
}
#endif
// MARK: - Display Link
/// Starts the display link if one is not already running.
/// Frame rate is capped at 30 fps for the drag animation to save power.
/// NOTE(review): CADisplayLink(target:selector:) retains its target —
/// assumes a matching invalidate elsewhere breaks the cycle; confirm.
private func startDisplayLink() {
guard displayLink == nil else { return }
let link = CADisplayLink(target: self, selector: #selector(displayLinkUpdate))
link.preferredFrameRateRange = CAFrameRateRange(minimum: 24, maximum: 30, preferred: 30)
link.add(to: .main, forMode: .common)
displayLink = link
}

View File

@@ -1,4 +1,5 @@
import AVFAudio
import Lottie
import QuartzCore
import UIKit
@@ -23,6 +24,11 @@ final class RecordingPreviewPanel: UIView {
case trimRight
}
/// Which glyph the play/pause toggle currently shows.
private enum PlayPauseVisualState {
case play
case pause
}
weak var delegate: RecordingPreviewPanelDelegate?
// MARK: - Subviews
@@ -30,6 +36,7 @@ final class RecordingPreviewPanel: UIView {
private let glassBackground = TelegramGlassUIView(frame: .zero)
private let deleteButton = UIButton(type: .system)
private let playButton = UIButton(type: .system)
private let playPauseAnimationView = LottieAnimationView()
private let waveformContainer = UIView()
private let waveformView = WaveformView()
private let leftTrimMask = UIView()
@@ -45,6 +52,7 @@ final class RecordingPreviewPanel: UIView {
private var audioPlayer: AVAudioPlayer?
private var displayLink: CADisplayLink?
private var isPlaying = false
private var playPauseState: PlayPauseVisualState = .pause
private let fileURL: URL
private let duration: TimeInterval
private let waveformSamples: [Float]
@@ -56,6 +64,24 @@ final class RecordingPreviewPanel: UIView {
private var minTrimDuration: TimeInterval = 1
private var activePanMode: PanMode?
/// Primary control tint: black in light mode, white in dark mode
/// (resolved dynamically via a trait-based UIColor).
private var panelControlColor: UIColor {
UIColor { traits in
traits.userInterfaceStyle == .dark ? UIColor.white : UIColor.black
}
}
/// Accent blue (#0088FF) used for the played waveform portion.
private var panelControlAccentColor: UIColor {
UIColor(red: 0, green: 136 / 255.0, blue: 1.0, alpha: 1.0)
}
/// Secondary text/icon tint — 70% of the primary control color.
private var panelSecondaryTextColor: UIColor {
panelControlColor.withAlphaComponent(0.7)
}
/// Unplayed waveform tint — 40% of the primary control color.
private var panelWaveformBackgroundColor: UIColor {
panelControlColor.withAlphaComponent(0.4)
}
/// Currently selected trim window, in seconds from the recording start.
var selectedTrimRange: ClosedRange<TimeInterval> {
trimStart...trimEnd
}
@@ -71,6 +97,7 @@ final class RecordingPreviewPanel: UIView {
clipsToBounds = true
layer.cornerRadius = 21
layer.cornerCurve = .continuous
accessibilityIdentifier = "voice.preview.panel"
setupSubviews()
}
@@ -84,20 +111,28 @@ final class RecordingPreviewPanel: UIView {
glassBackground.isUserInteractionEnabled = false
addSubview(glassBackground)
let trashConfig = UIImage.SymbolConfiguration(pointSize: 18, weight: .medium)
deleteButton.setImage(UIImage(systemName: "trash", withConfiguration: trashConfig), for: .normal)
deleteButton.setImage(VoiceRecordingAssets.image(.delete, templated: true), for: .normal)
deleteButton.tintColor = UIColor(red: 1, green: 45/255.0, blue: 85/255.0, alpha: 1)
deleteButton.addTarget(self, action: #selector(deleteTapped), for: .touchUpInside)
deleteButton.isAccessibilityElement = true
deleteButton.accessibilityLabel = "Delete recording"
deleteButton.accessibilityHint = "Deletes the current voice draft."
deleteButton.accessibilityIdentifier = "voice.preview.delete"
addSubview(deleteButton)
configurePlayButton(playing: false)
playPauseAnimationView.backgroundBehavior = .pauseAndRestore
playPauseAnimationView.contentMode = .scaleAspectFit
playPauseAnimationView.isUserInteractionEnabled = false
playButton.addSubview(playPauseAnimationView)
if let animation = LottieAnimation.named(VoiceRecordingLottieAsset.playPause.rawValue) {
playPauseAnimationView.animation = animation
}
configurePlayButton(playing: false, animated: false)
playButton.addTarget(self, action: #selector(playTapped), for: .touchUpInside)
playButton.isAccessibilityElement = true
playButton.accessibilityLabel = "Play recording"
playButton.accessibilityHint = "Plays or pauses voice preview."
playButton.accessibilityIdentifier = "voice.preview.playPause"
addSubview(playButton)
waveformContainer.clipsToBounds = true
@@ -126,29 +161,35 @@ final class RecordingPreviewPanel: UIView {
waveformContainer.isAccessibilityElement = true
waveformContainer.accessibilityLabel = "Waveform trim area"
waveformContainer.accessibilityHint = "Drag to scrub, or drag edges to trim."
waveformContainer.accessibilityIdentifier = "voice.preview.waveform"
durationLabel.font = .monospacedDigitSystemFont(ofSize: 13, weight: .semibold)
durationLabel.textColor = .white.withAlphaComponent(0.72)
durationLabel.textColor = panelSecondaryTextColor
durationLabel.textAlignment = .right
addSubview(durationLabel)
let recordMoreConfig = UIImage.SymbolConfiguration(pointSize: 17, weight: .semibold)
recordMoreButton.setImage(UIImage(systemName: "plus.circle", withConfiguration: recordMoreConfig), for: .normal)
recordMoreButton.tintColor = .white.withAlphaComponent(0.85)
recordMoreButton.setImage(VoiceRecordingAssets.image(.iconMicrophone, templated: true), for: .normal)
recordMoreButton.tintColor = panelControlColor.withAlphaComponent(0.85)
recordMoreButton.addTarget(self, action: #selector(recordMoreTapped), for: .touchUpInside)
recordMoreButton.isAccessibilityElement = true
recordMoreButton.accessibilityLabel = "Record more"
recordMoreButton.accessibilityHint = "Resume recording and append more audio."
recordMoreButton.accessibilityIdentifier = "voice.preview.recordMore"
addSubview(recordMoreButton)
let sendConfig = UIImage.SymbolConfiguration(pointSize: 20, weight: .semibold)
sendButton.setImage(UIImage(systemName: "arrow.up.circle.fill", withConfiguration: sendConfig), for: .normal)
sendButton.tintColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
sendButton.setImage(VoiceRecordingAssets.image(.send, templated: true), for: .normal)
sendButton.backgroundColor = UIColor(red: 0, green: 136/255.0, blue: 1, alpha: 1)
sendButton.layer.cornerRadius = 18
sendButton.clipsToBounds = true
sendButton.tintColor = .white
sendButton.addTarget(self, action: #selector(sendTapped), for: .touchUpInside)
sendButton.isAccessibilityElement = true
sendButton.accessibilityLabel = "Send recording"
sendButton.accessibilityHint = "Sends current trimmed voice message."
sendButton.accessibilityIdentifier = "voice.preview.send"
addSubview(sendButton)
updateThemeColors()
}
// MARK: - Layout
@@ -157,17 +198,25 @@ final class RecordingPreviewPanel: UIView {
super.layoutSubviews()
let h = bounds.height
let w = bounds.width
let trailingInset: CGFloat = 4
let controlGap: CGFloat = 4
glassBackground.frame = bounds
glassBackground.applyCornerRadius()
deleteButton.frame = CGRect(x: 4, y: (h - 40) / 2, width: 40, height: 40)
playButton.frame = CGRect(x: 44, y: (h - 30) / 2, width: 30, height: 30)
playPauseAnimationView.frame = CGRect(x: 4, y: 4, width: 22, height: 22)
sendButton.frame = CGRect(x: w - 40, y: (h - 36) / 2, width: 36, height: 36)
recordMoreButton.frame = CGRect(x: sendButton.frame.minX - 34, y: (h - 30) / 2, width: 30, height: 30)
sendButton.frame = CGRect(x: w - trailingInset - 36, y: (h - 36) / 2, width: 36, height: 36)
recordMoreButton.frame = CGRect(
x: sendButton.frame.minX - controlGap - 30,
y: (h - 30) / 2,
width: 30,
height: 30
)
let durationW: CGFloat = 44
let durationW: CGFloat = 48
durationLabel.frame = CGRect(
x: recordMoreButton.frame.minX - durationW - 6,
y: (h - 20) / 2,
@@ -180,12 +229,21 @@ final class RecordingPreviewPanel: UIView {
waveformContainer.frame = CGRect(x: waveX, y: 4, width: max(0, waveW), height: h - 8)
waveformView.frame = waveformContainer.bounds
minTrimDuration = max(1.0, 56.0 * duration / max(waveformContainer.bounds.width, 1))
minTrimDuration = VoiceRecordingParityConstants.minTrimDuration(
duration: duration,
waveformWidth: waveformContainer.bounds.width
)
trimEnd = max(trimEnd, min(duration, trimStart + minTrimDuration))
updateTrimVisuals()
updateDurationLabel(isPlaying ? remainingFromPlayer() : (trimEnd - trimStart))
}
/// Reapplies theme colors when the light/dark appearance changes;
/// other trait changes are ignored.
override func traitCollectionDidChange(_ previousTraitCollection: UITraitCollection?) {
super.traitCollectionDidChange(previousTraitCollection)
guard previousTraitCollection?.userInterfaceStyle != traitCollection.userInterfaceStyle else { return }
updateThemeColors()
}
// MARK: - Play/Pause
@objc private func playTapped() {
@@ -207,14 +265,14 @@ final class RecordingPreviewPanel: UIView {
}
player.play()
isPlaying = true
configurePlayButton(playing: true)
configurePlayButton(playing: true, animated: true)
startDisplayLink()
}
private func pausePlayback() {
audioPlayer?.pause()
isPlaying = false
configurePlayButton(playing: false)
configurePlayButton(playing: false, animated: true)
stopDisplayLink()
}
@@ -227,16 +285,44 @@ final class RecordingPreviewPanel: UIView {
waveformView.progress = 0
}
isPlaying = false
configurePlayButton(playing: false)
configurePlayButton(playing: false, animated: false)
updateDurationLabel(trimEnd - trimStart)
stopDisplayLink()
}
private func configurePlayButton(playing: Bool) {
private func configurePlayButton(playing: Bool, animated: Bool) {
let targetState: PlayPauseVisualState = playing ? .pause : .play
guard playPauseState != targetState else { return }
let previous = playPauseState
playPauseState = targetState
if playPauseAnimationView.animation != nil {
playButton.setImage(nil, for: .normal)
switch (previous, targetState) {
case (.play, .pause):
if animated {
playPauseAnimationView.play(fromFrame: 0, toFrame: 41, loopMode: .playOnce)
} else {
playPauseAnimationView.currentFrame = 41
}
case (.pause, .play):
if animated {
playPauseAnimationView.play(fromFrame: 41, toFrame: 83, loopMode: .playOnce)
} else {
playPauseAnimationView.currentFrame = 0
}
case (.play, .play):
playPauseAnimationView.currentFrame = 0
case (.pause, .pause):
playPauseAnimationView.currentFrame = 41
}
return
}
let config = UIImage.SymbolConfiguration(pointSize: 18, weight: .semibold)
let name = playing ? "pause.fill" : "play.fill"
playButton.setImage(UIImage(systemName: name, withConfiguration: config), for: .normal)
playButton.tintColor = .white
let fallbackName = playing ? "pause.fill" : "play.fill"
playButton.setImage(UIImage(systemName: fallbackName, withConfiguration: config), for: .normal)
playButton.tintColor = panelSecondaryTextColor
}
// MARK: - Display Link
@@ -244,6 +330,7 @@ final class RecordingPreviewPanel: UIView {
/// Starts the playback-progress display link if not already running,
/// capped at 30 fps — sufficient for waveform progress updates.
/// NOTE(review): CADisplayLink(target:selector:) retains its target —
/// assumes stopDisplayLink invalidates it to break the cycle; confirm.
private func startDisplayLink() {
guard displayLink == nil else { return }
let link = CADisplayLink(target: self, selector: #selector(displayLinkTick))
link.preferredFrameRateRange = CAFrameRateRange(minimum: 24, maximum: 30, preferred: 30)
link.add(to: .main, forMode: .common)
displayLink = link
}
@@ -359,6 +446,30 @@ final class RecordingPreviewPanel: UIView {
durationLabel.text = String(format: "%d:%02d", minutes, seconds)
}
/// Re-applies theme-aware colors to the preview controls after an
/// appearance change (duration label, record-more button, waveform,
/// and the play/pause control).
private func updateThemeColors() {
durationLabel.textColor = panelSecondaryTextColor
recordMoreButton.tintColor = panelControlColor.withAlphaComponent(0.85)
waveformView.backgroundColor_ = panelWaveformBackgroundColor
waveformView.foregroundColor_ = panelControlAccentColor
waveformView.setNeedsDisplay()
applyPlayPauseTintColor(panelSecondaryTextColor)
// Only the SF Symbol fallback path relies on the button's tintColor;
// it is active solely when the Lottie animation failed to load.
if playPauseAnimationView.animation == nil {
playButton.tintColor = panelSecondaryTextColor
}
}
/// Recolors every "Color" keypath in the play/pause Lottie animation to the
/// given tint by installing a ColorValueProvider.
private func applyPlayPauseTintColor(_ color: UIColor) {
    var red: CGFloat = 0
    var green: CGFloat = 0
    var blue: CGFloat = 0
    var alpha: CGFloat = 0
    color.getRed(&red, green: &green, blue: &blue, alpha: &alpha)

    let lottieColor = LottieColor(
        r: Double(red),
        g: Double(green),
        b: Double(blue),
        a: Double(alpha)
    )
    playPauseAnimationView.setValueProvider(
        ColorValueProvider(lottieColor),
        keypath: AnimationKeypath(keypath: "**.Color")
    )
}
// MARK: - Actions
@objc private func deleteTapped() {

View File

@@ -0,0 +1,37 @@
import UIKit
/// Bundled image assets for the voice-recording UI.
/// Raw values are asset-catalog names, resolved via `VoiceRecordingAssets.image(_:templated:)`.
enum VoiceRecordingAsset: String {
case iconMicrophone = "VoiceRecordingIconMicrophone"
case iconVideo = "VoiceRecordingIconVideo"
case cancelArrow = "VoiceRecordingCancelArrow"
case inputMicOverlay = "VoiceRecordingInputMicOverlay"
case recordSendIcon = "VoiceRecordingRecordSendIcon"
case videoRecordArrow = "VoiceRecordingVideoRecordArrow"
case pause = "VoiceRecordingPause"
case switchCamera = "VoiceRecordingSwitchCamera"
case viewOnce = "VoiceRecordingViewOnce"
case viewOnceEnabled = "VoiceRecordingViewOnceEnabled"
case delete = "VoiceRecordingDelete"
case send = "VoiceRecordingSend"
}
/// Bundled Lottie animation names for the voice-recording UI.
/// Raw values are the animation file names passed to `LottieAnimation.named(_:)`.
enum VoiceRecordingLottieAsset: String {
case binRed = "voice_bin_red"
case binBlue = "voice_bin_blue"
case lockWait = "voice_lock_wait"
case lock = "voice_lock"
case lockPause = "voice_lock_pause"
case playPause = "voice_anim_playpause"
case micToVideo = "voice_anim_mic_to_video"
case videoToMic = "voice_anim_video_to_mic"
}
/// Namespace for loading voice-recording images from the asset catalog.
enum VoiceRecordingAssets {
    /// Loads the image for `asset`, optionally as a template image so it
    /// picks up tintColor. Returns nil (after a debug assertion) when the
    /// asset is missing from the bundle.
    static func image(_ asset: VoiceRecordingAsset, templated: Bool = false) -> UIImage? {
        if let image = UIImage(named: asset.rawValue) {
            return templated ? image.withRenderingMode(.alwaysTemplate) : image
        }
        assertionFailure("Missing voice recording asset: \(asset.rawValue)")
        return nil
    }
}

View File

@@ -11,7 +11,7 @@ import UIKit
/// Z-order (backfront): outerCircle innerCircle micIcon
/// Inner circle: 110pt, #0088FF, opaque
/// Outer circle: 160pt, #0088FF alpha 0.2, scales with audio
/// Mic icon: white SVG, 25x34pt, top-most layer
/// Mic icon: Telegram `InputMicRecordingOverlay`, top-most layer
final class VoiceRecordingOverlay {
// Telegram exact (lines 11-13 of TGModernConversationInputMicButton.m)
@@ -27,7 +27,7 @@ final class VoiceRecordingOverlay {
private let containerView = UIView()
private let outerCircle = UIView()
private let innerCircle = UIView()
private let micIconLayer = CAShapeLayer()
private let iconView = UIImageView()
// MARK: - Display Link
@@ -36,6 +36,8 @@ final class VoiceRecordingOverlay {
private var animationStartTime: Double = 0
private var currentLevel: CGFloat = 0
private var inputLevel: CGFloat = 0
private var dragDistanceX: CGFloat = 0
private var dragDistanceY: CGFloat = 0
private var isLocked = false
private var onTapStop: (() -> Void)?
@@ -55,13 +57,12 @@ final class VoiceRecordingOverlay {
innerCircle.bounds = CGRect(origin: .zero, size: CGSize(width: innerDiameter, height: innerDiameter))
innerCircle.layer.cornerRadius = innerDiameter / 2
// Mic icon SVG
configureMicIcon()
configureIcon()
// Z-order: outer (back) inner icon (front)
containerView.addSubview(outerCircle)
containerView.addSubview(innerCircle)
containerView.layer.addSublayer(micIconLayer)
containerView.addSubview(iconView)
}
deinit {
@@ -69,18 +70,11 @@ final class VoiceRecordingOverlay {
containerView.removeFromSuperview()
}
private func configureMicIcon() {
let viewBox = CGSize(width: 17.168, height: 23.555)
let targetSize = CGSize(width: 25, height: 34)
var parser = SVGPathParser(pathData: TelegramIconPath.microphone)
let cgPath = parser.parse()
let sx = targetSize.width / viewBox.width
let sy = targetSize.height / viewBox.height
micIconLayer.path = cgPath.copy(using: [CGAffineTransform(scaleX: sx, y: sy)])
micIconLayer.fillColor = UIColor.white.cgColor
micIconLayer.bounds = CGRect(origin: .zero, size: targetSize)
private func configureIcon() {
iconView.image = VoiceRecordingAssets.image(.inputMicOverlay, templated: true)
iconView.tintColor = .white
iconView.contentMode = .center
iconView.isUserInteractionEnabled = false
}
// MARK: - Present (Telegram exact: spring damping 0.55, duration 0.5s)
@@ -90,7 +84,7 @@ final class VoiceRecordingOverlay {
// Telegram: centerOffset = (0, -1 + screenPixel)
var center = superview.convert(anchorView.center, to: window)
center.y -= 1.0
center.y += (-1.0 + (1.0 / UIScreen.main.scale))
containerView.bounds = CGRect(origin: .zero, size: CGSize(width: outerDiameter, height: outerDiameter))
containerView.center = center
@@ -100,10 +94,13 @@ final class VoiceRecordingOverlay {
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
outerCircle.center = mid
innerCircle.center = mid
CATransaction.begin()
CATransaction.setDisableActions(true)
micIconLayer.position = mid
CATransaction.commit()
let iconSize = iconView.image?.size ?? CGSize(width: 30, height: 30)
iconView.frame = CGRect(
x: floor(mid.x - iconSize.width / 2.0),
y: floor(mid.y - iconSize.height / 2.0),
width: iconSize.width,
height: iconSize.height
)
window.addSubview(containerView)
@@ -112,37 +109,34 @@ final class VoiceRecordingOverlay {
innerCircle.alpha = 0.2
outerCircle.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
outerCircle.alpha = 0.2
micIconLayer.opacity = 0.2
iconView.alpha = 0.2
iconView.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
// Alpha fade: 0.15s (Telegram exact)
UIView.animate(withDuration: 0.15) {
self.innerCircle.alpha = 1
self.outerCircle.alpha = 1
self.iconView.alpha = 1
}
let iconFade = CABasicAnimation(keyPath: "opacity")
iconFade.fromValue = 0.2
iconFade.toValue = 1.0
iconFade.duration = 0.15
micIconLayer.opacity = 1.0
micIconLayer.add(iconFade, forKey: "fadeIn")
// Spring scale: damping 0.55, duration 0.5s (Telegram exact)
// Inner 1.0
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 0.55, initialSpringVelocity: 0, options: .beginFromCurrentState) {
self.innerCircle.transform = .identity
self.iconView.transform = .identity
}
// Outer outerMinScale (0.6875)
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 0.55, initialSpringVelocity: 0, options: .beginFromCurrentState) {
self.outerCircle.transform = CGAffineTransform(scaleX: self.outerMinScale, y: self.outerMinScale)
self.applyCurrentTransforms()
}
animationStartTime = CACurrentMediaTime()
startDisplayLink()
}
// MARK: - Lock Transition (mic stop icon, tappable)
// MARK: - Lock Transition (mic overlay send icon, tappable)
/// Transition to locked state: mic icon stop icon, overlay becomes tappable.
/// Transition to locked state: icon morphs to Telegram `RecordSendIcon`.
/// Telegram: TGModernConversationInputMicButton.m line 616-693
func transitionToLocked(onTapStop: @escaping () -> Void) {
isLocked = true
@@ -162,74 +156,40 @@ final class VoiceRecordingOverlay {
self.outerCircle.transform = CGAffineTransform(
scaleX: self.outerMinScale, y: self.outerMinScale
)
self.iconView.transform = .identity
}
// Transition icon: mic stop (two vertical bars)
transitionToStopIcon()
dragDistanceX = 0
dragDistanceY = 0
// Transition icon: mic overlay send icon.
transitionToSendIcon()
}
/// Animate mic icon stop icon (Telegram: snapshot + cross-fade, 0.3s)
private func transitionToStopIcon() {
// Create stop icon path (two parallel vertical bars, Telegram exact)
let stopPath = UIBezierPath()
let barW: CGFloat = 4
let barH: CGFloat = 16
let gap: CGFloat = 6
let totalW = barW * 2 + gap
let originX = -totalW / 2
let originY = -barH / 2
// Left bar
stopPath.append(UIBezierPath(
roundedRect: CGRect(x: originX, y: originY, width: barW, height: barH),
cornerRadius: 1
))
// Right bar
stopPath.append(UIBezierPath(
roundedRect: CGRect(x: originX + barW + gap, y: originY, width: barW, height: barH),
cornerRadius: 1
))
/// Telegram-style icon transition using snapshot shrink + new icon grow.
private func transitionToSendIcon() {
guard let sendImage = VoiceRecordingAssets.image(.recordSendIcon, templated: true) else {
return
}
let snapshot = iconView.snapshotView(afterScreenUpdates: false)
snapshot?.frame = iconView.frame
if let snapshot {
containerView.addSubview(snapshot)
}
// Animate: old icon scales down, new icon scales up
let newIconLayer = CAShapeLayer()
newIconLayer.path = stopPath.cgPath
newIconLayer.fillColor = UIColor.white.cgColor
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
newIconLayer.position = mid
newIconLayer.transform = CATransform3DMakeScale(0.3, 0.3, 1)
newIconLayer.opacity = 0
containerView.layer.addSublayer(newIconLayer)
iconView.image = sendImage
iconView.tintColor = .white
iconView.transform = CGAffineTransform(scaleX: 0.3, y: 0.3)
iconView.alpha = 0
// Old mic icon scales to 0
let shrink = CABasicAnimation(keyPath: "transform.scale")
shrink.toValue = 0.001
shrink.duration = 0.3
shrink.fillMode = .forwards
shrink.isRemovedOnCompletion = false
micIconLayer.add(shrink, forKey: "shrink")
let fadeOut = CABasicAnimation(keyPath: "opacity")
fadeOut.toValue = 0
fadeOut.duration = 0.2
fadeOut.fillMode = .forwards
fadeOut.isRemovedOnCompletion = false
micIconLayer.add(fadeOut, forKey: "fadeOutMic")
// New stop icon grows in
let grow = CABasicAnimation(keyPath: "transform.scale")
grow.fromValue = 0.3
grow.toValue = 1.0
grow.duration = 0.3
grow.fillMode = .forwards
grow.isRemovedOnCompletion = false
newIconLayer.add(grow, forKey: "grow")
let fadeIn = CABasicAnimation(keyPath: "opacity")
fadeIn.fromValue = 0
fadeIn.toValue = 1
fadeIn.duration = 0.25
fadeIn.fillMode = .forwards
fadeIn.isRemovedOnCompletion = false
newIconLayer.add(fadeIn, forKey: "fadeInStop")
UIView.animate(withDuration: 0.3, delay: 0, options: [.curveEaseInOut]) {
self.iconView.transform = .identity
self.iconView.alpha = 1
snapshot?.transform = CGAffineTransform(scaleX: 0.001, y: 0.001)
snapshot?.alpha = 0
} completion: { _ in
snapshot?.removeFromSuperview()
}
}
// MARK: - Dismiss (Telegram exact: 0.18s, scale0.2, alpha0)
@@ -246,16 +206,10 @@ final class VoiceRecordingOverlay {
self.innerCircle.alpha = 0
self.outerCircle.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
self.outerCircle.alpha = 0
self.iconView.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
self.iconView.alpha = 0
})
let iconFade = CABasicAnimation(keyPath: "opacity")
iconFade.fromValue = 1.0
iconFade.toValue = 0.0
iconFade.duration = 0.18
iconFade.fillMode = .forwards
iconFade.isRemovedOnCompletion = false
micIconLayer.add(iconFade, forKey: "fadeOut")
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
container.removeFromSuperview()
}
@@ -271,17 +225,14 @@ final class VoiceRecordingOverlay {
self.innerCircle.transform = CGAffineTransform(translationX: -80, y: 0)
.scaledBy(x: 0.2, y: 0.2)
self.innerCircle.alpha = 0
self.outerCircle.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
self.outerCircle.transform = CGAffineTransform(translationX: -80, y: 0)
.scaledBy(x: 0.2, y: 0.2)
self.outerCircle.alpha = 0
self.iconView.transform = CGAffineTransform(translationX: -80, y: 0)
.scaledBy(x: 0.2, y: 0.2)
self.iconView.alpha = 0
})
let iconFade = CABasicAnimation(keyPath: "opacity")
iconFade.toValue = 0.0
iconFade.duration = 0.18
iconFade.fillMode = .forwards
iconFade.isRemovedOnCompletion = false
micIconLayer.add(iconFade, forKey: "fadeOut")
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
container.removeFromSuperview()
}
@@ -300,39 +251,9 @@ final class VoiceRecordingOverlay {
/// Telegram exact from TGModernConversationInputMicButton.m
func applyDragTransform(distanceX: CGFloat, distanceY: CGFloat) {
guard CACurrentMediaTime() > animationStartTime else { return }
// Telegram cancel-transform threshold: 8pt
guard abs(distanceX) > 8 || abs(distanceY) > 8 else { return }
// Telegram line 763: normalize to 0..1 over 300pt range
let valueX = max(0, min(1, abs(distanceX) / 300))
// Telegram line 768: inner scale squeezes from 1.0 0.4
let innerScale = max(0.4, min(1.0, 1.0 - valueX))
// Vertical translation (follows finger)
let translation = CGAffineTransform(translationX: 0, y: distanceY)
// Telegram line 922-924: outer circle = translation + audio scale
let outerScale = outerMinScale + currentLevel * (1.0 - outerMinScale)
outerCircle.transform = translation.scaledBy(x: outerScale, y: outerScale)
// Telegram line 931-932: inner circle = translation + cancel scale + horizontal offset
let innerTransform = translation
.scaledBy(x: innerScale, y: innerScale)
.translatedBy(x: distanceX, y: 0)
innerCircle.transform = innerTransform
// Icon follows inner circle
CATransaction.begin()
CATransaction.setDisableActions(true)
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
micIconLayer.position = CGPoint(
x: mid.x + distanceX * innerScale,
y: mid.y + distanceY
)
micIconLayer.transform = CATransform3DMakeScale(innerScale, innerScale, 1)
CATransaction.commit()
dragDistanceX = min(0, distanceX)
dragDistanceY = min(0, distanceY)
applyCurrentTransforms()
}
// MARK: - Display Link (Telegram: displayLinkEvent, 0.8/0.2 smoothing)
@@ -341,6 +262,7 @@ final class VoiceRecordingOverlay {
guard displayLink == nil else { return }
let target = DisplayLinkTarget { [weak self] in self?.tick() }
let link = CADisplayLink(target: target, selector: #selector(DisplayLinkTarget.tick))
link.preferredFrameRateRange = CAFrameRateRange(minimum: 24, maximum: 30, preferred: 30)
link.add(to: .main, forMode: .common)
displayLink = link
displayLinkTarget = target
@@ -359,9 +281,27 @@ final class VoiceRecordingOverlay {
// Telegram exact: TGModernConversationInputMicButton.m line 916 (0.9/0.1)
currentLevel = currentLevel * 0.9 + inputLevel * 0.1
// Telegram exact: outerCircleMinScale + currentLevel * (1.0 - outerCircleMinScale)
let scale = outerMinScale + currentLevel * (1.0 - outerMinScale)
outerCircle.transform = CGAffineTransform(scaleX: scale, y: scale)
applyCurrentTransforms()
}
private func applyCurrentTransforms() {
let valueX = max(0, min(1, abs(dragDistanceX) / 300))
let innerScale = max(0.4, min(1.0, 1.0 - valueX))
let translatedX = dragDistanceX * innerScale
let translation = CGAffineTransform(translationX: translatedX, y: dragDistanceY)
let outerScale = outerMinScale + currentLevel * (1.0 - outerMinScale)
outerCircle.transform = translation.scaledBy(x: outerScale, y: outerScale)
let innerTransform = translation.scaledBy(x: innerScale, y: innerScale)
innerCircle.transform = innerTransform
let mid = CGPoint(x: outerDiameter / 2, y: outerDiameter / 2)
iconView.center = CGPoint(
x: mid.x + translatedX,
y: mid.y + dragDistanceY
)
iconView.transform = CGAffineTransform(scaleX: innerScale, y: innerScale)
}
}

View File

@@ -1,5 +1,6 @@
import QuartzCore
import UIKit
import Lottie
// MARK: - VoiceRecordingPanelDelegate
@@ -46,11 +47,34 @@ final class VoiceRecordingPanel: UIView {
// MARK: - Telegram-exact layout constants
private let dotX: CGFloat = 5 // Telegram: indicator X=5
private let timerX: CGFloat = 40 // Telegram: timer X=40
private let dotX: CGFloat = 14 // Left margin for red dot
private let timerX: CGFloat = 38 // Timer X position
private let dotSize: CGFloat = 10
private let timerMinWidth: CGFloat = 72
private let arrowLabelGap: CGFloat = 6
private var panelControlColor: UIColor {
UIColor { traits in
traits.userInterfaceStyle == .dark ? UIColor.white : UIColor.black
}
}
private var panelControlAccentColor: UIColor {
UIColor { traits in
traits.userInterfaceStyle == .dark
? UIColor.white
: UIColor(red: 0, green: 136 / 255.0, blue: 1.0, alpha: 1.0)
}
}
private var recordingDotColor: UIColor {
UIColor { traits in
traits.userInterfaceStyle == .dark
? UIColor(red: 0xEB / 255.0, green: 0x55 / 255.0, blue: 0x45 / 255.0, alpha: 1.0)
: UIColor(red: 0xED / 255.0, green: 0x25 / 255.0, blue: 0x21 / 255.0, alpha: 1.0)
}
}
// MARK: - Init
override init(frame: CGRect) {
@@ -72,8 +96,8 @@ final class VoiceRecordingPanel: UIView {
glassBackground.isUserInteractionEnabled = false
addSubview(glassBackground)
// Red dot: 10×10, Telegram #FF2D55
redDot.backgroundColor = UIColor(red: 1.0, green: 45/255.0, blue: 85/255.0, alpha: 1)
// Red dot: 10×10, theme-aware Telegram recording color.
redDot.backgroundColor = recordingDotColor
redDot.layer.cornerRadius = dotSize / 2
addSubview(redDot)
@@ -84,34 +108,42 @@ final class VoiceRecordingPanel: UIView {
} else {
timerLabel.font = .monospacedDigitSystemFont(ofSize: 15, weight: .regular)
}
timerLabel.textColor = .white
timerLabel.textColor = panelControlColor
timerLabel.text = "0:00"
timerLabel.lineBreakMode = .byClipping
timerLabel.isAccessibilityElement = true
timerLabel.accessibilityIdentifier = "voice.recording.timer"
addSubview(timerLabel)
// Arrow: exact Telegram SVG "AudioRecordingCancelArrow" (arrowleft.svg, 9×18pt)
arrowIcon.image = Self.makeCancelArrowImage()
// Arrow: Telegram asset "AudioRecordingCancelArrow" (arrowleft.svg, 9×18pt)
arrowIcon.image = VoiceRecordingAssets.image(.cancelArrow, templated: true)
arrowIcon.tintColor = panelControlColor
arrowIcon.contentMode = .center
cancelContainer.addSubview(arrowIcon)
// "Slide to cancel": 14pt regular, panelControlColor = #FFFFFF (dark theme)
slideLabel.font = .systemFont(ofSize: 14, weight: .regular)
slideLabel.textColor = .white
slideLabel.textColor = panelControlColor
slideLabel.text = "Slide to cancel"
cancelContainer.addSubview(slideLabel)
cancelContainer.isAccessibilityElement = true
cancelContainer.accessibilityLabel = "Slide left to cancel recording"
cancelContainer.accessibilityIdentifier = "voice.recording.slideToCancel"
addSubview(cancelContainer)
// Cancel button (for locked state): 17pt
cancelButton.setTitle("Cancel", for: .normal)
cancelButton.setTitleColor(.white, for: .normal)
cancelButton.setTitleColor(panelControlAccentColor, for: .normal)
cancelButton.titleLabel?.font = .systemFont(ofSize: 17, weight: .regular)
cancelButton.addTarget(self, action: #selector(cancelTapped), for: .touchUpInside)
cancelButton.isAccessibilityElement = true
cancelButton.accessibilityLabel = "Cancel recording"
cancelButton.accessibilityHint = "Discards the current recording."
cancelButton.accessibilityIdentifier = "voice.recording.cancel"
cancelButton.alpha = 0
addSubview(cancelButton)
updateThemeColors()
}
// MARK: - Layout
@@ -137,29 +169,44 @@ final class VoiceRecordingPanel: UIView {
)
// Timer: at X=34
timerLabel.frame = CGRect(x: timerX, y: timerY, width: timerSize.width + 4, height: timerSize.height)
let timerWidth = max(timerMinWidth, timerSize.width + 4)
timerLabel.frame = CGRect(x: timerX, y: timerY, width: timerWidth, height: timerSize.height)
// Cancel indicator: centered in full panel width
// Telegram: frame.width = arrowSize.width + 12.0 + labelLayout.size.width
// Cancel indicator: centered in available space after timer
let labelSize = slideLabel.sizeThatFits(CGSize(width: 200, height: h))
let arrowW: CGFloat = 9 // Telegram SVG: 9pt wide
let arrowH: CGFloat = 18 // Telegram SVG: 18pt tall
let totalCancelW = arrowW + 12 + labelSize.width // Telegram: arrowWidth + 12 + labelWidth
let cancelX = floor((w - totalCancelW) / 2)
let totalCancelW = arrowW + 12 + labelSize.width
let timerTrailingX = timerX + timerWidth
let availableWidth = w - timerTrailingX
let cancelX = timerTrailingX + floor((availableWidth - totalCancelW) / 2)
cancelContainer.frame = CGRect(x: cancelX, y: 0, width: totalCancelW, height: h)
arrowIcon.frame = CGRect(x: 0, y: floor((h - arrowH) / 2), width: arrowW, height: arrowH)
// Telegram: label X = arrowSize.width + 6.0
slideLabel.frame = CGRect(
x: arrowW + 6,
x: arrowW + arrowLabelGap,
y: 1 + floor((h - labelSize.height) / 2),
width: labelSize.width,
height: labelSize.height
)
// Cancel button: centered
// Cancel button: centered in available space after timer
cancelButton.sizeToFit()
cancelButton.center = CGPoint(x: w / 2, y: h / 2)
cancelButton.center = CGPoint(x: timerTrailingX + availableWidth / 2, y: h / 2)
}
override func traitCollectionDidChange(_ previousTraitCollection: UITraitCollection?) {
super.traitCollectionDidChange(previousTraitCollection)
guard previousTraitCollection?.userInterfaceStyle != traitCollection.userInterfaceStyle else { return }
updateThemeColors()
}
private func updateThemeColors() {
redDot.backgroundColor = recordingDotColor
timerLabel.textColor = panelControlColor
arrowIcon.tintColor = panelControlColor
slideLabel.textColor = panelControlColor
cancelButton.setTitleColor(panelControlAccentColor, for: .normal)
}
// MARK: - Public API
@@ -170,7 +217,29 @@ final class VoiceRecordingPanel: UIView {
let minutes = totalSeconds / 60
let seconds = totalSeconds % 60
let centiseconds = Int(duration * 100) % 100
timerLabel.text = String(format: "%d:%02d,%02d", minutes, seconds, centiseconds)
let text = String(format: "%d:%02d,%02d", minutes, seconds, centiseconds)
guard timerLabel.text != text else { return }
timerLabel.text = text
// Keep label width live-updated so long durations never render with ellipsis.
let h = bounds.height
guard h > 0 else {
setNeedsLayout()
return
}
let timerSize = timerLabel.sizeThatFits(CGSize(width: .greatestFiniteMagnitude, height: h))
let timerY = floor((h - timerSize.height) / 2) + 1
let timerWidth = max(timerMinWidth, ceil(timerSize.width + 4))
CATransaction.begin()
CATransaction.setDisableActions(true)
timerLabel.frame = CGRect(x: timerX, y: timerY, width: timerWidth, height: timerSize.height)
redDot.frame = CGRect(
x: dotX,
y: timerY + floor((timerSize.height - dotSize) / 2),
width: dotSize,
height: dotSize
)
CATransaction.commit()
}
/// Updates cancel indicator position based on horizontal drag.
@@ -180,14 +249,16 @@ final class VoiceRecordingPanel: UIView {
// Only apply transform when actually dragging (threshold 8pt)
let drag = abs(translation)
guard drag > 8 else { return }
guard VoiceRecordingParityMath.shouldApplyCancelTransform(translation) else {
cancelContainer.transform = .identity
cancelContainer.alpha = 1
return
}
let offset = drag - 8
let offset = drag - VoiceRecordingParityConstants.cancelTransformThreshold
cancelContainer.transform = CGAffineTransform(translationX: -offset * 0.5, y: 0)
// Fade: starts at 60% of cancel threshold (90pt drag), fully hidden at threshold
let fadeProgress = max(0, min(1, (drag - 90) / 60))
cancelContainer.alpha = 1 - fadeProgress
let currentMinX = cancelContainer.frame.minX + cancelContainer.transform.tx
cancelContainer.alpha = max(0, min(1, (currentMinX - 100) / 10))
}
/// Animate panel in. Called when recording begins.
@@ -208,7 +279,6 @@ final class VoiceRecordingPanel: UIView {
// Timer: slide in from left, spring 0.5s
timerLabel.alpha = 0
let timerStartX = timerLabel.frame.origin.x - 30
timerLabel.transform = CGAffineTransform(translationX: -30, y: 0)
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 0.55, initialSpringVelocity: 0, options: []) {
self.timerLabel.alpha = 1
@@ -252,35 +322,68 @@ final class VoiceRecordingPanel: UIView {
func animateOutCancel(completion: (() -> Void)? = nil) {
stopDotPulsing()
stopCancelJiggle()
isUserInteractionEnabled = false
// Red dot: scale pulse 11.30, color redgray
UIView.animate(withDuration: 0.15, animations: {
self.redDot.transform = CGAffineTransform(scaleX: 1.3, y: 1.3)
self.redDot.backgroundColor = .gray
}, completion: { _ in
var didFinishBin = false
var didFinishRest = false
let completeIfReady: () -> Void = { [weak self] in
guard let self else { return }
if didFinishBin && didFinishRest {
self.removeFromSuperview()
completion?()
}
}
// Telegram parity: on cancel, panel content disappears quickly while
// bin animation keeps playing near the leading edge.
let indicatorFrame = CGRect(x: 0, y: floor((bounds.height - 40) / 2.0), width: 40, height: 40)
let binHostView = superview ?? self
let binFrameInHost = convert(indicatorFrame, to: binHostView)
if let animation = LottieAnimation.named(VoiceRecordingLottieAsset.binRed.rawValue) {
let binView = LottieAnimationView(animation: animation)
binView.frame = binFrameInHost
binView.backgroundBehavior = .pauseAndRestore
binView.contentMode = .scaleAspectFit
binView.loopMode = .playOnce
binHostView.addSubview(binView)
redDot.alpha = 0
binView.play { _ in
binView.removeFromSuperview()
didFinishBin = true
completeIfReady()
}
} else {
didFinishBin = true
UIView.animate(withDuration: 0.15, animations: {
self.redDot.transform = CGAffineTransform(scaleX: 0.01, y: 0.01)
self.redDot.alpha = 0
self.redDot.transform = CGAffineTransform(scaleX: 1.3, y: 1.3)
self.redDot.backgroundColor = .gray
}, completion: { _ in
UIView.animate(withDuration: 0.15, animations: {
self.redDot.transform = CGAffineTransform(scaleX: 0.01, y: 0.01)
self.redDot.alpha = 0
})
})
})
}
// Timer: scale to 0, slide left
UIView.animate(withDuration: 0.25) {
// Timer: scale to 0, slide left.
UIView.animate(withDuration: 0.2) {
self.timerLabel.transform = CGAffineTransform(translationX: -30, y: 0)
.scaledBy(x: 0.001, y: 0.001)
self.timerLabel.alpha = 0
}
// Cancel indicator: fade out
UIView.animate(withDuration: 0.25) {
// Hide panel visuals quickly so only trash animation remains visible.
UIView.animate(withDuration: 0.12) {
self.glassBackground.alpha = 0
self.redDot.alpha = 0
self.cancelContainer.alpha = 0
self.cancelButton.alpha = 0
}
// Remove after animation completes
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
self?.removeFromSuperview()
completion?()
DispatchQueue.main.asyncAfter(deadline: .now() + 0.16) {
didFinishRest = true
completeIfReady()
}
}
@@ -344,49 +447,4 @@ final class VoiceRecordingPanel: UIView {
@objc private func cancelTapped() {
delegate?.recordingPanelDidTapCancel(self)
}
// MARK: - Telegram Cancel Arrow (exact SVG from arrowleft.svg, 9×18pt)
private static func makeCancelArrowImage() -> UIImage {
let size = CGSize(width: 9, height: 18)
let renderer = UIGraphicsImageRenderer(size: size)
return renderer.image { ctx in
let path = UIBezierPath()
// Exact path from Telegram's arrowleft.svg
path.move(to: CGPoint(x: 8.438, y: 0.500))
path.addCurve(
to: CGPoint(x: 8.500, y: 1.438),
controlPoint1: CGPoint(x: 8.714, y: 0.741),
controlPoint2: CGPoint(x: 8.742, y: 1.161)
)
path.addLine(to: CGPoint(x: 1.884, y: 9.000))
path.addLine(to: CGPoint(x: 8.500, y: 16.562))
path.addCurve(
to: CGPoint(x: 8.438, y: 17.500),
controlPoint1: CGPoint(x: 8.742, y: 16.839),
controlPoint2: CGPoint(x: 8.714, y: 17.259)
)
path.addCurve(
to: CGPoint(x: 7.500, y: 17.438),
controlPoint1: CGPoint(x: 8.161, y: 17.742),
controlPoint2: CGPoint(x: 7.741, y: 17.714)
)
path.addLine(to: CGPoint(x: 0.499, y: 9.438))
path.addCurve(
to: CGPoint(x: 0.499, y: 8.562),
controlPoint1: CGPoint(x: 0.280, y: 9.187),
controlPoint2: CGPoint(x: 0.280, y: 8.813)
)
path.addLine(to: CGPoint(x: 7.500, y: 0.562))
path.addCurve(
to: CGPoint(x: 8.438, y: 0.500),
controlPoint1: CGPoint(x: 7.741, y: 0.286),
controlPoint2: CGPoint(x: 8.161, y: 0.258)
)
path.close()
UIColor.white.setFill()
path.fill()
}
}
}

View File

@@ -0,0 +1,92 @@
import CoreGraphics
import Foundation
/// Outcome of lifting the finger from the mic button after a drag.
/// String raw values aid logging and UI-test identification.
enum VoiceRecordingReleaseDecision: String {
    case finish
    case cancel
    case lock
}

/// Gesture and geometry constants for Telegram-parity voice recording.
/// Values are points / seconds unless noted; negative distances mean
/// left (X) or up (Y) relative to the press origin.
enum VoiceRecordingParityConstants {
    /// Minimum press duration before the gesture counts as a hold.
    static let holdThreshold: TimeInterval = 0.19
    /// Leftward drag distance treated as a full cancel.
    static let cancelDistanceThreshold: CGFloat = -150
    /// Leftward drag at which the cancel haptic fires; also the
    /// release-time cancel distance used by `releaseDecision`.
    static let cancelHapticThreshold: CGFloat = -100
    /// Upward drag distance treated as a full lock.
    static let lockDistanceThreshold: CGFloat = -110
    /// Upward drag at which the lock haptic fires; also the
    /// release-time lock distance used by `releaseDecision`.
    static let lockHapticThreshold: CGFloat = -60
    /// Flick velocity that forces cancel/lock on release regardless of distance.
    static let velocityGate: CGFloat = -400
    /// Movement allowed before the hold threshold without aborting — TODO confirm in caller.
    static let preHoldCancelDistance: CGFloat = 10
    /// Horizontal hit-test inset for the mic button (expands the touch area).
    static let micHitInsetX: CGFloat = -10
    /// Divisor turning upward drag into 0...1 lock progress (see `lockness`).
    static let locknessDivisor: CGFloat = 105
    /// Divisor turning drag distance into a 0...1 value (see `normalizedDrag`).
    static let dragNormalizeDivisor: CGFloat = 300
    /// Dead zone before cancel-drag transforms are applied.
    static let cancelTransformThreshold: CGFloat = 8
    /// Side length of the enlarged send-button accessibility hit area.
    static let sendAccessibilityHitSize: CGFloat = 120
    /// Recordings shorter than this are discarded (see `shouldDiscard`).
    static let minVoiceDuration: TimeInterval = 0.5
    /// Minimum free disk space (8 MiB) required — presumably checked before recording.
    static let minFreeDiskBytes: Int64 = 8 * 1024 * 1024

    /// Smallest selectable trim window: at least 1 s, and never narrower than
    /// 56 pt of rendered waveform (i.e. `56 / waveformWidth` of the duration).
    /// A non-positive `waveformWidth` is clamped to 1 to avoid division by zero.
    static func minTrimDuration(duration: TimeInterval, waveformWidth: CGFloat) -> TimeInterval {
        let safeWidth = Double(max(waveformWidth, 1))
        return max(1.0, 56.0 * duration / safeWidth)
    }
}

/// Pure, unit-testable gesture math shared by the recording overlay and panel.
enum VoiceRecordingParityMath {
    /// Projects a 2-D drag onto its dominant axis, zeroing the minor axis.
    /// Ties (|x| == |y|) resolve to the vertical axis.
    static func dominantAxisDistances(distanceX: CGFloat, distanceY: CGFloat) -> (CGFloat, CGFloat) {
        abs(distanceX) > abs(distanceY) ? (distanceX, 0) : (0, distanceY)
    }

    /// Decides what releasing the mic button should do, from flick velocity
    /// and accumulated drag. Cancel (leftward) takes priority over lock (upward).
    static func releaseDecision(
        velocityX: CGFloat,
        velocityY: CGFloat,
        distanceX: CGFloat,
        distanceY: CGFloat
    ) -> VoiceRecordingReleaseDecision {
        let flickedLeft = velocityX < VoiceRecordingParityConstants.velocityGate
        let draggedLeft = distanceX < VoiceRecordingParityConstants.cancelHapticThreshold
        if flickedLeft || draggedLeft {
            return .cancel
        }
        let flickedUp = velocityY < VoiceRecordingParityConstants.velocityGate
        let draggedUp = distanceY < VoiceRecordingParityConstants.lockHapticThreshold
        return (flickedUp || draggedUp) ? .lock : .finish
    }

    /// Lock progress in 0...1 as the finger travels toward the lock control.
    static func lockness(distanceY: CGFloat) -> CGFloat {
        let progress = abs(distanceY) / VoiceRecordingParityConstants.locknessDivisor
        return min(1, max(0, progress))
    }

    /// Normalizes a drag distance into 0...1 over the 300 pt reference range.
    static func normalizedDrag(distance: CGFloat) -> CGFloat {
        let fraction = abs(distance) / VoiceRecordingParityConstants.dragNormalizeDivisor
        return max(0, min(1, fraction))
    }

    /// True once the horizontal drag exceeds the 8 pt dead zone.
    static func shouldApplyCancelTransform(_ translation: CGFloat) -> Bool {
        translation.magnitude > VoiceRecordingParityConstants.cancelTransformThreshold
    }

    /// True when the finished recording is too short to keep.
    static func shouldDiscard(duration: TimeInterval) -> Bool {
        duration < VoiceRecordingParityConstants.minVoiceDuration
    }

    /// Clamps a trim selection into `0...duration`, preserving lower <= upper.
    static func clampTrimRange(_ trimRange: ClosedRange<TimeInterval>, duration: TimeInterval) -> ClosedRange<TimeInterval> {
        let clampedStart = max(0, min(trimRange.lowerBound, duration))
        let clampedEnd = max(clampedStart, min(trimRange.upperBound, duration))
        return clampedStart...clampedEnd
    }

    /// Maps a time-based trim range onto waveform sample indices
    /// (floor on the start, ceil on the end, clamped to the buffer).
    /// Returns nil when there are no samples, no duration, or an empty slice.
    static func waveformSliceRange(
        sampleCount: Int,
        totalDuration: TimeInterval,
        trimRange: ClosedRange<TimeInterval>
    ) -> Range<Int>? {
        guard sampleCount > 0, totalDuration > 0 else { return nil }
        let start = max(0, Int(floor((trimRange.lowerBound / totalDuration) * Double(sampleCount))))
        let end = min(sampleCount, Int(ceil((trimRange.upperBound / totalDuration) * Double(sampleCount))))
        return start < end ? start..<end : nil
    }
}

View File

@@ -0,0 +1,215 @@
import SwiftUI
import UIKit
/// No-op `ComposerViewDelegate` so the UI-test fixture can host a real
/// `ComposerView` without any chat session or recording pipeline behind it.
/// Every callback is intentionally empty: the fixture only needs the composer
/// to render, not to react.
private final class VoiceRecordingFixtureComposerDelegate: NSObject, ComposerViewDelegate {
func composerDidTapSend(_ composer: ComposerView) {}
func composerDidTapAttach(_ composer: ComposerView) {}
func composerTextDidChange(_ composer: ComposerView, text: String) {}
func composerFocusDidChange(_ composer: ComposerView, isFocused: Bool) {}
func composerHeightDidChange(_ composer: ComposerView, height: CGFloat) {}
func composerDidCancelReply(_ composer: ComposerView) {}
func composerUserDidType(_ composer: ComposerView) {}
func composerKeyboardHeightDidChange(_ composer: ComposerView, height: CGFloat) {}
func composerDidStartRecording(_ composer: ComposerView) {}
func composerDidFinishRecording(_ composer: ComposerView, sendImmediately: Bool) {}
func composerDidCancelRecording(_ composer: ComposerView) {}
func composerDidLockRecording(_ composer: ComposerView) {}
}
/// Host view that stages the voice-recording UI in a fixed `Mode` for UI tests,
/// bypassing the real press-and-hold gesture flow. It embeds a `ComposerView`
/// (with a no-op delegate) plus whichever panels/lock views the mode requires.
private final class VoiceRecordingUITestFixtureContainer: UIView {
/// Snapshot states a UI test can request; raw values arrive via launch arguments
/// through `VoiceRecordingUITestFixtureView(modeRawValue:)`.
enum Mode: String {
case idle
case armed
case recordingUnlocked
case locking30
case locking70
case cancelDrag
case recordingLocked
case stop
case waitingPreview
case preview
case previewTrimmed
}
private let mode: Mode
private let composer = ComposerView(frame: .zero)
// Retained strongly: ComposerView.delegate is presumably weak — confirm in ComposerView.
private let composerDelegate = VoiceRecordingFixtureComposerDelegate()
// 1x1 hidden marker; unhidden when the lock stop button is tapped so the
// UI test can assert the tap was delivered (identifier "voice.fixture.stopTapped").
private let stopTapMarker = UIView()
private var previewPanel: RecordingPreviewPanel?
private var recordingPanel: VoiceRecordingPanel?
private var lockView: RecordingLockView?
// Guards one-time mode configuration after the first layout pass.
private var didSetup = false
init(mode: Mode) {
self.mode = mode
super.init(frame: .zero)
backgroundColor = .clear
composer.delegate = composerDelegate
composer.accessibilityIdentifier = "voice.fixture.composer"
addSubview(composer)
stopTapMarker.isHidden = true
stopTapMarker.isAccessibilityElement = true
stopTapMarker.accessibilityIdentifier = "voice.fixture.stopTapped"
addSubview(stopTapMarker)
}
@available(*, unavailable)
required init?(coder: NSCoder) { fatalError() }
// Pins the composer to the bottom edge; runs configureMode() exactly once,
// deferred to layout so bounds are non-zero when subviews are framed.
override func layoutSubviews() {
super.layoutSubviews()
let composerHeight: CGFloat = 64
composer.frame = CGRect(
x: 0,
y: bounds.height - composerHeight,
width: bounds.width,
height: composerHeight
)
stopTapMarker.frame = CGRect(x: 0, y: 0, width: 1, height: 1)
guard !didSetup else { return }
didSetup = true
configureMode()
}
// Installs the subviews required by the requested snapshot state.
private func configureMode() {
switch mode {
case .idle:
break
case .armed:
break
case .recordingUnlocked:
showRecordingPanel(locked: false)
case .locking30:
showRecordingPanel(locked: false)
showLockView(lockness: 0.3, showStop: false)
case .locking70:
showRecordingPanel(locked: false)
showLockView(lockness: 0.7, showStop: false)
case .cancelDrag:
showRecordingPanel(locked: false, cancelTranslation: -120)
case .recordingLocked:
showRecordingPanel(locked: true)
case .stop:
showRecordingPanel(locked: false)
showLockView(lockness: 1.0, showStop: true)
case .waitingPreview:
showRecordingPanel(locked: true)
case .preview:
showPreviewPanel()
case .previewTrimmed:
showPreviewPanel()
}
}
// Adds a VoiceRecordingPanel pre-set to 1:35.78; optionally applies a
// cancel-drag translation and/or the locked-state chrome (Cancel button,
// lock/stop view, debug stop accessibility area).
private func showRecordingPanel(locked: Bool, cancelTranslation: CGFloat = 0) {
let panelWidth = max(220, bounds.width - 16 - 42 - 6 - 16)
let panel = VoiceRecordingPanel(
frame: CGRect(x: 16, y: bounds.height - 58, width: panelWidth, height: 42)
)
panel.updateDuration(95.78)
panel.accessibilityIdentifier = "voice.fixture.recordingPanel"
addSubview(panel)
recordingPanel = panel
if VoiceRecordingParityMath.shouldApplyCancelTransform(cancelTranslation) {
panel.updateCancelTranslation(cancelTranslation)
}
if locked {
panel.showCancelButton()
showLockView(lockness: 1.0, showStop: true)
#if DEBUG
DispatchQueue.main.async { [weak self] in
self?.composer.debugSetRecordingStopAccessibilityAreaEnabled(true)
}
#endif
}
}
// Adds a RecordingLockView at the given lock progress; when showStop is true,
// tapping stop reveals stopTapMarker for UI-test verification.
private func showLockView(lockness: CGFloat, showStop: Bool) {
let lock = RecordingLockView(frame: CGRect(x: bounds.width - 58, y: bounds.height - 190, width: 40, height: 72))
addSubview(lock)
lock.updateLockness(lockness)
if showStop {
lock.showStopButton(onTap: { [weak self] in
self?.stopTapMarker.isHidden = false
})
}
lock.accessibilityIdentifier = "voice.fixture.lockView"
lockView = lock
}
// Shows the post-recording preview with an empty temp .m4a file and a
// synthetic |sin| waveform (180 samples, 12 s). DEBUG builds route through
// the composer's debug hook; otherwise a standalone panel is added.
private func showPreviewPanel() {
let tmpURL = FileManager.default.temporaryDirectory
.appendingPathComponent("voice_recording_fixture.m4a")
if !FileManager.default.fileExists(atPath: tmpURL.path) {
FileManager.default.createFile(atPath: tmpURL.path, contents: Data(), attributes: nil)
}
let waveform = (0..<180).map { idx in
let t = Double(idx) / 12.0
return Float(abs(sin(t)))
}
#if DEBUG
composer.debugShowPreviewReplacingInputRow(
fileURL: tmpURL,
duration: 12.0,
waveform: waveform
)
#else
let panelWidth = bounds.width - 32
let panel = RecordingPreviewPanel(
frame: CGRect(x: 16, y: bounds.height - 58, width: panelWidth, height: 42),
fileURL: tmpURL,
duration: 12.0,
waveform: waveform
)
panel.accessibilityIdentifier = "voice.fixture.previewPanel"
addSubview(panel)
previewPanel = panel
#endif
}
}
/// SwiftUI bridge for the UIKit fixture container. The mode is fixed at
/// creation; updates are a no-op because the fixture is a static snapshot.
private struct VoiceRecordingUITestFixtureRepresentable: UIViewRepresentable {
    let mode: VoiceRecordingUITestFixtureContainer.Mode

    func makeUIView(context: Context) -> VoiceRecordingUITestFixtureContainer {
        let container = VoiceRecordingUITestFixtureContainer(mode: mode)
        container.accessibilityIdentifier = "voice.fixture.root"
        return container
    }

    // Intentionally empty: the container configures itself once and never changes.
    func updateUIView(_ uiView: VoiceRecordingUITestFixtureContainer, context: Context) {}
}
/// Host screen for the voice-recording UI-test fixture: a title, the active
/// mode label, and the UIKit fixture container filling the remaining space.
struct VoiceRecordingUITestFixtureView: View {
    private let mode: VoiceRecordingUITestFixtureContainer.Mode
    /// Falls back to `.idle` when the launch argument carries an unknown mode.
    init(modeRawValue: String) {
        self.mode = VoiceRecordingUITestFixtureContainer.Mode(rawValue: modeRawValue) ?? .idle
    }
    var body: some View {
        VStack(spacing: 12) {
            Text("Voice Recording Fixture")
                .font(.headline)
            Text("Mode: \(mode.rawValue)")
                .font(.footnote)
                .foregroundStyle(.secondary)
            VoiceRecordingUITestFixtureRepresentable(mode: mode)
                .frame(maxWidth: .infinity, maxHeight: .infinity)
        }
        .padding(.top, 24)
        .padding(.horizontal, 12)
        .background(Color(.systemBackground))
        .accessibilityIdentifier("voice.fixture.screen")
    }
}

View File

@@ -647,7 +647,9 @@ private struct ChatListDialogContent: View {
@State private var typingDialogs: [String: Set<String>] = [:]
var body: some View {
#if DEBUG
let _ = PerformanceLogger.shared.track("chatList.bodyEval")
#endif
// CRITICAL: Read DialogRepository.dialogs directly to establish @Observable tracking.
// Without this, ChatListDialogContent only observes viewModel (ObservableObject)
// which never publishes objectWillChange for dialog mutations.
@@ -725,78 +727,6 @@ private struct ChatListDialogContent: View {
}
}
// MARK: - Sync-Aware Chat Row (observation-isolated)
/// Reads `SessionManager.syncBatchInProgress` (@Observable) in its own
/// observation scope. Without this wrapper, every sync state change would
/// invalidate the entire `ChatListDialogContent.body` and rebuild all rows.
///
/// **Performance:** `viewModel` and `navigationState` are stored as plain `let`
/// (not @ObservedObject). Class references compare by pointer in SwiftUI's
/// memcmp-based view diffing — stable pointers mean unchanged rows are NOT
/// re-evaluated when the parent body rebuilds. Closures are defined inline
/// (not passed from parent) to avoid non-diffable closure props that force
/// every row dirty on every parent re-render.
struct SyncAwareChatRow: View {
    // Stored as plain `let` (not @ObservedObject) — class references compare
    // by pointer, keeping SwiftUI's memcmp diffing cheap for unchanged rows.
    let dialog: Dialog
    let isTyping: Bool
    let typingSenderNames: [String]
    // First row hides its top separator (list edge styling).
    let isFirst: Bool
    let viewModel: ChatListViewModel
    let navigationState: ChatListNavigationState
    var body: some View {
        // Establishes @Observable tracking on syncBatchInProgress inside THIS
        // row's scope only — the whole purpose of this wrapper view.
        let isSyncing = SessionManager.shared.syncBatchInProgress
        Button {
            navigationState.path.append(ChatRoute(dialog: dialog))
        } label: {
            ChatRowView(
                dialog: dialog,
                isSyncing: isSyncing,
                isTyping: isTyping,
                typingSenderNames: typingSenderNames
            )
        }
        .buttonStyle(.plain)
        .listRowInsets(EdgeInsets())
        .listRowSeparator(isFirst ? .hidden : .visible, edges: .top)
        .listRowSeparator(.visible, edges: .bottom)
        .listRowSeparatorTint(RosettaColors.Adaptive.divider)
        // Separator inset aligns with the text column (right of the avatar).
        .alignmentGuide(.listRowSeparatorLeading) { _ in 82 }
        .swipeActions(edge: .trailing, allowsFullSwipe: false) {
            Button(role: .destructive) {
                withAnimation { viewModel.deleteDialog(dialog) }
            } label: {
                Label("Delete", systemImage: "trash")
            }
            // Saved Messages cannot be muted.
            if !dialog.isSavedMessages {
                Button {
                    withAnimation { viewModel.toggleMute(dialog) }
                } label: {
                    Label(
                        dialog.isMuted ? "Unmute" : "Mute",
                        systemImage: dialog.isMuted ? "bell" : "bell.slash"
                    )
                }
                .tint(dialog.isMuted ? .green : .indigo)
            }
        }
        .swipeActions(edge: .leading, allowsFullSwipe: true) {
            Button {
                withAnimation { viewModel.togglePin(dialog) }
            } label: {
                Label(dialog.isPinned ? "Unpin" : "Pin", systemImage: dialog.isPinned ? "pin.slash" : "pin")
            }
            .tint(.orange)
        }
    }
}
// MARK: - Device Approval Banner
/// Desktop parity: clean banner with "New login from {device} ({os})" and Accept/Decline.

View File

@@ -1,459 +0,0 @@
import SwiftUI
import Combine
// MARK: - ChatRowView
/// Chat row matching Figma "Row - Chats" component spec (node 3994:38947):
///
/// Row: height 78, pl-10, pr-16, items-center
/// Avatar: 62px circle, pr-10
/// Contents: flex-col, h-full, items-start, justify-center, pb-px
/// Title and Trailing Accessories: flex-1, gap-6, items-center, w-full
/// Title and Detail: flex-1, h-63, items-start, overflow-clip
/// Title: gap-4, items-center SF Pro Medium 17/22, tracking -0.43
/// Message: h-41 SF Pro Regular 15/20, tracking -0.23, secondary
/// Accessories: h-full, items-center, justify-end
/// Contents-Trailing: flex-col, h-full, items-end, justify-between, pt-8
/// Time: SF Pro Regular 14/20, tracking -0.23, secondary
/// Other: flex-1, items-end, justify-end, pb-14
/// Badge: bg-#008BFF, min-w-20, max-w-37, px-4, rounded-full
/// SF Pro Regular 15/20, black, tracking -0.23
struct ChatRowView: View {
    let dialog: Dialog
    /// Desktop parity: suppress unread badge during sync.
    var isSyncing: Bool = false
    /// Desktop parity: show "typing..." instead of last message.
    var isTyping: Bool = false
    /// Group typing: sender names for "Name typing..." / "Name and N typing..." display.
    var typingSenderNames: [String] = []
    /// Title resolution order: Saved Messages → group metadata title →
    /// opponent title → @username → truncated public key.
    var displayTitle: String {
        if dialog.isSavedMessages { return "Saved Messages" }
        if dialog.isGroup {
            let meta = GroupRepository.shared.groupMetadata(
                account: dialog.account,
                groupDialogKey: dialog.opponentKey
            )
            if let title = meta?.title, !title.isEmpty { return title }
        }
        if !dialog.opponentTitle.isEmpty { return dialog.opponentTitle }
        if !dialog.opponentUsername.isEmpty { return "@\(dialog.opponentUsername)" }
        return String(dialog.opponentKey.prefix(12))
    }
    var body: some View {
        // DEBUG-only perf instrumentation, matching the chatList/chatDetail
        // bodyEval trackers which are wrapped in #if DEBUG elsewhere in the app.
        #if DEBUG
        let _ = PerformanceLogger.shared.track("chatRow.bodyEval")
        #endif
        HStack(spacing: 0) {
            avatarSection
                .padding(.trailing, 10)
            contentSection
        }
        .padding(.leading, 10)
        .padding(.trailing, 16)
        .frame(height: 78)
        .contentShape(Rectangle())
    }
}
// MARK: - Avatar
/// Observation-isolated: reads `AvatarRepository.avatarVersion` in its own
/// scope so only the avatar re-renders when opponent avatar changes not the
/// entire ChatRowView (title, message preview, badge, etc.).
private struct ChatRowAvatar: View {
    let dialog: Dialog
    var body: some View {
        if dialog.isGroup {
            groupAvatarView
        } else {
            directAvatarView
        }
    }
    /// Avatar for 1:1 dialogs: AvatarView with initials fallback and online dot.
    private var directAvatarView: some View {
        // Establish @Observable tracking — re-renders this view on avatar save/remove.
        let _ = AvatarRepository.shared.avatarVersion
        return AvatarView(
            initials: dialog.initials,
            colorIndex: dialog.avatarColorIndex,
            size: 62,
            isOnline: dialog.isOnline,
            isSavedMessages: dialog.isSavedMessages,
            image: dialog.isSavedMessages ? nil : AvatarRepository.shared.loadAvatar(publicKey: dialog.opponentKey)
        )
    }
    /// Avatar for group dialogs: saved image when present, otherwise a tinted
    /// circle with the person.2 glyph.
    private var groupAvatarView: some View {
        // Same @Observable tracking as directAvatarView.
        let _ = AvatarRepository.shared.avatarVersion
        let groupImage = AvatarRepository.shared.loadAvatar(publicKey: dialog.opponentKey)
        return ZStack {
            if let image = groupImage {
                Image(uiImage: image)
                    .resizable()
                    .scaledToFill()
                    .frame(width: 62, height: 62)
                    .clipShape(Circle())
            } else {
                Circle()
                    .fill(RosettaColors.avatarColors[dialog.avatarColorIndex % RosettaColors.avatarColors.count].tint)
                    .frame(width: 62, height: 62)
                Image(systemName: "person.2.fill")
                    .font(.system(size: 24, weight: .medium))
                    .foregroundStyle(.white.opacity(0.9))
            }
        }
    }
}
private extension ChatRowView {
    /// Observation-isolated avatar — ChatRowAvatar re-renders independently
    /// of the rest of the row.
    var avatarSection: some View {
        ChatRowAvatar(dialog: dialog)
    }
}
// MARK: - Content Section
// Figma "Contents": flex-col, h-full, items-start, justify-center, pb-px
// "Title and Trailing Accessories": flex-1, gap-6, items-center
private extension ChatRowView {
    /// Figma "Contents": title + message column on the left, trailing
    /// accessories (time, badges) on the right.
    var contentSection: some View {
        HStack(alignment: .center, spacing: 6) {
            // "Title and Detail": flex-1, h-63, items-start, overflow-clip
            VStack(alignment: .leading, spacing: 0) {
                titleRow
                messageRow
            }
            .frame(maxWidth: .infinity, alignment: .leading)
            .frame(height: 63)
            .clipped()
            // "Accessories and Grabber": h-full, items-center, justify-end
            trailingColumn
                .frame(maxHeight: .infinity)
        }
        .frame(maxHeight: .infinity)
        .padding(.bottom, 1)
    }
}
// MARK: - Title Row (name + badges)
// Figma "Title": gap-4, items-center, w-full
private extension ChatRowView {
    /// Figma "Title": dialog name plus optional verified badge and mute icon.
    var titleRow: some View {
        HStack(spacing: 4) {
            Text(displayTitle)
                .font(.system(size: 17, weight: .medium))
                .tracking(-0.43)
                .foregroundStyle(RosettaColors.Adaptive.text)
                .lineLimit(1)
            // Saved Messages never shows a verified badge.
            if !dialog.isSavedMessages && dialog.effectiveVerified > 0 {
                VerifiedBadge(
                    verified: dialog.effectiveVerified,
                    size: 16
                )
            }
            if dialog.isMuted {
                Image(systemName: "speaker.slash.fill")
                    .font(.system(size: 12))
                    .foregroundStyle(RosettaColors.Adaptive.textSecondary)
            }
        }
    }
}
// MARK: - Message Row
// Figma "Message": h-41, SF Pro Regular 15/20, tracking -0.23, secondary
// MARK: - Message Row
// Figma "Message": h-41, SF Pro Regular 15/20, tracking -0.23, secondary
private extension ChatRowView {
    var messageRow: some View {
        Text(messageText)
            .font(.system(size: 15))
            .tracking(-0.23)
            .foregroundStyle(
                isTyping && !dialog.isSavedMessages
                    ? RosettaColors.figmaBlue
                    : RosettaColors.Adaptive.textSecondary
            )
            .lineLimit(2)
            .frame(height: 41, alignment: .topLeading)
    }
    /// Static cache for emoji-parsed message text (avoids regex per row per render).
    /// Accessed only from SwiftUI body evaluation (main thread); bounded at 500 entries.
    private static var messageTextCache: [String: String] = [:]
    /// Preview text for the row: typing indicator, sanitized last message,
    /// or a placeholder when the payload must not be shown.
    var messageText: String {
        // Desktop parity: show "typing..." in chat list row when opponent is typing.
        if isTyping && !dialog.isSavedMessages {
            if dialog.isGroup && !typingSenderNames.isEmpty {
                if typingSenderNames.count == 1 {
                    return "\(typingSenderNames[0]) typing..."
                } else {
                    return "\(typingSenderNames[0]) and \(typingSenderNames.count - 1) typing..."
                }
            }
            return "typing..."
        }
        let raw = dialog.lastMessage.trimmingCharacters(in: .whitespacesAndNewlines)
        if raw.isEmpty {
            return "No messages yet"
        }
        // Desktop parity: show "Group invite" for #group: invite messages.
        if raw.hasPrefix("#group:") {
            return "Group invite"
        }
        // Safety net: never show encrypted ciphertext (ivBase64:ctBase64) to user.
        // This catches stale data persisted before isGarbageText was improved.
        if Self.looksLikeCiphertext(raw) {
            return "No messages yet"
        }
        if let cached = Self.messageTextCache[dialog.lastMessage] {
            return cached
        }
        // Strip inline markdown markers and convert emoji shortcodes for clean preview.
        let cleaned = dialog.lastMessage.replacingOccurrences(of: "**", with: "")
        let result = EmojiParser.replaceShortcodes(in: cleaned)
        if Self.messageTextCache.count > 500 {
            let keysToRemove = Array(Self.messageTextCache.keys.prefix(250))
            for key in keysToRemove { Self.messageTextCache.removeValue(forKey: key) }
        }
        Self.messageTextCache[dialog.lastMessage] = result
        return result
    }
    /// Detects encrypted payload formats that should never be shown in UI.
    private static func looksLikeCiphertext(_ text: String) -> Bool {
        // CHNK: chunked format
        if text.hasPrefix("CHNK:") { return true }
        // ivBase64:ctBase64 — restrict to the ASCII base64 alphabet.
        // (CharacterSet.alphanumerics matches ALL Unicode letters/digits, which
        // misclassified legitimate non-Latin "word:word" messages as ciphertext.)
        let parts = text.components(separatedBy: ":")
        if parts.count == 2 {
            let base64Chars = CharacterSet(
                charactersIn: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="
            )
            let bothBase64 = parts.allSatisfy { part in
                part.count >= 16 && part.unicodeScalars.allSatisfy { base64Chars.contains($0) }
            }
            if bothBase64 { return true }
        }
        // Pure hex string (>= 40 chars, only hex digits) — XChaCha20 wire format
        if text.count >= 40 {
            let hexChars = CharacterSet(charactersIn: "0123456789abcdefABCDEF")
            if text.unicodeScalars.allSatisfy({ hexChars.contains($0) }) { return true }
        }
        return false
    }
}
// MARK: - Trailing Column
// Figma "Contents - Trailing": flex-col, h-full, items-end, justify-between, pt-8
// "Read Status and Time": gap-2, items-center
// "Other": flex-1, items-end, justify-end, pb-14
private extension ChatRowView {
    /// Figma "Contents - Trailing": delivery/time at top, pin + badges at bottom.
    var trailingColumn: some View {
        VStack(alignment: .trailing, spacing: 0) {
            // Top: read status + time
            HStack(spacing: 2) {
                if dialog.lastMessageFromMe && !dialog.isSavedMessages {
                    deliveryIcon
                }
                Text(formattedTime)
                    .font(.system(size: 14))
                    .tracking(-0.23)
                    .foregroundStyle(
                        dialog.unreadCount > 0 && !dialog.isMuted
                            ? RosettaColors.figmaBlue
                            : RosettaColors.Adaptive.textSecondary
                    )
            }
            .padding(.top, 8)
            Spacer(minLength: 0)
            // Bottom: pin or unread badge
            HStack(spacing: 8) {
                // Pin icon only when there is no unread badge to show.
                if dialog.isPinned && dialog.unreadCount == 0 {
                    Image(systemName: "pin.fill")
                        .font(.system(size: 15))
                        .foregroundStyle(RosettaColors.Adaptive.textSecondary)
                        .rotationEffect(.degrees(45))
                }
                // Show unread badge whenever there are unread messages.
                // Previously hidden when lastMessageFromMe (desktop parity),
                // but this caused invisible unreads when user sent a reply
                // without reading prior incoming messages first.
                if dialog.hasMention && dialog.unreadCount > 0 && !isSyncing {
                    mentionBadge
                }
                if dialog.unreadCount > 0 && !isSyncing {
                    unreadBadge
                }
            }
            .padding(.bottom, 14)
        }
    }
    /// Telegram-style `@` mention indicator (shown left of unread count).
    var mentionBadge: some View {
        Text("@")
            .font(.system(size: 14, weight: .medium))
            .foregroundStyle(.white)
            .frame(width: 20, height: 20)
            .background {
                Circle()
                    .fill(dialog.isMuted ? Color(hex: 0x787880) : RosettaColors.figmaBlue)
            }
    }
    /// Delivery state glyph: double check (read), single check (delivered),
    /// clock/error (waiting), or error icon.
    @ViewBuilder
    var deliveryIcon: some View {
        if dialog.lastMessageDelivered == .delivered && dialog.lastMessageRead {
            DoubleCheckmarkShape()
                .fill(RosettaColors.figmaBlue)
                .frame(width: 17, height: 9.3)
        } else {
            switch dialog.lastMessageDelivered {
            case .waiting:
                // Timer isolated to sub-view — only .waiting rows create a timer.
                DeliveryWaitingIcon(sentTimestamp: dialog.lastMessageTimestamp)
            case .delivered:
                SingleCheckmarkShape()
                    .fill(RosettaColors.Adaptive.textSecondary)
                    .frame(width: 14, height: 10.3)
            case .error:
                Image(systemName: "exclamationmark.circle.fill")
                    .font(.system(size: 14))
                    .foregroundStyle(RosettaColors.error)
            }
        }
    }
    /// Unread-count capsule: "N", "99+" above 99, "NK" above 999; gray when muted.
    var unreadBadge: some View {
        let count = dialog.unreadCount
        let text = count > 999 ? "\(count / 1000)K" : (count > 99 ? "99+" : "\(count)")
        let isMuted = dialog.isMuted
        // Single-digit badges stay a fixed 20pt circle (no horizontal padding).
        let isSmall = count < 10
        return Text(text)
            .font(.system(size: 15))
            .tracking(-0.23)
            .foregroundStyle(.white)
            .padding(.horizontal, isSmall ? 0 : 4)
            .frame(
                minWidth: 20,
                maxWidth: isSmall ? 20 : 37,
                minHeight: 20
            )
            .background {
                Capsule()
                    .fill(isMuted ? Color(hex: 0x787880) : RosettaColors.figmaBlue)
            }
    }
}
// MARK: - Delivery Waiting Icon (timer-isolated)
/// Desktop parity: clock, then error after 80s. Timer only exists on rows with
/// `.waiting` delivery status — all other rows have zero timer overhead.
private struct DeliveryWaitingIcon: View {
    /// Send time in milliseconds since epoch; 0/negative is treated as "just sent".
    let sentTimestamp: Int64
    @State private var now = Date()
    // 40s period re-evaluates the 80s window at most twice per pending message.
    private let recheckTimer = Timer.publish(every: 40, on: .main, in: .common).autoconnect()
    /// True while the message is less than 80s old (still "waiting", not failed).
    private var isWithinWindow: Bool {
        guard sentTimestamp > 0 else { return true }
        let sentDate = Date(timeIntervalSince1970: Double(sentTimestamp) / 1000)
        return now.timeIntervalSince(sentDate) < 80
    }
    var body: some View {
        Group {
            if isWithinWindow {
                Image(systemName: "clock")
                    .font(.system(size: 13))
                    .foregroundStyle(RosettaColors.Adaptive.textSecondary)
            } else {
                Image(systemName: "exclamationmark.circle.fill")
                    .font(.system(size: 14))
                    .foregroundStyle(RosettaColors.error)
            }
        }
        .onReceive(recheckTimer) { now = $0 }
    }
}
// MARK: - Time Formatting
// MARK: - Time Formatting
private extension ChatRowView {
    private static let timeFormatter: DateFormatter = {
        let f = DateFormatter(); f.dateFormat = "h:mm a"; return f
    }()
    private static let dayFormatter: DateFormatter = {
        let f = DateFormatter(); f.dateFormat = "EEE"; return f
    }()
    private static let dateFormatter: DateFormatter = {
        let f = DateFormatter(); f.dateFormat = "dd.MM.yy"; return f
    }()
    /// Static cache for formatted time strings (avoids Date/Calendar per row per render).
    /// Keyed by "timestamp@startOfToday" — relative labels ("Yesterday", weekday)
    /// depend on the CURRENT day, so a plain timestamp key served stale strings
    /// after midnight. Main-thread only; bounded at 500 entries.
    private static var timeStringCache: [String: String] = [:]
    /// Chat-list time label: time today, "Yesterday", weekday within 7 days,
    /// else dd.MM.yy. Empty string when no message timestamp exists.
    var formattedTime: String {
        guard dialog.lastMessageTimestamp > 0 else { return "" }
        let calendar = Calendar.current
        let now = Date()
        // Day-bucketed cache key: invalidates relative labels at midnight.
        let todayStamp = Int(calendar.startOfDay(for: now).timeIntervalSince1970)
        let cacheKey = "\(dialog.lastMessageTimestamp)@\(todayStamp)"
        if let cached = Self.timeStringCache[cacheKey] {
            return cached
        }
        let date = Date(timeIntervalSince1970: Double(dialog.lastMessageTimestamp) / 1000)
        let result: String
        if calendar.isDateInToday(date) {
            result = Self.timeFormatter.string(from: date)
        } else if calendar.isDateInYesterday(date) {
            result = "Yesterday"
        } else if let days = calendar.dateComponents([.day], from: date, to: now).day, days < 7 {
            result = Self.dayFormatter.string(from: date)
        } else {
            result = Self.dateFormatter.string(from: date)
        }
        if Self.timeStringCache.count > 500 {
            let keysToRemove = Array(Self.timeStringCache.keys.prefix(250))
            for key in keysToRemove { Self.timeStringCache.removeValue(forKey: key) }
        }
        Self.timeStringCache[cacheKey] = result
        return result
    }
}
// MARK: - Preview
#Preview {
    // Sample dialog exercising: online indicator, unread badge, delivered+read
    // checkmarks — shown once normally and once in the typing-state variant.
    let sampleDialog = Dialog(
        id: "preview", account: "mykey", opponentKey: "abc001",
        opponentTitle: "Alice Johnson",
        opponentUsername: "alice",
        lastMessage: "Hey, how are you?",
        lastMessageTimestamp: Int64(Date().timeIntervalSince1970 * 1000),
        unreadCount: 3, isOnline: true, lastSeen: 0,
        verified: 1, iHaveSent: true,
        isPinned: false, isMuted: false,
        lastMessageFromMe: true, lastMessageDelivered: .delivered,
        lastMessageRead: true
    )
    VStack(spacing: 0) {
        ChatRowView(dialog: sampleDialog)
        ChatRowView(dialog: sampleDialog, isTyping: true)
    }
    .background(RosettaColors.Adaptive.background)
}

View File

@@ -1,34 +1,33 @@
import Lottie
import SwiftUI
import UIKit
// MARK: - RequestChatsView (SwiftUI shell toolbar + navigation only)
/// Screen showing incoming message requests opened from the "Request Chats"
/// row at the top of the main chat list (Telegram Archive style).
/// List content rendered by UIKit RequestChatsController for performance parity.
struct RequestChatsView: View {
@ObservedObject var viewModel: ChatListViewModel
@ObservedObject var navigationState: ChatListNavigationState
@Environment(\.dismiss) private var dismiss
/// Desktop parity: track typing dialogs from MessageRepository (@Published).
@State private var typingDialogs: [String: Set<String>] = [:]
var body: some View {
Group {
if viewModel.requestsModeDialogs.isEmpty {
RequestsEmptyStateView()
} else {
List {
ForEach(Array(viewModel.requestsModeDialogs.enumerated()), id: \.element.id) { index, dialog in
requestRow(dialog, isFirst: index == 0)
let isSyncing = SessionManager.shared.syncBatchInProgress
RequestChatsCollectionView(
dialogs: viewModel.requestsModeDialogs,
isSyncing: isSyncing,
onSelectDialog: { dialog in
navigationState.path.append(ChatRoute(dialog: dialog))
},
onDeleteDialog: { dialog in
viewModel.deleteDialog(dialog)
}
Color.clear.frame(height: 80)
.listRowInsets(EdgeInsets())
.listRowBackground(Color.clear)
.listRowSeparator(.hidden)
}
.listStyle(.plain)
.scrollContentBackground(.hidden)
.scrollIndicators(.hidden)
)
}
}
.background(RosettaColors.Adaptive.background.ignoresSafeArea())
@@ -50,7 +49,6 @@ struct RequestChatsView: View {
}
.modifier(ChatListToolbarBackgroundModifier())
.enableSwipeBack()
.onReceive(MessageRepository.shared.$typingDialogs) { typingDialogs = $0 }
}
// MARK: - Capsule Back Button (matches ChatDetailView)
@@ -67,30 +65,173 @@ struct RequestChatsView: View {
.frame(height: 44)
.padding(.horizontal, 4)
.background {
glassCapsule(strokeOpacity: 0.22, strokeColor: .white)
TelegramGlassCapsule()
}
}
}
// MARK: - RequestChatsCollectionView (UIViewControllerRepresentable bridge)
private struct RequestChatsCollectionView: UIViewControllerRepresentable {
    let dialogs: [Dialog]
    let isSyncing: Bool
    var onSelectDialog: ((Dialog) -> Void)?
    var onDeleteDialog: ((Dialog) -> Void)?
    func makeUIViewController(context: Context) -> RequestChatsController {
        let controller = RequestChatsController()
        controller.onSelectDialog = onSelectDialog
        controller.onDeleteDialog = onDeleteDialog
        // NOTE(review): runs before the controller's view loads, so this call
        // only stores the dialogs (dataSource is nil until viewDidLoad) —
        // verify the initial list actually renders on first appearance.
        controller.updateDialogs(dialogs, isSyncing: isSyncing)
        return controller
    }
    func updateUIViewController(_ controller: RequestChatsController, context: Context) {
        // Closures are reassigned each update because SwiftUI may rebuild them.
        controller.onSelectDialog = onSelectDialog
        controller.onDeleteDialog = onDeleteDialog
        controller.updateDialogs(dialogs, isSyncing: isSyncing)
    }
}
// MARK: - RequestChatsController (UIKit)
/// Pure UIKit UICollectionView controller for request chats list.
/// Single flat section with ChatListCell same rendering as main chat list.
final class RequestChatsController: UIViewController {
var onSelectDialog: ((Dialog) -> Void)?
var onDeleteDialog: ((Dialog) -> Void)?
private var dialogs: [Dialog] = []
private var isSyncing: Bool = false
private var dialogMap: [String: Dialog] = [:]
private var collectionView: UICollectionView!
private var dataSource: UICollectionViewDiffableDataSource<Int, String>!
private var cellRegistration: UICollectionView.CellRegistration<ChatListCell, Dialog>!
// MARK: - Lifecycle
override func viewDidLoad() {
    super.viewDidLoad()
    view.backgroundColor = .clear
    setupCollectionView()
    setupCellRegistration()
    setupDataSource()
    // Dialogs can arrive via updateDialogs(_:isSyncing:) BEFORE the view
    // loads (the SwiftUI bridge calls it from makeUIViewController, when
    // dataSource is still nil and the call only stores state). Apply the
    // initial snapshot here so the first render is not an empty list.
    if !dialogs.isEmpty {
        var snapshot = NSDiffableDataSourceSnapshot<Int, String>()
        snapshot.appendSections([0])
        snapshot.appendItems(dialogs.map(\.id), toSection: 0)
        dataSource.apply(snapshot, animatingDifferences: false)
    }
}
// MARK: - Collection View
/// Builds the plain-list compositional layout collection view, pinned to all
/// edges. Separators are drawn by the cells themselves (showsSeparators off).
private func setupCollectionView() {
    var listConfig = UICollectionLayoutListConfiguration(appearance: .plain)
    listConfig.showsSeparators = false
    listConfig.backgroundColor = .clear
    // Swipe-to-delete provided per row by trailingSwipeActions(for:).
    listConfig.trailingSwipeActionsConfigurationProvider = { [weak self] indexPath in
        self?.trailingSwipeActions(for: indexPath)
    }
    let layout = UICollectionViewCompositionalLayout.list(using: listConfig)
    collectionView = UICollectionView(frame: .zero, collectionViewLayout: layout)
    collectionView.translatesAutoresizingMaskIntoConstraints = false
    collectionView.backgroundColor = .clear
    collectionView.delegate = self
    collectionView.showsVerticalScrollIndicator = false
    collectionView.alwaysBounceVertical = true
    // Keep the last row clear of the floating bottom UI.
    collectionView.contentInset.bottom = 80
    view.addSubview(collectionView)
    NSLayoutConstraint.activate([
        collectionView.topAnchor.constraint(equalTo: view.topAnchor),
        collectionView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
        collectionView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
        collectionView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
    ])
}
/// Registers ChatListCell — the same cell type as the main chat list — and
/// hides the first row's separator to match the list's edge styling.
private func setupCellRegistration() {
    cellRegistration = UICollectionView.CellRegistration<ChatListCell, Dialog> {
        [weak self] cell, indexPath, dialog in
        guard let self else { return }
        cell.configure(with: dialog, isSyncing: self.isSyncing)
        cell.setSeparatorHidden(indexPath.item == 0)
    }
}
// Use TelegramGlass* for ALL iOS versions — SwiftUI .glassEffect() blocks touches.
private func glassCapsule(strokeOpacity: Double = 0.18, strokeColor: Color = .white) -> some View {
TelegramGlassCapsule()
/// Creates the diffable data source keyed by dialog id (String).
/// Cell content comes from dialogMap; an id missing from the map yields an
/// empty placeholder cell rather than crashing.
private func setupDataSource() {
    dataSource = UICollectionViewDiffableDataSource<Int, String>(
        collectionView: collectionView
    ) { [weak self] collectionView, indexPath, itemId in
        guard let self, let dialog = self.dialogMap[itemId] else {
            return UICollectionViewCell()
        }
        return collectionView.dequeueConfiguredReusableCell(
            using: self.cellRegistration,
            for: indexPath,
            item: dialog
        )
    }
}
private func requestRow(_ dialog: Dialog, isFirst: Bool) -> some View {
SyncAwareChatRow(
dialog: dialog,
isTyping: !(typingDialogs[dialog.opponentKey]?.isEmpty ?? true),
typingSenderNames: {
guard let senderKeys = typingDialogs[dialog.opponentKey] else { return [] }
return senderKeys.map { sk in
DialogRepository.shared.dialogs[sk]?.opponentTitle
?? String(sk.prefix(8))
}
}(),
isFirst: isFirst,
viewModel: viewModel,
navigationState: navigationState
)
// MARK: - Update Data
/// Replaces the dialog list and refreshes visible cells.
/// Structural changes (insert/remove/reorder) are detected by comparing the
/// new ids against the data source's CURRENTLY APPLIED snapshot — not against
/// the previously stored array. Comparing against the stored array missed the
/// first post-load update: ids stored while dataSource was nil compared equal
/// later, so the snapshot was never applied and the list stayed empty.
func updateDialogs(_ newDialogs: [Dialog], isSyncing: Bool) {
    self.isSyncing = isSyncing
    self.dialogs = newDialogs
    dialogMap.removeAll(keepingCapacity: true)
    for d in newDialogs { dialogMap[d.id] = d }
    // Before viewDidLoad there is no data source yet — just store the state.
    guard dataSource != nil else { return }
    let newIds = newDialogs.map(\.id)
    if dataSource.snapshot().itemIdentifiers != newIds {
        var snapshot = NSDiffableDataSourceSnapshot<Int, String>()
        snapshot.appendSections([0])
        snapshot.appendItems(newIds, toSection: 0)
        dataSource.apply(snapshot, animatingDifferences: true)
    }
    // Content-only changes: reconfigure visible cells in place (no diff pass).
    for cell in collectionView.visibleCells {
        guard let indexPath = collectionView.indexPath(for: cell),
              let itemId = dataSource.itemIdentifier(for: indexPath),
              let chatCell = cell as? ChatListCell,
              let dialog = dialogMap[itemId] else { continue }
        chatCell.configure(with: dialog, isSyncing: isSyncing)
    }
}
// MARK: - Swipe Actions
/// Destructive swipe-to-delete for the row at `indexPath`.
/// The delete callback is dispatched async so the swipe animation completes
/// before the dialog list mutates.
private func trailingSwipeActions(for indexPath: IndexPath) -> UISwipeActionsConfiguration? {
    guard let itemId = dataSource.itemIdentifier(for: indexPath),
          let dialog = dialogMap[itemId] else { return nil }
    let delete = UIContextualAction(style: .destructive, title: nil) { [weak self] _, _, completion in
        DispatchQueue.main.async { self?.onDeleteDialog?(dialog) }
        completion(true)
    }
    delete.image = UIImage(systemName: "trash.fill")
    // System red (matches iOS destructive action tint).
    delete.backgroundColor = UIColor(red: 1, green: 0.23, blue: 0.19, alpha: 1)
    return UISwipeActionsConfiguration(actions: [delete])
}
}
// MARK: - UICollectionViewDelegate
extension RequestChatsController: UICollectionViewDelegate {
    /// Row tap: deselect with animation, then forward the dialog to the
    /// SwiftUI navigation closure on the next runloop tick so the selection
    /// animation is not cut off by the push.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        collectionView.deselectItem(at: indexPath, animated: true)
        guard let itemId = dataSource.itemIdentifier(for: indexPath),
              let dialog = dialogMap[itemId] else { return }
        DispatchQueue.main.async { [weak self] in
            self?.onSelectDialog?(dialog)
        }
    }
}

View File

@@ -58,8 +58,7 @@ final class ChatListCell: UICollectionViewCell {
let statusImageView = UIImageView()
let badgeContainer = UIView()
let badgeLabel = UILabel()
let mentionBadgeContainer = UIView()
let mentionLabel = UILabel()
let mentionImageView = UIImageView()
let pinnedIconView = UIImageView()
// Separator
@@ -173,16 +172,11 @@ final class ChatListCell: UICollectionViewCell {
badgeLabel.textAlignment = .center
badgeContainer.addSubview(badgeLabel)
// Mention badge
mentionBadgeContainer.isHidden = true
mentionBadgeContainer.layer.cornerRadius = CellLayout.badgeDiameter / 2
contentView.addSubview(mentionBadgeContainer)
mentionLabel.font = .systemFont(ofSize: 14, weight: .medium)
mentionLabel.textColor = .white
mentionLabel.text = "@"
mentionLabel.textAlignment = .center
mentionBadgeContainer.addSubview(mentionLabel)
// Mention badge (Telegram-exact: tinted vector icon)
mentionImageView.image = UIImage(named: "MentionBadgeIcon")?.withRenderingMode(.alwaysTemplate)
mentionImageView.contentMode = .scaleAspectFit
mentionImageView.isHidden = true
contentView.addSubview(mentionImageView)
// Pin icon
pinnedIconView.contentMode = .scaleAspectFit
@@ -310,13 +304,12 @@ final class ChatListCell: UICollectionViewCell {
badgeRightEdge = badgeContainer.frame.minX - CellLayout.badgeSpacing
}
if !mentionBadgeContainer.isHidden {
mentionBadgeContainer.frame = CGRect(
if !mentionImageView.isHidden {
mentionImageView.frame = CGRect(
x: badgeRightEdge - CellLayout.badgeDiameter, y: badgeY,
width: CellLayout.badgeDiameter, height: CellLayout.badgeDiameter
)
mentionLabel.frame = mentionBadgeContainer.bounds
badgeRightEdge = mentionBadgeContainer.frame.minX - CellLayout.badgeSpacing
badgeRightEdge = mentionImageView.frame.minX - CellLayout.badgeSpacing
}
if !pinnedIconView.isHidden {
@@ -420,7 +413,7 @@ final class ChatListCell: UICollectionViewCell {
// Date
dateLabel.text = formatTime(dialog.lastMessageTimestamp)
dateLabel.textColor = (dialog.unreadCount > 0 && !dialog.isMuted) ? accentBlue : secondaryColor
dateLabel.textColor = secondaryColor
// Delivery status
configureDeliveryStatus(dialog: dialog, secondaryColor: secondaryColor, accentBlue: accentBlue)
@@ -584,7 +577,9 @@ final class ChatListCell: UICollectionViewCell {
private func configureBadge(dialog: Dialog, isSyncing: Bool, accentBlue: UIColor, mutedBadgeBg: UIColor) {
let count = dialog.unreadCount
let showBadge = count > 0 && !isSyncing
// Telegram: when mention + only 1 unread show only @ badge, no count
let showMention = dialog.hasMention && count > 0 && !isSyncing
let showBadge = count > 0 && !isSyncing && !(showMention && count == 1)
if showBadge {
let text: String
@@ -598,12 +593,11 @@ final class ChatListCell: UICollectionViewCell {
// Animate badge appear/disappear (Telegram: scale spring)
animateBadgeTransition(view: badgeContainer, shouldShow: showBadge, wasVisible: &wasBadgeVisible)
// Mention badge
let showMention = dialog.hasMention && count > 0 && !isSyncing
// Mention badge (Telegram: tinted vector icon)
if showMention {
mentionBadgeContainer.backgroundColor = dialog.isMuted ? mutedBadgeBg : accentBlue
mentionImageView.tintColor = dialog.isMuted ? mutedBadgeBg : accentBlue
}
animateBadgeTransition(view: mentionBadgeContainer, shouldShow: showMention, wasVisible: &wasMentionBadgeVisible)
animateBadgeTransition(view: mentionImageView, shouldShow: showMention, wasVisible: &wasMentionBadgeVisible)
}
/// Telegram badge animation: appear = scale 0.00011.2 (0.2s) 1.0 (0.12s settle);
@@ -777,7 +771,7 @@ final class ChatListCell: UICollectionViewCell {
mutedIconView.isHidden = true
statusImageView.isHidden = true
badgeContainer.isHidden = true
mentionBadgeContainer.isHidden = true
mentionImageView.isHidden = true
pinnedIconView.isHidden = true
onlineIndicator.isHidden = true
contentView.backgroundColor = .clear
@@ -788,7 +782,7 @@ final class ChatListCell: UICollectionViewCell {
wasBadgeVisible = false
wasMentionBadgeVisible = false
badgeContainer.transform = .identity
mentionBadgeContainer.transform = .identity
mentionImageView.transform = .identity
}
// MARK: - Highlight