Фикс: бэкграунд звонки — аудио, имя на CallKit, deactivation order, UUID race

This commit is contained in:
2026-04-06 00:18:37 +05:00
parent d65624ad35
commit 55cb120db3
32 changed files with 1548 additions and 688 deletions

View File

@@ -0,0 +1,30 @@
{
"images" : [
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "light"
}
],
"filename" : "wallpaper_light_01.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"filename" : "wallpaper_dark_01.png",
"idiom" : "universal",
"scale" : "1x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

View File

@@ -0,0 +1,30 @@
{
"images" : [
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "light"
}
],
"filename" : "wallpaper_light_02.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"filename" : "wallpaper_dark_02.png",
"idiom" : "universal",
"scale" : "1x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 867 KiB

View File

@@ -0,0 +1,30 @@
{
"images" : [
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "light"
}
],
"filename" : "wallpaper_light_03.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"filename" : "wallpaper_dark_03.png",
"idiom" : "universal",
"scale" : "1x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

View File

@@ -53,6 +53,9 @@ struct Dialog: Identifiable, Codable, Equatable {
/// Android parity: separate read flag for last outgoing message.
var lastMessageRead: Bool
/// Desktop parity: true when an unread group message mentions the current user.
var hasMention: Bool = false
// MARK: - Computed
var isSavedMessages: Bool { opponentKey == account }

View File

@@ -282,6 +282,17 @@ final class DialogRepository {
/// Android parity: recalculate dialog from DB after marking messages as read.
/// Recalculates the dialog summary from the message DB after the local user
/// reads the dialog, then clears its mention indicator — a dialog that has
/// just been read can no longer carry an unseen mention.
/// - Parameter opponentKey: public key identifying the dialog's opponent.
func markAsRead(opponentKey: String) {
updateDialogFromMessages(opponentKey: opponentKey)
// Clear mention when dialog is read
setMention(opponentKey: opponentKey, hasMention: false)
}
/// Desktop parity: set/clear mention indicator on a dialog.
/// Sets or clears the mention indicator on a dialog (Desktop parity).
/// Writes back to `dialogs` only when the flag actually changes, so
/// observers are not notified redundantly.
/// - Parameters:
///   - opponentKey: public key identifying the dialog's opponent.
///   - hasMention: new value for the dialog's mention indicator.
func setMention(opponentKey: String, hasMention: Bool) {
// No-op when the dialog is unknown or the flag already has the target value.
guard var dialog = dialogs[opponentKey], dialog.hasMention != hasMention else { return }
dialog.hasMention = hasMention
dialogs[opponentKey] = dialog
}
/// Android parity: recalculate dialog from DB after opponent reads our messages.

View File

@@ -89,6 +89,7 @@ struct MessageCellLayout: Sendable {
case photoWithCaption
case file
case forward
case emojiOnly
}
}
@@ -157,8 +158,16 @@ extension MessageCellLayout {
/// Telegram-style tight bubbles: timestamp goes inline with last text line
/// when there's space, or on a new line when there isn't.
static func calculate(config: Config) -> (layout: MessageCellLayout, textLayout: CoreTextTextLayout?) {
let font = UIFont.systemFont(ofSize: 17, weight: .regular)
let tsFont = UIFont.systemFont(ofSize: floor(font.pointSize * 11.0 / 17.0), weight: .regular)
// emojiOnly detection needs text first, but font is needed for layout.
// Pre-check for emojiOnly to choose font size (40pt vs 17pt).
let isEmojiOnlyPrecheck = !config.text.isEmpty
&& config.imageCount == 0 && config.fileCount == 0
&& config.avatarCount == 0 && config.callCount == 0
&& !config.isForward && !config.hasReplyQuote
&& EmojiParser.isEmojiOnly(config.text)
// Telegram: messageEmojiFont = Font.regular(53.0) (ChatPresentationData.swift line 58)
let font = UIFont.systemFont(ofSize: isEmojiOnlyPrecheck ? 53 : 17, weight: .regular)
let tsFont = UIFont.systemFont(ofSize: floor(17 * 11.0 / 17.0), weight: .regular)
let screenPixel = 1.0 / max(UIScreen.main.scale, 1)
let metrics = BubbleMetrics.telegram()
let mergeType = BubbleGeometryEngine.mergeType(for: config.position)
@@ -168,7 +177,7 @@ extension MessageCellLayout {
let effectiveMaxBubbleWidth = max(40, config.maxBubbleWidth - deliveryFailedInset)
// Classify message type
let messageType: MessageType
var messageType: MessageType
if config.isForward {
messageType = .forward
} else if config.imageCount > 0 && !config.text.isEmpty {
@@ -182,10 +191,15 @@ extension MessageCellLayout {
} else {
messageType = .text
}
// Emoji-only: 1–3 emoji without other text → large font, no bubble
if messageType == .text && !config.text.isEmpty && EmojiParser.isEmojiOnly(config.text) {
messageType = .emojiOnly
}
let isTextMessage = (messageType == .text || messageType == .textWithReply)
let isForwardWithCaption = messageType == .forward && !config.text.isEmpty
// Status lane metrics always use 17pt base (not the emoji-enlarged font).
let textStatusLaneMetrics = TextStatusLaneMetrics.telegram(
fontPointSize: font.pointSize,
fontPointSize: 17,
screenPixel: screenPixel
)
let groupGap: CGFloat = {
@@ -219,7 +233,7 @@ extension MessageCellLayout {
let textMeasurement: TextMeasurement
var cachedTextLayout: CoreTextTextLayout?
let needsDetailedTextLayout = isTextMessage || messageType == .photoWithCaption || (messageType == .forward && !config.text.isEmpty)
let needsDetailedTextLayout = isTextMessage || messageType == .emojiOnly || messageType == .photoWithCaption || (messageType == .forward && !config.text.isEmpty)
if !config.text.isEmpty && needsDetailedTextLayout {
// CoreText (CTTypesetter) returns per-line widths including lastLineWidth.
// Also captures CoreTextTextLayout for cell rendering (avoids double computation).
@@ -366,6 +380,23 @@ extension MessageCellLayout {
bubbleH += topPad + textMeasurement.size.height + bottomPad
if photoH > 0 { bubbleH += 6 }
}
} else if messageType == .emojiOnly {
// Emoji-only: no bubble rendered like a sticker.
// Telegram source: ChatMessageStickerItemNode.swift
// innerImageInset = 10pt (line 597)
// imageSize = CoreText measured size at 53pt font
// innerImageSize = imageSize + inset*2 on each side (line 598)
// outgoing adds statusHeight to layoutSize (line 955-957)
let emojiInset: CGFloat = 10
let emojiW = textMeasurement.size.width
let emojiH = textMeasurement.size.height
bubbleW = emojiW + emojiInset * 2
bubbleH = emojiH + emojiInset * 2
// Telegram: outgoing adds status height, incoming overlaps
let statusPillH = tsSize.height + 4 // pill vertical padding (2+2)
if config.isOutgoing {
bubbleH += statusPillH
}
} else if isTextMessage && !config.text.isEmpty {
// EXACT TELEGRAM MATH — no other modifiers
let actualTextW = textMeasurement.size.width
@@ -469,11 +500,11 @@ extension MessageCellLayout {
// tsFrame.maxX = checkFrame.minX - timeToCheckGap
// checkFrame.minX = bubbleW - inset - checkW
let metadataRightInset: CGFloat
if messageType == .photo {
// Telegram: statusInsets = (top:0, left:0, bottom:6, right:6) from PHOTO edges.
// Pill right = statusEndX + mediaStatusInsets.right(7) = bubbleW - X + 7
// Photo right = bubbleW - 2. Gap = 6pt pill right = bubbleW - 8.
// statusEndX = bubbleW - 15 metadataRightInset = 15.
if messageType == .emojiOnly {
// Telegram: pill background insets = (top:2, left:7, bottom:2, right:7)
// Status pill right-aligned with 7pt pill padding
metadataRightInset = 7
} else if messageType == .photo {
metadataRightInset = 15
} else if isTextMessage || isForwardWithCaption || messageType == .photoWithCaption {
metadataRightInset = config.isOutgoing
@@ -488,6 +519,9 @@ extension MessageCellLayout {
let metadataBottomInset: CGFloat
if messageType == .photo {
metadataBottomInset = 10
} else if messageType == .emojiOnly {
// Telegram: outgoing dateOffset.y = 12pt, incoming overlaps emoji area
metadataBottomInset = config.isOutgoing ? 2 : 12
} else if messageType == .file && config.text.isEmpty {
metadataBottomInset = fileOnlyTsPad
} else {
@@ -520,11 +554,13 @@ extension MessageCellLayout {
let checkReadFrame: CGRect
let clockFrame: CGRect
if hasStatusIcon {
let checkImgW: CGFloat = floor(floor(font.pointSize * 11.0 / 17.0))
// Always use 17pt base for status icons (not the emoji-enlarged font).
let baseFontSize: CGFloat = 17
let checkImgW: CGFloat = floor(floor(baseFontSize * 11.0 / 17.0))
let checkImgH: CGFloat = floor(checkImgW * 9.0 / 11.0)
let checkOffset: CGFloat = (isTextMessage || isForwardWithCaption)
? textStatusLaneMetrics.checkOffset
: floor(font.pointSize * 6.0 / 17.0)
: floor(baseFontSize * 6.0 / 17.0)
let checkReadX = statusEndX - checkImgW
let checkSentX = checkReadX - checkOffset
let checkBaselineOffset: CGFloat = isTextMessage
@@ -564,12 +600,22 @@ extension MessageCellLayout {
}
}
let textFrame = CGRect(
x: leftPad,
y: textY,
width: bubbleW - leftPad - rightPad,
height: textMeasurement.size.height
)
let textFrame: CGRect
if messageType == .emojiOnly {
// Telegram: emoji at innerImageInset (10pt) from edges
textFrame = CGRect(
x: 10, y: 10,
width: textMeasurement.size.width,
height: textMeasurement.size.height
)
} else {
textFrame = CGRect(
x: leftPad,
y: textY,
width: bubbleW - leftPad - rightPad,
height: textMeasurement.size.height
)
}
// Accessory frames (reply, photo, file, forward)
let replyContainerFrame = CGRect(x: 5, y: 5, width: bubbleW - 10, height: replyContainerH)

View File

@@ -243,11 +243,14 @@ final class ProtocolManager: @unchecked Sendable {
/// Keeps the process alive during active calls so WebSocket survives brief background.
func beginCallBackgroundTask() {
guard callBackgroundTask == .invalid else { return }
let remaining = UIApplication.shared.backgroundTimeRemaining
Self.logger.info("📞 Background task starting — remaining=\(remaining, privacy: .public)s wsState=\(String(describing: self.connectionState), privacy: .public)")
callBackgroundTask = UIApplication.shared.beginBackgroundTask(withName: "RosettaCall") { [weak self] in
// Don't end the call here — CallKit keeps the process alive for active calls.
// This background task only buys time for WebSocket reconnection.
// Killing the call on expiry was causing premature call termination
// during keyExchange phase (~30s before Desktop could respond).
Self.logger.info("📞 Background task EXPIRED — OS reclaiming")
self?.endCallBackgroundTask()
}
Self.logger.info("📞 Background task started for call")

View File

@@ -191,11 +191,12 @@ final class CallKitManager: NSObject {
// MARK: - Outgoing Call
func startOutgoingCall(peerKey: String) {
func startOutgoingCall(peerKey: String, displayName: String) {
let uuid = UUID()
currentCallUUID = uuid
let handle = CXHandle(type: .generic, value: peerKey)
let name = displayName.isEmpty ? "Rosetta" : displayName
let handle = CXHandle(type: .generic, value: name)
let action = CXStartCallAction(call: uuid, handle: handle)
action.isVideo = false
@@ -205,6 +206,13 @@ final class CallKitManager: NSObject {
}
}
// Report CXCallUpdate with localizedCallerName so CarPlay shows the
// contact name instead of the raw public key.
let update = CXCallUpdate()
update.localizedCallerName = name
update.remoteHandle = handle
provider.reportCall(with: uuid, updated: update)
provider.reportOutgoingCall(with: uuid, startedConnectingAt: nil)
}
@@ -216,7 +224,18 @@ final class CallKitManager: NSObject {
// MARK: - End Call
func endCall() {
guard let uuid = currentCallUUID else {
// Fall back to _pendingCallUUID — currentCallUUID is set asynchronously
// via Task { @MainActor } in reportIncomingCallSynchronously completion.
// On background PushKit calls, the Task may not have run yet when
// finishCall() triggers endCall() → "no UUID" → CallKit never learns
// the call ended → didDeactivate never fires → audio session stuck.
let uuid: UUID? = currentCallUUID ?? {
uuidLock.lock()
let pending = _pendingCallUUID
uuidLock.unlock()
return pending
}()
guard let uuid else {
Self.logger.notice("CallKit.endCall: no UUID — skipping")
return
}
@@ -236,7 +255,13 @@ final class CallKitManager: NSObject {
}
func reportCallEndedByRemote(reason: CXCallEndedReason = .remoteEnded) {
guard let uuid = currentCallUUID else { return }
let uuid: UUID? = currentCallUUID ?? {
uuidLock.lock()
let pending = _pendingCallUUID
uuidLock.unlock()
return pending
}()
guard let uuid else { return }
Self.logger.notice("CallKit.reportCallEndedByRemote reason=\(reason.rawValue, privacy: .public) uuid=\(uuid.uuidString.prefix(8), privacy: .public)")
currentCallUUID = nil
uuidLock.lock()
@@ -276,16 +301,21 @@ extension CallKitManager: CXProviderDelegate {
}
nonisolated func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
Self.logger.info("CXAnswerCallAction")
Self.logger.info("CXAnswerCallAction — user tapped Accept")
action.fulfill()
Task { @MainActor in
let phase = CallManager.shared.uiState.phase
let mode = CallManager.shared.signalingMode
let wsState = ProtocolManager.shared.connectionState
Self.logger.info("CXAnswerCallAction: phase=\(phase.rawValue, privacy: .public) mode=\(mode.rawValue, privacy: .public) wsState=\(String(describing: wsState), privacy: .public)")
let result = CallManager.shared.acceptIncomingCall()
if result == .started {
Self.logger.info("CXAnswerCallAction: accepted")
Self.logger.info("CXAnswerCallAction: accepted → keyExchange")
} else {
// Phase not .incoming yet WebSocket hasn't delivered .call signal.
// Set pending flag so handleSignalPacket(.call) auto-accepts when it arrives.
Self.logger.info("CXAnswerCallAction: pending (phase not incoming yet, waiting for WebSocket)")
Self.logger.info("CXAnswerCallAction: pending (result=\(String(describing: result), privacy: .public), waiting for WebSocket)")
CallManager.shared.pendingCallKitAccept = true
}
}
@@ -318,35 +348,37 @@ extension CallKitManager: CXProviderDelegate {
}
nonisolated func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
Self.logger.info("Audio session activated by CallKit")
Self.logger.info("Audio session activated by CallKit — category=\(audioSession.category.rawValue, privacy: .public) mode=\(audioSession.mode.rawValue, privacy: .public)")
let rtcSession = RTCAudioSession.sharedInstance()
// 1. Tell WebRTC the session is active (updates internal isActive flag).
// 1. Tell WebRTC the session is active.
rtcSession.audioSessionDidActivate(audioSession)
// 2. Configure category BEFORE enabling audio when isAudioEnabled
// becomes true, ADM may immediately init the audio unit and will
// use whatever category is currently set. Without this, it may use
// .soloAmbient (default) instead of .playAndRecord silent audio.
// 2. Toggle isAudioEnabled OFF first to force ADM restart.
// If force-activation retry loop already ran and ADM entered a
// permanent failed state (cached bad activation), this toggle
// forces a fresh AudioUnit initialization with the REAL
// CallKit-blessed audio session.
rtcSession.isAudioEnabled = false
// 3. Configure category BEFORE re-enabling audio.
rtcSession.lockForConfiguration()
do {
try rtcSession.setCategory(
.playAndRecord, mode: .voiceChat,
options: [.allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
options: [.allowBluetooth, .defaultToSpeaker]
)
} catch {
Self.logger.error("Failed to set audio category: \(error.localizedDescription)")
}
rtcSession.unlockForConfiguration()
// 3. NOW enable audio ADM will init with correct .playAndRecord category.
// 4. Re-enable audio ADM reinitializes from scratch.
rtcSession.isAudioEnabled = true
// Guard: CallKit may fire didActivate twice (observed in logs).
// Second call is harmless for WebRTC but re-running onAudioSessionActivated
// wastes cycles and logs confusing duplicate entries.
// ALWAYS run onAudioSessionActivated it is idempotent.
// If force-activation already ran but failed (setActive threw), this is
// the first REAL activation with CallKit's audio entitlement.
// If force-activation succeeded, this re-applies routing and re-enables
// the audio track with the proper CallKit-blessed session fixes cases
// where force-activated audio was silent.
Task { @MainActor in
if !CallManager.shared.audioSessionActivated {
await CallManager.shared.onAudioSessionActivated()
} else {
callLogger.info("[Call] didActivate: skipping duplicate (already activated)")
}
await CallManager.shared.onAudioSessionActivated()
}
}

View File

@@ -74,25 +74,20 @@ extension CallManager {
}
}
/// Called by CallKit when audio session is activated (didActivate callback).
/// Category is already set in didActivate (before isAudioEnabled = true).
/// This method handles deferred WebRTC setup and non-audio-session config.
/// MUST be async directly awaits ensurePeerConnectionAndOffer() to avoid
/// double-async nesting (Task inside Task) that causes race conditions.
/// Called by CallKit when audio session is activated (didActivate callback)
/// or by force-activation fallback. Idempotent safe to call multiple times.
/// Peer connection is created separately (immediately on createRoom signal).
/// This method only enables audio on already-created tracks.
func onAudioSessionActivated() async {
let wasAlreadyActivated = audioSessionActivated
audioSessionActivated = true
callLogger.notice("[Call] didActivate: phase=\(self.uiState.phase.rawValue, privacy: .public) pendingWebRtcSetup=\(self.pendingWebRtcSetup.description, privacy: .public)")
pendingWebRtcSetup = false
callLogger.notice("[Call] didActivate: phase=\(self.uiState.phase.rawValue, privacy: .public) mode=\(self.signalingMode.rawValue, privacy: .public) wasAlready=\(wasAlreadyActivated.description, privacy: .public) hasPeer=\((self.peerConnection != nil).description, privacy: .public) hasTrack=\((self.localAudioTrack != nil).description, privacy: .public) wsState=\(String(describing: ProtocolManager.shared.connectionState), privacy: .public)")
guard uiState.phase != .idle else { return }
// Flush deferred WebRTC setup .createRoom arrived before didActivate.
// Direct await (no Task wrapper) eliminates the double-async race where
// remote offers arrive before peer connection exists.
if pendingWebRtcSetup {
pendingWebRtcSetup = false
await ensurePeerConnectionAndOffer()
}
// Apply routing AFTER peer connection is created (track now exists).
// ALWAYS re-apply routing and re-enable track even on duplicate calls.
// The real CallKit didActivate may arrive after a failed force-activation.
// Re-enabling ensures audio works when the proper entitlement is granted.
applyAudioOutputRouting()
UIDevice.current.isProximityMonitoringEnabled = true
localAudioTrack?.isEnabled = !uiState.isMuted
@@ -147,6 +142,8 @@ extension CallManager {
signalingMode = .undecided
callId = ""
joinToken = ""
hasConnectedTransport = false
hasRemoteAudioTrack = false
pendingIncomingAccept = false
uiState = CallUiState(
phase: .idle,
@@ -251,12 +248,21 @@ extension CallManager {
}
}
// Step 6: Reset all state.
// Step 6: Deactivate audio BEFORE resetting state.
// deactivateAudioSession() checks didReceiveCallKitDeactivation to avoid
// double-decrementing WebRTC's internal activation counter. Resetting the
// flag first would bypass the guard double-deactivate broken audio
// on the next call within the same process.
deactivateAudioSession()
// Step 7: Reset all state.
role = nil
signalingMode = .undecided
roomId = ""
callId = ""
joinToken = ""
hasConnectedTransport = false
hasRemoteAudioTrack = false
localPrivateKey = nil
localPublicKeyHex = ""
sharedKey = nil
@@ -275,7 +281,6 @@ extension CallManager {
uiState = finalState
}
deactivateAudioSession()
ProtocolManager.shared.endCallBackgroundTask()
}
@@ -296,6 +301,17 @@ extension CallManager {
uiState.peerUsername = dialog.opponentUsername
}
}
// Fallback: after cold start, DialogRepository may not be loaded yet.
// Check UserDefaults contact_display_names (same source VoIP push uses).
if uiState.peerTitle.isEmpty, uiState.peerUsername.isEmpty {
for defaults in [UserDefaults(suiteName: "group.com.rosetta.dev"), UserDefaults.standard] {
if let names = defaults?.dictionary(forKey: "contact_display_names") as? [String: String],
let name = names[publicKey], !name.isEmpty {
uiState.peerTitle = name
break
}
}
}
// If we just resolved a name that wasn't available before (e.g. DB loaded
// after cold start), update CallKit so lock screen shows the name.
if !hadName, !uiState.peerTitle.isEmpty {
@@ -368,6 +384,8 @@ extension CallManager {
self.localAudioSource = audioSource
self.localAudioTrack = audioTrack
self.peerConnection = connection
self.hasConnectedTransport = false
self.hasRemoteAudioTrack = false
callLogger.info("[Call] Peer connection created, audio track enabled=\((!self.uiState.isMuted).description, privacy: .public)")
return connection
}
@@ -546,6 +564,10 @@ extension CallManager: RTCPeerConnectionDelegate {
audioTrack.isEnabled = true
callLogger.info("[Call] Remote audio track: \(audioTrack.trackId, privacy: .public) enabled=\(audioTrack.isEnabled.description, privacy: .public) state=\(audioTrack.readyState.rawValue, privacy: .public)")
}
if !stream.audioTracks.isEmpty {
self.hasRemoteAudioTrack = true
self.setCallActiveIfNeeded()
}
}
}
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {}
@@ -568,6 +590,14 @@ extension CallManager: RTCPeerConnectionDelegate {
callLogger.info("[Call] PeerConnection state: \(newState.rawValue, privacy: .public)")
if newState == .connected {
Task { @MainActor in
if !self.hasConnectedTransport {
// Fallback: on some devices we may observe PeerConnection.connected
// before ICE callback is delivered on MainActor.
self.hasConnectedTransport = true
}
if peerConnection.receivers.contains(where: { ($0.track?.kind ?? "") == "audio" }) {
self.hasRemoteAudioTrack = true
}
self.setCallActiveIfNeeded()
let localEnabled = self.localAudioTrack?.isEnabled ?? false
let senderCount = peerConnection.senders.count
@@ -595,6 +625,10 @@ extension CallManager: RTCPeerConnectionDelegate {
receiver.track?.isEnabled = true
let trackKind = transceiver.mediaType == .audio ? "audio" : "video"
callLogger.info("[Call] didStartReceivingOn: \(trackKind, privacy: .public) receiverId=\(receiver.receiverId, privacy: .public) trackEnabled=\((receiver.track?.isEnabled ?? false).description, privacy: .public)")
if transceiver.mediaType == .audio {
self.hasRemoteAudioTrack = true
self.setCallActiveIfNeeded()
}
self.attachReceiverCryptor(receiver)
}
}

View File

@@ -73,6 +73,10 @@ final class CallManager: NSObject, ObservableObject {
/// Recovery timer for ICE .disconnected state. Android waits 15s before ending call;
/// iOS was killing instantly on any brief network hiccup.
var disconnectRecoveryTask: Task<Void, Never>?
/// Media readiness gate for transitioning to `.active`.
/// We only start call timer after transport is connected and remote audio path exists.
var hasConnectedTransport = false
var hasRemoteAudioTrack = false
/// Periodic E2EE rebind loop (1.5s, Android parity). SFU may create new
/// RTP senders/receivers during renegotiation that need encryptor/decryptor attachment.
var e2eeRebindTask: Task<Void, Never>?
@@ -116,6 +120,10 @@ final class CallManager: NSObject, ObservableObject {
if hasLegacySessionMetadata {
setSignalingMode(.legacy, reason: "push metadata")
}
// If callId present but joinToken missing (VoIPApns.java:57-67 omits joinToken),
// keep mode as .undecided. The WebSocket CALL signal will deliver full metadata.
// Android parity: polling loop in completePendingIncomingAcceptIfPossible waits
// for metadata and sends ACCEPT (never KEY_EXCHANGE for callee).
guard uiState.phase == .idle else {
completePendingIncomingAcceptIfPossible(trigger: "push update")
@@ -123,7 +131,7 @@ final class CallManager: NSObject, ObservableObject {
}
callLogger.notice(
"setupIncomingCallFromPush: callerKey=\(peer.prefix(12), privacy: .public) name=\(callerName, privacy: .public) mode=\(self.signalingMode.rawValue, privacy: .public)"
"setupIncomingCallFromPush: callerKey=\(peer.prefix(12), privacy: .public) name=\(callerName, privacy: .public) mode=\(self.signalingMode.rawValue, privacy: .public) callId=\(callId ?? "nil", privacy: .public) joinToken=\((joinToken?.isEmpty == false).description, privacy: .public) ownKey=\(self.ownPublicKey.isEmpty ? "EMPTY" : String(self.ownPublicKey.prefix(12)), privacy: .public) wsState=\(String(describing: ProtocolManager.shared.connectionState), privacy: .public)"
)
// Don't call beginCallSession() — it calls finishCall() which kills the
// CallKit call that PushKit just reported. Set state directly instead.
@@ -173,7 +181,7 @@ final class CallManager: NSObject, ObservableObject {
uiState.statusText = "Calling..."
ProtocolManager.shared.beginCallBackgroundTask()
CallKitManager.shared.startOutgoingCall(peerKey: target)
CallKitManager.shared.startOutgoingCall(peerKey: target, displayName: uiState.displayName)
ProtocolManager.shared.sendCallSignal(
signalType: .call,
@@ -202,7 +210,7 @@ final class CallManager: NSObject, ObservableObject {
uiState.isMinimized = false // Show full-screen custom overlay after accept
uiState.statusText = "Connecting..."
callLogger.notice(
"[Call] acceptIncomingCall: queued mode=\(self.signalingMode.rawValue, privacy: .public) hasMetadata=\(self.hasLegacySessionMetadata.description, privacy: .public)"
"[Call] acceptIncomingCall: queued mode=\(self.signalingMode.rawValue, privacy: .public) hasMetadata=\(self.hasLegacySessionMetadata.description, privacy: .public) audioActivated=\(self.audioSessionActivated.description, privacy: .public) wsState=\(String(describing: ProtocolManager.shared.connectionState), privacy: .public)"
)
completePendingIncomingAcceptIfPossible(trigger: "acceptIncomingCall")
return .started
@@ -211,6 +219,7 @@ final class CallManager: NSObject, ObservableObject {
func declineIncomingCall() {
callLogger.info("[Call] declineIncomingCall phase=\(self.uiState.phase.rawValue, privacy: .public)")
guard uiState.phase == .incoming else { return }
pendingCallKitAccept = false
if ownPublicKey.isEmpty == false, uiState.peerPublicKey.isEmpty == false {
ProtocolManager.shared.sendCallSignal(
signalType: .endCall,
@@ -355,7 +364,15 @@ final class CallManager: NSObject, ObservableObject {
callLogger.notice("[Call] pending accept completed via KEY_EXCHANGE (\(trigger, privacy: .public))")
case .undecided:
callLogger.info("[Call] pending accept waits for signaling mode decision (\(trigger, privacy: .public))")
callLogger.info("[Call] pending accept: mode undecided, waiting for metadata (\(trigger, privacy: .public))")
// Wait for metadata from VoIP push (now includes joinToken after server fix)
// or WebSocket CALL signal. 5-second timeout prevents infinite hang.
Task { @MainActor [weak self] in
try? await Task.sleep(for: .seconds(5))
guard let self, self.pendingIncomingAccept, self.signalingMode == .undecided else { return }
callLogger.error("[Call] undecided timeout (5s) — no metadata arrived")
self.finishCall(reason: "Connection timeout", notifyPeer: false)
}
}
}
@@ -398,7 +415,7 @@ final class CallManager: NSObject, ObservableObject {
if hasLegacySessionMetadata {
setSignalingMode(.legacy, reason: "incoming .call with call metadata")
} else if signalingMode == .undecided {
setSignalingMode(.createRoom, reason: "incoming .call without metadata")
callLogger.warning("[Call] WebSocket .call signal has no metadata — keeping undecided")
}
guard uiState.phase == .idle else {
// Already in a call with this peer ignore duplicate .call signal.
@@ -419,7 +436,25 @@ final class CallManager: NSObject, ObservableObject {
}
let cachedCallId = callId
let cachedJoinToken = joinToken
beginCallSession(peerPublicKey: incomingPeer, title: "", username: "")
// Resolve name from DialogRepository BEFORE beginCallSession resets state.
// Without this, CallKit shows the public key prefix instead of the name.
let cachedDialog = DialogRepository.shared.dialogs[incomingPeer]
var cachedTitle = cachedDialog?.opponentTitle ?? ""
let cachedUsername = cachedDialog?.opponentUsername ?? ""
// Fallback: after cold start from VoIP push, DialogRepository may not
// be loaded yet. Check UserDefaults contact_display_names (synced by
// DialogRepository.syncContactNamesToDefaults) same source the VoIP
// push handler uses for name resolution.
if cachedTitle.isEmpty, cachedUsername.isEmpty {
for defaults in [UserDefaults(suiteName: "group.com.rosetta.dev"), UserDefaults.standard] {
if let names = defaults?.dictionary(forKey: "contact_display_names") as? [String: String],
let name = names[incomingPeer], !name.isEmpty {
cachedTitle = name
break
}
}
}
beginCallSession(peerPublicKey: incomingPeer, title: cachedTitle, username: cachedUsername)
mergeLegacySessionMetadata(
callId: packet.callId,
joinToken: packet.joinToken,
@@ -435,7 +470,7 @@ final class CallManager: NSObject, ObservableObject {
if hasLegacySessionMetadata {
setSignalingMode(.legacy, reason: "incoming .call session start")
} else {
setSignalingMode(.createRoom, reason: "incoming .call session start")
callLogger.warning("[Call] .call signal has no metadata — mode stays undecided")
}
role = .callee
uiState.phase = .incoming
@@ -511,28 +546,33 @@ final class CallManager: NSObject, ObservableObject {
}
uiState.phase = .webRtcExchange
uiState.statusText = "Connecting..."
if audioSessionActivated {
Task(priority: .userInitiated) { [weak self] in
await self?.ensurePeerConnectionAndOffer()
}
} else {
// Create peer connection IMMEDIATELY don't wait for didActivate.
// With useManualAudio=true, WebRTC does NOT create AudioUnit on
// peer connection creation. AudioUnit is created lazily when
// isAudioEnabled becomes true. So SDP/ICE can proceed in parallel
// with audio session activation.
Task(priority: .userInitiated) { [weak self] in
await self?.ensurePeerConnectionAndOffer()
}
if !audioSessionActivated {
pendingWebRtcSetup = true
callLogger.notice("[Call] Deferring WebRTC setup — waiting for CallKit didActivate")
// Safety: if didActivate never fires (background VoIP push race),
// force audio session activation after 3 seconds. Without this,
// the call hangs in webRtcExchange forever — Desktop gives up,
// the SFU room stays allocated, and the next call gets "busy".
callLogger.notice("[Call] Deferring audio enable — waiting for CallKit didActivate")
// Telegram parity: just wait for didActivate. The audio session
// is pre-configured in the VoIP push handler (setCategory .playAndRecord
// + .voiceChat BEFORE reportNewIncomingCall), so CallKit should
// deliver didActivate reliably. Fallback after 8s if it doesn't.
Task(priority: .userInitiated) { @MainActor [weak self] in
try? await Task.sleep(for: .seconds(3))
guard let self else { return }
guard self.pendingWebRtcSetup, !self.audioSessionActivated else { return }
callLogger.notice("[Call] didActivate timeout (3s) — force-activating audio session")
try? await Task.sleep(for: .seconds(8))
guard let self, !self.audioSessionActivated, self.uiState.phase != .idle else { return }
callLogger.warning("[Call] didActivate timeout (8s) — force-enabling audio")
let rtcSession = RTCAudioSession.sharedInstance()
rtcSession.audioSessionDidActivate(AVAudioSession.sharedInstance())
let avSession = AVAudioSession.sharedInstance()
callLogger.notice("[Call] AVAudioSession: category=\(avSession.category.rawValue, privacy: .public) mode=\(avSession.mode.rawValue, privacy: .public)")
rtcSession.audioSessionDidActivate(avSession)
rtcSession.lockForConfiguration()
try? rtcSession.setCategory(
.playAndRecord, mode: .voiceChat,
options: [.allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
options: [.allowBluetooth, .defaultToSpeaker]
)
rtcSession.unlockForConfiguration()
rtcSession.isAudioEnabled = true
@@ -661,6 +701,16 @@ final class CallManager: NSObject, ObservableObject {
func setCallActiveIfNeeded() {
guard uiState.phase != .active else { return }
guard uiState.phase == .webRtcExchange else { return }
// Runtime gate: avoid starting timer before media path is truly ready.
// Unit tests may call this helper without a live peerConnection, so we
// only enforce the gate when connection exists.
if peerConnection != nil, (!hasConnectedTransport || !hasRemoteAudioTrack) {
callLogger.info(
"[Call] active gate waiting transport=\(self.hasConnectedTransport.description, privacy: .public) remoteAudio=\(self.hasRemoteAudioTrack.description, privacy: .public)"
)
return
}
uiState.phase = .active
uiState.statusText = "Call active"
cancelRingTimeout()
@@ -819,19 +869,23 @@ final class CallManager: NSObject, ObservableObject {
callLogger.info("[Call] ICE state: \(state.rawValue, privacy: .public) phase=\(self.uiState.phase.rawValue, privacy: .public)")
switch state {
case .connected, .completed:
hasConnectedTransport = true
disconnectRecoveryTask?.cancel()
disconnectRecoveryTask = nil
setCallActiveIfNeeded()
case .disconnected:
hasConnectedTransport = false
// Temporary ICE may recover (WiFiCellular switch, brief interruption).
// Android waits 15s. Previous iOS code killed instantly unstable calls.
startDisconnectRecoveryTimer(timeout: 15)
case .failed:
hasConnectedTransport = false
// More serious unlikely to recover. Android uses 12s for PeerConnectionState.FAILED
// (CallManager.kt:820). Previous iOS value was 5s too aggressive, dropped calls
// that Android would recover from on poor networks.
startDisconnectRecoveryTimer(timeout: 12)
case .closed:
hasConnectedTransport = false
finishCall(reason: "Connection closed", notifyPeer: false)
default:
break

View File

@@ -1208,6 +1208,17 @@ final class SessionManager {
/// Ends the session and disconnects.
func endSession() {
// Unsubscribe push tokens from server BEFORE disconnecting.
// Without this, old account's tokens stay registered server sends
// VoIP pushes for calls to this device even after account switch.
if let voipToken = UserDefaults.standard.string(forKey: "voip_push_token"),
!voipToken.isEmpty {
unsubscribeVoIPToken(voipToken)
}
if let fcmToken = UserDefaults.standard.string(forKey: "apns_device_token"),
!fcmToken.isEmpty {
unsubscribeFCMToken(fcmToken)
}
ProtocolManager.shared.disconnect()
SessionCredentialsManager.shared.clear()
privateKeyHash = nil
@@ -1792,6 +1803,18 @@ final class SessionManager {
)
}
// Desktop parity: detect @mentions in group messages.
// Set hasMention on the dialog when the text mentions the current user.
if isGroupDialog && !fromMe && !text.isEmpty {
let lowered = text.lowercased()
let myUsername = AccountManager.shared.currentAccount?.username?.lowercased() ?? ""
let mentioned = (!myUsername.isEmpty && lowered.contains("@\(myUsername)"))
|| lowered.contains("@\(myKey.prefix(8))")
if mentioned {
DialogRepository.shared.setMention(opponentKey: opponentKey, hasMention: true)
}
}
// Desktop parity: if we received a message from the opponent (not our own),
// they are clearly online update their online status immediately.
// This supplements PacketOnlineState (0x05) which may arrive with delay.
@@ -2733,6 +2756,21 @@ final class SessionManager {
Self.logger.info("VoIP push token sent to server")
}
/// Sends an unsubscribe request for an FCM push token.
/// Called on account switch/logout so the server stops sending notifications
/// for the old account to this device.
private func unsubscribeFCMToken(_ token: String) {
    // Only meaningful with a non-empty token and an authenticated connection.
    guard !token.isEmpty else { return }
    guard ProtocolManager.shared.connectionState == .authenticated else { return }

    var packet = PacketPushNotification()
    packet.action = .unsubscribe
    packet.tokenType = .fcm
    packet.notificationsToken = token
    packet.deviceId = DeviceIdentityManager.shared.currentDeviceId()
    ProtocolManager.shared.sendPacket(packet)

    Self.logger.info("FCM token unsubscribed from server")
}
/// Sends unsubscribe for a stale VoIP token (called when PushKit invalidates token).
func unsubscribeVoIPToken(_ token: String) {
guard !token.isEmpty,

View File

@@ -8,14 +8,22 @@ import SwiftUI
// Step 2: Place DarkModeButton wherever the toggle should appear.
/// Creates an overlay UIWindow for the dark mode transition animation.
/// Supports three theme modes: dark, light, system (via `rosetta_theme_mode`).
struct DarkModeWrapper<Content: View>: View {
@ViewBuilder var content: Content
@State private var overlayWindow: UIWindow?
@AppStorage("rosetta_dark_mode") private var activateDarkMode: Bool = true
@AppStorage("rosetta_theme_mode") private var themeModeRaw: String = "dark"
var body: some View {
content
.onAppear {
// Migrate legacy `rosetta_dark_mode` Bool `rosetta_theme_mode` String
let defaults = UserDefaults.standard
if defaults.object(forKey: "rosetta_theme_mode") == nil,
let legacy = defaults.object(forKey: "rosetta_dark_mode") as? Bool {
themeModeRaw = legacy ? "dark" : "light"
defaults.removeObject(forKey: "rosetta_dark_mode")
}
if overlayWindow == nil {
if let windowScene = activeWindowScene {
let overlayWindow = UIWindow(windowScene: windowScene)
@@ -26,9 +34,14 @@ struct DarkModeWrapper<Content: View>: View {
}
}
}
.onChange(of: activateDarkMode, initial: true) { _, newValue in
.onChange(of: themeModeRaw, initial: true) { _, newValue in
if let windowScene = activeWindowScene {
let style: UIUserInterfaceStyle = newValue ? .dark : .light
let style: UIUserInterfaceStyle
switch newValue {
case "light": style = .light
case "system": style = .unspecified
default: style = .dark
}
for window in windowScene.windows {
window.overrideUserInterfaceStyle = style
}
@@ -43,11 +56,12 @@ struct DarkModeWrapper<Content: View>: View {
}
/// Theme toggle button with sun/moon icon and circular reveal animation.
/// Cycles: dark light dark (quick toggle, full control in Appearance settings).
struct DarkModeButton: View {
@State private var buttonRect: CGRect = .zero
/// Local icon state changes INSTANTLY on tap (no round-trip through UserDefaults).
@State private var showMoonIcon: Bool = true
@AppStorage("rosetta_dark_mode") private var activateDarkMode: Bool = true
@AppStorage("rosetta_theme_mode") private var themeModeRaw: String = "dark"
var body: some View {
Button(action: {
@@ -61,7 +75,7 @@ struct DarkModeButton: View {
.frame(width: 44, height: 44)
})
.buttonStyle(.plain)
.onAppear { showMoonIcon = activateDarkMode }
.onAppear { showMoonIcon = themeModeRaw == "dark" || themeModeRaw == "system" }
.darkModeButtonRect { rect in
buttonRect = rect
}
@@ -88,7 +102,7 @@ struct DarkModeButton: View {
overlayWindow.addSubview(imageView)
// 3. Switch theme underneath the freeze frame (invisible to user).
activateDarkMode = targetDark
themeModeRaw = targetDark ? "dark" : "light"
// 4. Capture new state asynchronously after layout settles.
Task {

View File

@@ -67,4 +67,29 @@ enum EmojiParser {
let codes = emoji.unicodeScalars.map { String(format: "%x", $0.value) }
return ":emoji_\(codes.joined(separator: "-")):"
}
// MARK: - Emoji-Only Detection (Telegram Parity)
/// Returns true if text contains ONLY emoji characters after trimming whitespace
/// (no plain text, no limit on emoji count).
/// Telegram parity: `ChatMessageItemCommon.swift:243-252` — no max count,
/// just `message.text.containsOnlyEmoji` (EmojiUtils.swift:77-78).
static func isEmojiOnly(_ text: String) -> Bool {
    let content = text.trimmingCharacters(in: .whitespacesAndNewlines)
    if content.isEmpty { return false }
    // Reject as soon as any non-emoji character is found.
    for character in content where !character.isEmojiCharacter {
        return false
    }
    return true
}
}
// MARK: - Character Emoji Detection
private extension Character {
    /// True if this character renders as a visible emoji.
    /// Digits, '#', and '*' report `isEmoji == true` but are excluded here:
    /// their scalar values fall in the Basic Latin range (<= 0xFF) and they
    /// lack default emoji presentation.
    var isEmojiCharacter: Bool {
        // At least one scalar must be a genuine non-ASCII emoji…
        let hasCoreEmoji = unicodeScalars.contains { scalar in
            scalar.properties.isEmoji && scalar.value > 0xFF
        }
        guard hasCoreEmoji else { return false }
        // …and every scalar must be emoji-like or a joining character
        // (U+FE0F variation selector, U+200D zero-width joiner).
        return unicodeScalars.allSatisfy { scalar in
            if scalar.value == 0xFE0F || scalar.value == 0x200D { return true }
            if scalar.properties.isEmojiPresentation { return true }
            return scalar.properties.isEmoji && scalar.value > 0xFF
        }
    }
}

View File

@@ -0,0 +1,22 @@
import SwiftUI
/// Renders the selected wallpaper background for chat views.
/// Reads `rosetta_wallpaper_id` from UserDefaults and displays the matching wallpaper.
struct WallpaperView: View {
    @AppStorage("rosetta_wallpaper_id") private var wallpaperId: String = "default"

    var body: some View {
        // Fall back to the first option when the stored id is unknown.
        let selected = WallpaperOption.allOptions.first { $0.id == wallpaperId }
            ?? WallpaperOption.allOptions[0]

        switch selected.style {
        case .none:
            // No wallpaper image — plain adaptive background color.
            RosettaColors.Adaptive.background
        case .image(let assetName):
            Image(assetName)
                .resizable()
                .aspectRatio(contentMode: .fill)
        }
    }
}

View File

@@ -276,6 +276,7 @@ struct ChatDetailView: View {
// Telegram-like read policy: mark read only when dialog is truly readable
// (view active + list at bottom).
markDialogAsRead()
DialogRepository.shared.setMention(opponentKey: route.publicKey, hasMention: false)
// Request user info (non-mutating, won't trigger list rebuild)
requestUserInfoIfNeeded()
// Delay DialogRepository mutations to let navigation transition complete.
@@ -819,12 +820,9 @@ private extension ChatDetailView {
}
}
/// Cached tiled pattern color computed once, reused across renders
/// Default chat wallpaper full-screen scaled image.
/// Chat wallpaper reads user selection from @AppStorage.
private var tiledChatBackground: some View {
Image("ChatWallpaper")
.resizable()
.aspectRatio(contentMode: .fill)
WallpaperView()
}
// MARK: - Messages

View File

@@ -27,8 +27,33 @@ final class CoreTextTextLayout {
let width: CGFloat // Typographic advance width (CTLineGetTypographicBounds)
let ascent: CGFloat // Distance from baseline to top of tallest glyph
let descent: CGFloat // Distance from baseline to bottom of lowest glyph
let stringRange: NSRange // Character range this line covers in the original string
}
// MARK: - Link Detection

/// A detected URL with its character range and per-line bounding rects for hit testing.
struct LinkInfo {
    let url: URL
    // UTF-16 range of the link within the original string.
    let range: NSRange
    // One rect per wrapped line the link spans; consumed by `linkAt(point:)`.
    var rects: [CGRect]
}

/// TLD whitelist — desktop parity (desktop/app/constants.ts lines 38-63).
/// URLs whose host TLD is not in this set are not styled or made tappable.
static let allowedTLDs: Set<String> = [
    "com", "ru", "ua", "org", "net", "edu", "gov", "io", "tech", "info",
    "biz", "me", "online", "site", "app", "dev", "chat", "gg", "fm", "tv",
    "im", "sc", "su", "by"
]

/// Cached data detector (regex compilation is expensive); nil if creation fails.
private static let linkDetector: NSDataDetector? = {
    try? NSDataDetector(types: NSTextCheckingResult.CheckingType.link.rawValue)
}()

/// Link color matching Telegram accent blue (#3390EC).
static let linkColor = UIColor(red: 0x33/255, green: 0x90/255, blue: 0xEC/255, alpha: 1)
// MARK: - Properties
let lines: [Line]
@@ -37,6 +62,7 @@ final class CoreTextTextLayout {
let lastLineHasRTL: Bool
let lastLineHasBlockQuote: Bool
let textColor: UIColor
let links: [LinkInfo]
private init(
lines: [Line],
@@ -44,7 +70,8 @@ final class CoreTextTextLayout {
lastLineWidth: CGFloat,
lastLineHasRTL: Bool,
lastLineHasBlockQuote: Bool,
textColor: UIColor
textColor: UIColor,
links: [LinkInfo] = []
) {
self.lines = lines
self.size = size
@@ -52,6 +79,19 @@ final class CoreTextTextLayout {
self.lastLineHasRTL = lastLineHasRTL
self.lastLineHasBlockQuote = lastLineHasBlockQuote
self.textColor = textColor
self.links = links
}
/// Returns the URL whose hit area contains `point`, or nil when no link is there.
/// Each link rect is expanded by 4pt on every side to make tapping easier.
func linkAt(point: CGPoint) -> URL? {
    let hit = links.first { link in
        link.rects.contains { rect in
            rect.insetBy(dx: -4, dy: -4).contains(point)
        }
    }
    return hit?.url
}
// MARK: - Telegram Line Spacing
@@ -102,9 +142,26 @@ final class CoreTextTextLayout {
.font: font,
.foregroundColor: textColor
]
let attrString = NSAttributedString(string: text, attributes: attributes)
let attrString = NSMutableAttributedString(string: text, attributes: attributes)
let stringLength = attrString.length
// Link detection (desktop parity: TextParser.tsx + constants.ts TLD whitelist)
var detectedLinks: [(url: URL, range: NSRange)] = []
if let detector = linkDetector {
let fullRange = NSRange(location: 0, length: stringLength)
detector.enumerateMatches(in: text, options: [], range: fullRange) { result, _, _ in
guard let result, let url = result.url else { return }
let host = url.host?.lowercased() ?? ""
let tld = host.split(separator: ".").last.map(String.init) ?? ""
guard allowedTLDs.contains(tld) else { return }
attrString.addAttributes([
.foregroundColor: linkColor,
.underlineStyle: NSUnderlineStyle.single.rawValue
], range: result.range)
detectedLinks.append((url: url, range: result.range))
}
}
// Typesetter (Telegram: InteractiveTextComponent line 1481)
let typesetter = CTTypesetterCreateWithAttributedString(attrString as CFAttributedString)
@@ -163,7 +220,8 @@ final class CoreTextTextLayout {
origin: CGPoint(x: 0, y: currentY),
width: clampedWidth,
ascent: lineAscent,
descent: lineDescent
descent: lineDescent,
stringRange: NSRange(location: currentIndex, length: lineCharCount)
))
// Advance by font line height (Telegram uses font-level, not per-line)
@@ -183,13 +241,41 @@ final class CoreTextTextLayout {
.trimmingCharacters(in: .whitespacesAndNewlines)
.hasPrefix(">")
// Compute link bounding rects
var linkInfos: [LinkInfo] = []
for detected in detectedLinks {
var rects: [CGRect] = []
for line in resultLines {
let overlap = NSIntersectionRange(line.stringRange, detected.range)
guard overlap.length > 0 else { continue }
let lineStartInLink = overlap.location - line.stringRange.location
let lineEndInLink = lineStartInLink + overlap.length
var xStart: CGFloat = 0
var xEnd: CGFloat = 0
// CTLineGetOffsetForStringIndex uses UTF-16 offsets relative to line start
xStart = CGFloat(CTLineGetOffsetForStringIndex(
line.ctLine, overlap.location, nil
))
xEnd = CGFloat(CTLineGetOffsetForStringIndex(
line.ctLine, overlap.location + overlap.length, nil
))
if xEnd < xStart { swap(&xStart, &xEnd) }
let lineH = line.ascent + line.descent
rects.append(CGRect(x: xStart, y: line.origin.y, width: xEnd - xStart, height: lineH))
}
if !rects.isEmpty {
linkInfos.append(LinkInfo(url: detected.url, range: detected.range, rects: rects))
}
}
return CoreTextTextLayout(
lines: resultLines,
size: CGSize(width: ceil(maxLineWidth), height: ceil(currentY)),
lastLineWidth: ceil(lastLineWidth),
lastLineHasRTL: lastLineHasRTL,
lastLineHasBlockQuote: lastLineHasBlockQuote,
textColor: textColor
textColor: textColor,
links: linkInfos
)
}
@@ -254,15 +340,29 @@ final class CoreTextLabel: UIView {
// + line.ascent = baseline (distance from top to baseline)
// Telegram: context.textPosition = CGPoint(x: minX, y: maxY - descent)
// which equals origin.y + ascent (since maxY = origin.y + ascent + descent)
context.textPosition = CGPoint(
x: line.origin.x,
y: line.origin.y + line.ascent
)
let baselineY = line.origin.y + line.ascent
context.textPosition = CGPoint(x: line.origin.x, y: baselineY)
// Draw each glyph run (Telegram: CTRunDraw per run)
let glyphRuns = CTLineGetGlyphRuns(line.ctLine) as! [CTRun]
for run in glyphRuns {
CTRunDraw(run, context, CFRangeMake(0, 0)) // 0,0 = all glyphs
CTRunDraw(run, context, CFRangeMake(0, 0))
// Draw underline for link runs (CTRunDraw doesn't render underlines)
let attrs = CTRunGetAttributes(run) as? [NSAttributedString.Key: Any] ?? [:]
if let underline = attrs[.underlineStyle] as? Int, underline != 0,
let color = attrs[.foregroundColor] as? UIColor {
var runAscent: CGFloat = 0
let runWidth = CGFloat(CTRunGetTypographicBounds(run, CFRangeMake(0, 0), &runAscent, nil, nil))
var origin = CGPoint.zero
CTRunGetPositions(run, CFRangeMake(0, 1), &origin)
let underlineY = baselineY + 2
context.setStrokeColor(color.cgColor)
context.setLineWidth(0.5)
context.move(to: CGPoint(x: line.origin.x + origin.x, y: underlineY))
context.addLine(to: CGPoint(x: line.origin.x + origin.x + runWidth, y: underlineY))
context.strokePath()
}
}
}

View File

@@ -1,3 +1,4 @@
import Combine
import SwiftUI
import UIKit
import Photos
@@ -22,36 +23,103 @@ struct ImageViewerState: Equatable {
let sourceFrame: CGRect
}
// MARK: - GalleryDismissPanCoordinator
/// Manages the vertical pan gesture for gallery dismiss.
/// Attached to the hosting controller's view (NOT as a SwiftUI overlay) so it
/// doesn't block SwiftUI gestures (pinch zoom, taps) on the content below.
/// Previous approach — `HeroPanGestureOverlay` (UIViewRepresentable overlay) — blocked
/// all SwiftUI gestures at the hit-test level: pinch zoom and double-tap never worked.
final class GalleryDismissPanCoordinator: NSObject, ObservableObject, UIGestureRecognizerDelegate {
    /// Current vertical drag offset during the dismiss gesture.
    @Published var dragOffset: CGSize = .zero
    /// Flips on every pan-end event — observe with `.onChange` to react.
    @Published private(set) var panEndSignal: Bool = false
    /// Vertical velocity (pt/s) captured at the moment the pan ended.
    private(set) var endVelocityY: CGFloat = 0
    /// Set to false to suspend the dismiss gesture (e.g. while zoomed in).
    var isEnabled: Bool = true

    @objc func handlePan(_ gesture: UIPanGestureRecognizer) {
        guard isEnabled else {
            // Cancel a pan that begins while disabled so it never tracks.
            if gesture.state == .began { gesture.state = .cancelled }
            return
        }
        switch gesture.state {
        case .began, .changed:
            // Vertical only — Telegram parity (no diagonal drag).
            let translation = gesture.translation(in: gesture.view)
            dragOffset = CGSize(width: 0, height: translation.y)
        case .ended, .cancelled:
            endVelocityY = gesture.velocity(in: gesture.view).y
            panEndSignal.toggle()
        default:
            break
        }
    }

    /// Begin only for predominantly downward drags.
    func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
        guard isEnabled,
              let pan = gestureRecognizer as? UIPanGestureRecognizer
        else { return false }
        let velocity = pan.velocity(in: pan.view)
        return velocity.y > abs(velocity.x)
    }

    /// Recognize together with non-pan gestures only (pinch, taps).
    func gestureRecognizer(
        _ gestureRecognizer: UIGestureRecognizer,
        shouldRecognizeSimultaneouslyWith other: UIGestureRecognizer
    ) -> Bool {
        !(other is UIPanGestureRecognizer)
    }
}
// MARK: - ImageViewerPresenter
/// UIHostingController subclass that hides the status bar.
/// Uses `AnyView` instead of generic `Content` to avoid a Swift compiler crash
/// in the SIL inliner (SR-XXXXX / rdar://XXXXX).
private final class StatusBarHiddenHostingController: UIHostingController<AnyView> {
    // Keep the status bar hidden for the full-screen gallery presentation.
    override var prefersStatusBarHidden: Bool { true }
    // Fade matches the gallery's fade-in/out; avoids a sliding-bar artifact.
    override var preferredStatusBarUpdateAnimation: UIStatusBarAnimation { .fade }
}
/// Presents the image gallery viewer using UIKit `overFullScreen` presentation
/// no bottom-sheet slide-up. Appears instantly; the viewer itself fades in.
/// Presents the image gallery viewer using UIKit `overFullScreen` presentation.
/// Telegram parity: the viewer appears as a fade overlay covering nav bar and tab bar.
@MainActor
final class ImageViewerPresenter {
static let shared = ImageViewerPresenter()
private weak var presentedController: UIViewController?
private var panCoordinator: GalleryDismissPanCoordinator?
func present(state: ImageViewerState) {
guard presentedController == nil else { return }
let viewer = ImageGalleryViewer(state: state, onDismiss: { [weak self] in
self?.dismiss()
})
let coordinator = GalleryDismissPanCoordinator()
panCoordinator = coordinator
let viewer = ImageGalleryViewer(
state: state,
panCoordinator: coordinator,
onDismiss: { [weak self] in self?.dismiss() }
)
let hostingController = StatusBarHiddenHostingController(rootView: AnyView(viewer))
hostingController.modalPresentationStyle = .overFullScreen
hostingController.view.backgroundColor = .clear
// Pan gesture on hosting controller's view NOT a SwiftUI overlay.
// UIKit gesture recognizers on a hosting view coexist with SwiftUI gestures
// on child views (pinch, taps, TabView swipe) without blocking them.
let pan = UIPanGestureRecognizer(
target: coordinator,
action: #selector(GalleryDismissPanCoordinator.handlePan)
)
pan.minimumNumberOfTouches = 1
pan.maximumNumberOfTouches = 1
pan.delegate = coordinator
hostingController.view.addGestureRecognizer(pan)
guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene,
let root = windowScene.keyWindow?.rootViewController
else { return }
@@ -65,6 +133,7 @@ final class ImageViewerPresenter {
}
/// Tears down the presented gallery. Called via the viewer's `onDismiss` after
/// its own fade/hero close animation has finished.
func dismiss() {
    // Release the coordinator first so a late gesture callback can't mutate state.
    panCoordinator = nil
    // Not animated: the SwiftUI viewer already performed its closing animation.
    presentedController?.dismiss(animated: false)
    presentedController = nil
}
@@ -73,11 +142,12 @@ final class ImageViewerPresenter {
// MARK: - ImageGalleryViewer
/// Multi-photo gallery viewer with hero transition animation.
/// Adapted 1:1 from PhotosTransition/Helpers/PhotoGridView.swift DetailPhotosView.
/// Hero positioning is per-page INSIDE ForEach (not on TabView).
/// Telegram parity: hero open/close, vertical-only interactive dismiss,
/// slide-in panels, counter below name capsule, pinch zoom, double-tap zoom.
struct ImageGalleryViewer: View {
let state: ImageViewerState
@ObservedObject var panCoordinator: GalleryDismissPanCoordinator
let onDismiss: () -> Void
@State private var currentPage: Int
@@ -85,17 +155,27 @@ struct ImageGalleryViewer: View {
@State private var currentZoomScale: CGFloat = 1.0
@State private var isDismissing = false
@State private var isExpanded: Bool = false
@State private var dragOffset: CGSize = .zero
@State private var viewSize: CGSize = UIScreen.main.bounds.size
private static let dateFormatter: DateFormatter = {
let formatter = DateFormatter()
formatter.dateFormat = "d MMMM, HH:mm"
return formatter
let f = DateFormatter()
f.dateStyle = .none
f.timeStyle = .short
f.doesRelativeDateFormatting = true
return f
}()
init(state: ImageViewerState, onDismiss: @escaping () -> Void) {
private static let relativeDateFormatter: DateFormatter = {
let f = DateFormatter()
f.dateStyle = .medium
f.timeStyle = .none
f.doesRelativeDateFormatting = true
return f
}()
init(state: ImageViewerState, panCoordinator: GalleryDismissPanCoordinator, onDismiss: @escaping () -> Void) {
self.state = state
self.panCoordinator = panCoordinator
self.onDismiss = onDismiss
self._currentPage = State(initialValue: state.initialIndex)
}
@@ -108,64 +188,76 @@ struct ImageGalleryViewer: View {
.interpolatingSpring(duration: 0.3, bounce: 0, initialVelocity: 0)
}
private var interactiveOpacity: CGFloat {
let opacityY = abs(dragOffset.height) / (viewSize.height * 0.3)
return isExpanded ? max(1 - opacityY, 0) : 0
/// Background opacity: fades to clear over an 80pt drag (Telegram: `abs(distance) / 80`).
/// Returns 0 until the hero open animation has expanded (`isExpanded == false`).
private var backgroundOpacity: CGFloat {
    let progress = min(abs(panCoordinator.dragOffset.height) / 80, 1)
    return isExpanded ? max(1 - progress, 0) : 0
}

/// Overlay/toolbar opacity: fades faster than the background — fully gone after
/// 50pt of drag (Telegram: `abs(distance) / 50`).
private var overlayDragOpacity: CGFloat {
    1 - min(abs(panCoordinator.dragOffset.height) / 50, 1)
}
/// Builds the top-panel subtitle, e.g. "Today at 14:32": relative day from
/// `relativeDateFormatter`, short time from `dateFormatter`.
private func formattedDate(_ date: Date) -> String {
    "\(Self.relativeDateFormatter.string(from: date)) at \(Self.dateFormatter.string(from: date))"
}
// MARK: - Body
var body: some View {
let sourceFrame = state.sourceFrame
// Hero positioning per-page inside ForEach matches reference exactly
TabView(selection: $currentPage) {
ForEach(Array(state.images.enumerated()), id: \.element.attachmentId) { index, info in
ZoomableImagePage(
attachmentId: info.attachmentId,
onDismiss: { dismissAction() },
showControls: $showControls,
currentScale: $currentZoomScale,
onEdgeTap: { direction in navigateEdgeTap(direction: direction) }
)
.frame(
width: isExpanded ? viewSize.width : sourceFrame.width,
height: isExpanded ? viewSize.height : sourceFrame.height
)
.clipped()
.offset(
x: isExpanded ? 0 : sourceFrame.minX,
y: isExpanded ? 0 : sourceFrame.minY
)
.offset(dragOffset)
.frame(
maxWidth: .infinity,
maxHeight: .infinity,
alignment: isExpanded ? .center : .topLeading
)
.tag(index)
.ignoresSafeArea()
GeometryReader { geometry in
let size = geometry.size
TabView(selection: $currentPage) {
ForEach(Array(state.images.enumerated()), id: \.element.attachmentId) { index, info in
// Hero frame/offset only on the initial page other pages are
// always fullscreen. Prevents glitch where lazily-created pages
// briefly render at sourceFrame size during TabView swipe.
let isHeroPage = index == state.initialIndex
let heroActive = isHeroPage && !isExpanded
ZoomableImagePage(
attachmentId: info.attachmentId,
onDismiss: { dismissAction() },
showControls: $showControls,
currentScale: $currentZoomScale,
onEdgeTap: { direction in navigateEdgeTap(direction: direction) }
)
.frame(
width: heroActive ? sourceFrame.width : size.width,
height: heroActive ? sourceFrame.height : size.height
)
.clipped()
.offset(
x: heroActive ? sourceFrame.minX : 0,
y: heroActive ? sourceFrame.minY : 0
)
.offset(panCoordinator.dragOffset)
.frame(
maxWidth: .infinity,
maxHeight: .infinity,
alignment: heroActive ? .topLeading : .center
)
.tag(index)
}
}
.tabViewStyle(.page(indexDisplayMode: .never))
.scrollDisabled(currentZoomScale > 1.05 || isDismissing)
.contentShape(Rectangle())
.overlay { galleryOverlay }
.background {
Color.black
.opacity(backgroundOpacity)
}
.allowsHitTesting(isExpanded)
.onAppear { viewSize = size }
}
.tabViewStyle(.page(indexDisplayMode: .never))
.ignoresSafeArea()
.scrollDisabled(currentZoomScale > 1.05 || isDismissing)
.contentShape(Rectangle())
.overlay {
// Pan gesture overlay UIKit gesture for iOS 17+ compat
HeroPanGestureOverlay { gesture in
handlePanGesture(gesture)
}
.allowsHitTesting(isExpanded && currentZoomScale <= 1.05)
}
.overlay {
overlayActions
}
.background {
Color.black
.opacity(interactiveOpacity)
.opacity(isExpanded ? 1 : 0)
.ignoresSafeArea()
}
.allowsHitTesting(isExpanded)
.statusBarHidden(true)
.task {
prefetchAdjacentImages(around: state.initialIndex)
@@ -177,96 +269,133 @@ struct ImageGalleryViewer: View {
.onChange(of: currentPage) { _, newPage in
prefetchAdjacentImages(around: newPage)
}
.onChange(of: currentZoomScale) { _, newScale in
panCoordinator.isEnabled = newScale <= 1.05
}
.onChange(of: panCoordinator.panEndSignal) { _, _ in
handlePanEnd()
}
}
// MARK: - Pan Gesture
// MARK: - Pan End Handler
private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
let panState = gesture.state
let translation = gesture.translation(in: gesture.view)
if panState == .began || panState == .changed {
dragOffset = .init(width: translation.x, height: translation.y)
private func handlePanEnd() {
let offsetY = panCoordinator.dragOffset.height
let velocityY = panCoordinator.endVelocityY
// Telegram parity: dismiss on 50pt drag OR fast downward flick (>1000 pt/s).
if offsetY > 50 || velocityY > 1000 {
dismissAction()
} else {
if dragOffset.height > 50 {
heroDismiss()
} else {
withAnimation(heroAnimation.speed(1.2)) {
dragOffset = .zero
}
withAnimation(heroAnimation.speed(1.2)) {
panCoordinator.dragOffset = .zero
}
}
}
// MARK: - Overlay Actions (matches PhotosTransition/ContentView.swift OverlayActionView)
// MARK: - Gallery Overlay (Telegram parity)
@ViewBuilder
private var overlayActions: some View {
let overlayOpacity: CGFloat = 1 - min(abs(dragOffset.height / 30), 1)
if showControls && !isDismissing && isExpanded {
VStack {
// Top actions
HStack {
glassButton(systemName: "chevron.left") { dismissAction() }
Spacer(minLength: 0)
private var galleryOverlay: some View {
if !isDismissing && isExpanded {
ZStack {
// Top panel slides DOWN from above on show, UP on hide
VStack(spacing: 4) {
topPanel
if state.images.count > 1 {
glassLabel("\(currentPage + 1) / \(state.images.count)")
counterBadge
}
Spacer()
}
.overlay {
if let info = currentInfo {
glassLabel(info.senderName)
.contentTransition(.numericText())
.animation(.easeInOut, value: currentPage)
.frame(maxWidth: .infinity)
}
}
Spacer(minLength: 0)
// Bottom actions
HStack {
glassButton(systemName: "square.and.arrow.up.fill") { shareCurrentImage() }
Spacer(minLength: 0)
glassButton(systemName: "square.and.arrow.down") { saveCurrentImage() }
.frame(maxWidth: .infinity)
.offset(y: showControls ? 0 : -120)
.allowsHitTesting(showControls)
// Bottom panel slides UP from below on show, DOWN on hide
VStack {
Spacer()
bottomPanel
}
.offset(y: showControls ? 0 : 120)
.allowsHitTesting(showControls)
}
.padding(.horizontal, 15)
.compositingGroup()
.opacity(overlayOpacity)
.opacity(overlayDragOpacity)
.animation(.spring(response: 0.3, dampingFraction: 0.85), value: showControls)
.environment(\.colorScheme, .dark)
.transition(.opacity)
.animation(.easeOut(duration: 0.2), value: showControls)
}
}
// MARK: - Glass Button / Label helpers
// MARK: - Top Panel
private func glassButton(systemName: String, action: @escaping () -> Void) -> some View {
/// Top overlay panel: back + overflow buttons with a centered sender-info capsule.
private var topPanel: some View {
    HStack {
        glassCircleButton(systemName: "chevron.left") { dismissAction() }
        Spacer(minLength: 0)
        // Overflow menu — action not implemented yet. TODO: wire up.
        glassCircleButton(systemName: "ellipsis") { }
    }
    .overlay {
        // Centered capsule: sender name + timestamp of the current photo.
        if let info = currentInfo {
            VStack(spacing: 2) {
                Text(info.senderName)
                    .font(.system(size: 17, weight: .semibold))
                    .foregroundStyle(.white)
                    .lineLimit(1)
                Text(formattedDate(info.timestamp))
                    .font(.system(size: 12))
                    .foregroundStyle(.white.opacity(0.6))
            }
            .padding(.horizontal, 14)
            .padding(.vertical, 8)
            .background { TelegramGlassCapsule() }
            // Animate name/date changes when paging between photos.
            .contentTransition(.numericText())
            .animation(.easeInOut, value: currentPage)
        }
    }
    .padding(.horizontal, 15)
    .padding(.top, 4)
}
// MARK: - Counter Badge (below name capsule)

/// "N of M" pill shown below the name capsule when the gallery has multiple photos.
private var counterBadge: some View {
    Text("\(currentPage + 1) of \(state.images.count)")
        .font(.system(size: 12, weight: .semibold))
        .foregroundStyle(.white)
        .padding(.horizontal, 12)
        .padding(.vertical, 4)
        .background { TelegramGlassCapsule() }
        // Roll digits when paging between photos.
        .contentTransition(.numericText())
        .animation(.easeInOut, value: currentPage)
}

// MARK: - Bottom Panel

/// Bottom toolbar: forward, share, save, delete.
private var bottomPanel: some View {
    HStack {
        // Forward — action not implemented yet. TODO: wire up.
        glassCircleButton(systemName: "arrowshape.turn.up.right") { }
        Spacer(minLength: 0)
        glassCircleButton(systemName: "square.and.arrow.up") { shareCurrentImage() }
        Spacer(minLength: 0)
        glassCircleButton(systemName: "square.and.arrow.down") { saveCurrentImage() }
        Spacer(minLength: 0)
        // Delete — action not implemented yet. TODO: wire up.
        glassCircleButton(systemName: "trash") { }
    }
    .padding(.horizontal, 15)
    .padding(.bottom, 8)
}
// MARK: - Glass Circle Button
private func glassCircleButton(systemName: String, action: @escaping () -> Void) -> some View {
Button(action: action) {
Image(systemName: systemName)
.font(.title3)
.font(.system(size: 17, weight: .medium))
.foregroundStyle(.white)
.frame(width: 36, height: 36)
.frame(width: 44, height: 44)
}
.background { TelegramGlassCircle() }
}
private func glassLabel(_ text: String) -> some View {
Text(text)
.font(.callout)
.foregroundStyle(.white)
.lineLimit(1)
.padding(.horizontal, 15)
.padding(.vertical, 10)
.background { TelegramGlassCapsule() }
}
// MARK: - Navigation
private func navigateEdgeTap(direction: Int) {
@@ -278,7 +407,9 @@ struct ImageGalleryViewer: View {
// MARK: - Dismiss
private func dismissAction() {
if currentZoomScale > 1.05 {
// Telegram parity: hero-back only for the initially-tapped photo.
// If user paged away, the sourceFrame belongs to a different thumbnail fade instead.
if currentZoomScale > 1.05 || currentPage != state.initialIndex {
fadeDismiss()
} else {
heroDismiss()
@@ -288,10 +419,11 @@ struct ImageGalleryViewer: View {
private func heroDismiss() {
guard !isDismissing else { return }
isDismissing = true
panCoordinator.isEnabled = false
Task {
withAnimation(heroAnimation.speed(1.2)) {
dragOffset = .zero
panCoordinator.dragOffset = .zero
isExpanded = false
}
try? await Task.sleep(for: .seconds(0.35))
@@ -302,12 +434,15 @@ struct ImageGalleryViewer: View {
private func fadeDismiss() {
guard !isDismissing else { return }
isDismissing = true
panCoordinator.isEnabled = false
withAnimation(.easeOut(duration: 0.2)) {
isExpanded = false
// Slide down + fade out via dragOffset (drives backgroundOpacity toward 0).
// Do NOT set isExpanded=false that collapses to sourceFrame (wrong thumbnail).
withAnimation(.easeOut(duration: 0.25)) {
panCoordinator.dragOffset = CGSize(width: 0, height: viewSize.height * 0.4)
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.22) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.27) {
onDismiss()
}
}
@@ -362,70 +497,3 @@ struct ImageGalleryViewer: View {
}
}
}
// MARK: - HeroPanGestureOverlay
/// Transparent UIView overlay with UIPanGestureRecognizer for vertical hero dismiss.
/// Uses UIKit gesture (not UIGestureRecognizerRepresentable) for iOS 17+ compat.
/// Matches PanGesture from PhotosTransition reference vertical only, single touch.
// NOTE(review): superseded by GalleryDismissPanCoordinator (see its doc comment) —
// appears unused after the migration; consider removing.
private struct HeroPanGestureOverlay: UIViewRepresentable {
    // Invoked for every pan state change; the SwiftUI side interprets the gesture.
    var onPan: (UIPanGestureRecognizer) -> Void

    func makeUIView(context: Context) -> UIView {
        let view = UIView()
        view.backgroundColor = .clear
        let pan = UIPanGestureRecognizer(target: context.coordinator, action: #selector(Coordinator.handlePan(_:)))
        pan.minimumNumberOfTouches = 1
        pan.maximumNumberOfTouches = 1
        pan.delegate = context.coordinator
        view.addGestureRecognizer(pan)
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        // Keep the latest closure — SwiftUI recreates the struct on each render.
        context.coordinator.onPan = onPan
    }

    func makeCoordinator() -> Coordinator { Coordinator(onPan: onPan) }

    final class Coordinator: NSObject, UIGestureRecognizerDelegate {
        var onPan: (UIPanGestureRecognizer) -> Void

        init(onPan: @escaping (UIPanGestureRecognizer) -> Void) {
            self.onPan = onPan
        }

        @objc func handlePan(_ gesture: UIPanGestureRecognizer) {
            onPan(gesture)
        }

        // Only begin for downward vertical drags.
        func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
            guard let pan = gestureRecognizer as? UIPanGestureRecognizer else { return false }
            let velocity = pan.velocity(in: pan.view)
            return velocity.y > abs(velocity.x)
        }

        // Let TabView scroll pass through when its scroll view is at the top.
        func gestureRecognizer(
            _ gestureRecognizer: UIGestureRecognizer,
            shouldBeRequiredToFailBy otherGestureRecognizer: UIGestureRecognizer
        ) -> Bool {
            if let scrollView = otherGestureRecognizer.view as? UIScrollView {
                return scrollView.contentOffset.y <= 0
            }
            return false
        }

        // Allow simultaneous recognition with non-pan gestures (taps, pinch, etc.).
        func gestureRecognizer(
            _ gestureRecognizer: UIGestureRecognizer,
            shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer
        ) -> Bool {
            return !(otherGestureRecognizer is UIPanGestureRecognizer)
        }
    }
}

View File

@@ -102,8 +102,8 @@ final class NativeMessageCell: UICollectionViewCell {
case .dark, .light:
return traitCollection.userInterfaceStyle
default:
let prefersDark = UserDefaults.standard.object(forKey: "rosetta_dark_mode") as? Bool ?? true
return prefersDark ? .dark : .light
let themeMode = UserDefaults.standard.string(forKey: "rosetta_theme_mode") ?? "dark"
return themeMode == "light" ? .light : .dark
}
}
private static let blurHashCache: NSCache<NSString, UIImage> = {
@@ -519,6 +519,10 @@ final class NativeMessageCell: UICollectionViewCell {
longPress.minimumPressDuration = 0.35
bubbleView.addGestureRecognizer(longPress)
// Single tap open link if tapped on a URL
let linkTap = UITapGestureRecognizer(target: self, action: #selector(handleLinkTap(_:)))
bubbleView.addGestureRecognizer(linkTap)
let pan = UIPanGestureRecognizer(target: self, action: #selector(handleSwipe(_:)))
pan.delegate = self
contentView.addGestureRecognizer(pan)
@@ -546,7 +550,7 @@ final class NativeMessageCell: UICollectionViewCell {
let isOutgoing = currentLayout?.isOutgoing ?? false
let isMediaStatus: Bool = {
guard let type = currentLayout?.messageType else { return false }
return type == .photo || type == .photoWithCaption
return type == .photo || type == .photoWithCaption || type == .emojiOnly
}()
// Text use cached CoreTextTextLayout from measurement phase.
@@ -907,6 +911,12 @@ final class NativeMessageCell: UICollectionViewCell {
bubbleOutlineLayer.strokeColor = UIColor.clear.cgColor
}
// Emoji-only: hide bubble visuals (no background, just floating emoji)
let isEmojiOnly = layout.messageType == .emojiOnly
bubbleImageView.isHidden = isEmojiOnly
bubbleLayer.isHidden = isEmojiOnly
bubbleOutlineLayer.isHidden = isEmojiOnly
// Text
textLabel.isHidden = layout.textSize == .zero
textLabel.frame = layout.textFrame
@@ -1287,6 +1297,18 @@ final class NativeMessageCell: UICollectionViewCell {
return attrs
}
    // MARK: - Link Tap
    /// Opens the URL under the tap point, if the tap landed on a link.
    /// The tap location is converted into `textLabel`'s coordinate space and
    /// resolved through the cached text layout's link hit-test.
    @objc private func handleLinkTap(_ gesture: UITapGestureRecognizer) {
        let pointInText = gesture.location(in: textLabel)
        guard let url = textLabel.textLayout?.linkAt(point: pointInText) else { return }
        // Scheme-less links (e.g. "example.com") would not open in the system
        // browser — default them to https, keeping the original on failure.
        var finalURL = url
        if finalURL.scheme == nil || finalURL.scheme?.isEmpty == true {
            finalURL = URL(string: "https://\(url.absoluteString)") ?? url
        }
        UIApplication.shared.open(finalURL)
    }
// MARK: - Context Menu (Telegram-style)
private let contextMenuHaptic = UIImpactFeedbackGenerator(style: .medium)
@@ -2257,7 +2279,7 @@ final class NativeMessageCell: UICollectionViewCell {
return
}
// Telegram uses a dedicated status background on media messages.
statusBackgroundView.isHidden = layout.messageType != .photo
statusBackgroundView.isHidden = layout.messageType != .photo && layout.messageType != .emojiOnly
}
private func updateStatusBackgroundFrame() {
@@ -2287,6 +2309,8 @@ final class NativeMessageCell: UICollectionViewCell {
#if DEBUG
private func assertStatusLaneFramesValid(layout: MessageCellLayout) {
// emojiOnly has no visible bubble status pill floats below emoji
guard layout.messageType != .emojiOnly else { return }
let bubbleBounds = CGRect(origin: .zero, size: layout.bubbleSize)
let frames = [
("timestamp", layout.timestampFrame),
@@ -2340,6 +2364,9 @@ final class NativeMessageCell: UICollectionViewCell {
wasSentCheckVisible = false
wasReadCheckVisible = false
statusBackgroundView.isHidden = true
bubbleImageView.isHidden = false
bubbleLayer.isHidden = false
bubbleOutlineLayer.isHidden = false
resetPhotoTiles()
replyContainer.isHidden = true
replyMessageId = nil

View File

@@ -976,7 +976,8 @@ final class NativeMessageListController: UIViewController {
textLayoutCache.removeAll()
return
}
let isDark = UserDefaults.standard.object(forKey: "rosetta_dark_mode") as? Bool ?? true
let themeMode = UserDefaults.standard.string(forKey: "rosetta_theme_mode") ?? "dark"
let isDark = themeMode != "light"
let (layouts, textLayouts) = MessageCellLayout.batchCalculate(
messages: messages,
maxBubbleWidth: config.maxBubbleWidth,

View File

@@ -124,358 +124,3 @@ struct ZoomableImagePage: View {
}
}
// MARK: - UIViewRepresentable
/// Wraps `ImageGestureContainerView` for SwiftUI integration.
/// Wraps `ImageGestureContainerView` for SwiftUI integration.
/// All gesture callbacks flow from SwiftUI state into the UIKit view; the
/// same wiring is applied on creation and on every SwiftUI update so the
/// closures always capture the latest view state.
private struct ZoomableImageUIViewRepresentable: UIViewRepresentable {
    let image: UIImage
    let onDismiss: () -> Void
    let onDismissProgress: (CGFloat) -> Void
    let onDismissCancel: () -> Void
    let onToggleControls: () -> Void
    let onScaleChanged: (CGFloat) -> Void
    let onEdgeTap: ((Int) -> Void)?

    func makeUIView(context: Context) -> ImageGestureContainerView {
        let containerView = ImageGestureContainerView(image: image)
        wireCallbacks(into: containerView)
        return containerView
    }

    func updateUIView(_ view: ImageGestureContainerView, context: Context) {
        wireCallbacks(into: view)
    }

    /// Copies every callback closure onto the UIKit container view.
    private func wireCallbacks(into containerView: ImageGestureContainerView) {
        containerView.onDismiss = onDismiss
        containerView.onDismissProgress = onDismissProgress
        containerView.onDismissCancel = onDismissCancel
        containerView.onToggleControls = onToggleControls
        containerView.onScaleChanged = onScaleChanged
        containerView.onEdgeTap = onEdgeTap
    }
}
// MARK: - ImageGestureContainerView

/// UIKit view that handles all image gestures with full control:
/// - Centroid-based pinch zoom (1x–5x)
/// - Double-tap to zoom to tap point (2.5x) or reset
/// - Pan when zoomed (with offset clamping)
/// - Vertical drag to dismiss with velocity tracking
/// - Single tap: edge zones navigate, center toggles controls
/// - Axis locking: decides vertical dismiss vs pan early
///
/// Android parity: `ZoomableImage` in `ImageViewerScreen.kt`
final class ImageGestureContainerView: UIView, UIGestureRecognizerDelegate {
    // MARK: - Configuration

    /// Zoom bounds. Pinch may transiently undershoot `minScale` (down to 0.5x)
    /// during the gesture and snaps back on release.
    private let minScale: CGFloat = 1.0
    private let maxScale: CGFloat = 5.0
    /// Target scale applied by a double tap when currently near 1x.
    private let doubleTapScale: CGFloat = 2.5
    // NOTE(review): the three thresholds below appear unused in this class —
    // the dismiss gesture moved to the SwiftUI DragGesture wrapper (see
    // setupGestures). Confirm and consider removing.
    private let dismissDistanceThreshold: CGFloat = 100
    private let dismissVelocityThreshold: CGFloat = 500
    private let touchSlop: CGFloat = 20
    /// Android: left/right 20% zones are edge-tap navigation areas.
    private let edgeTapFraction: CGFloat = 0.20
    /// Android: spring(dampingRatio = 0.9, stiffness = 400) -> UIKit(damping: 0.9, velocity: 0)
    private let springDamping: CGFloat = 0.9
    private let springDuration: CGFloat = 0.35

    // MARK: - Subviews

    private let imageView = UIImageView()
    // NOTE(review): never assigned — the UIKit pan recognizer was removed in
    // favor of the SwiftUI DragGesture; dead storage, consider removing.
    private var panGesture: UIPanGestureRecognizer?

    // MARK: - Transform State

    /// Current zoom factor applied to `imageView`.
    private var currentScale: CGFloat = 1.0
    /// Current translation (in view points) applied before scaling.
    private var currentOffset: CGPoint = .zero
    /// Extra vertical translation while a dismiss drag is in flight.
    private var dismissOffset: CGFloat = 0
    // Pinch gesture tracking — snapshot of scale/offset at .began so the
    // centroid math can be expressed relative to gesture start.
    private var pinchStartScale: CGFloat = 1.0
    private var pinchStartOffset: CGPoint = .zero
    private var lastPinchCentroid: CGPoint = .zero
    // Pan gesture tracking (see NOTE on handlePan below).
    private var panStartOffset: CGPoint = .zero
    private var isDismissGesture = false
    private var gestureAxisLocked = false

    // MARK: - Callbacks

    var onDismiss: (() -> Void)?
    var onDismissProgress: ((CGFloat) -> Void)?
    var onDismissCancel: (() -> Void)?
    var onToggleControls: (() -> Void)?
    var onScaleChanged: ((CGFloat) -> Void)?
    /// -1 = left edge, 1 = right edge
    var onEdgeTap: ((Int) -> Void)?

    // MARK: - Init

    init(image: UIImage) {
        super.init(frame: .zero)
        imageView.image = image
        imageView.contentMode = .scaleAspectFit
        // All touches are handled by recognizers on self — the image view
        // itself must not swallow them.
        imageView.isUserInteractionEnabled = false
        addSubview(imageView)
        clipsToBounds = true
        setupGestures()
    }
    @available(*, unavailable)
    required init?(coder: NSCoder) { fatalError() }

    // MARK: - Layout

    /// Track the last laid-out size so we only reset frame when it actually changes.
    /// Without this, SwiftUI state changes (e.g. `onDismissProgress`) trigger
    /// `layoutSubviews` -> `imageView.frame = bounds` which RESETS the UIKit transform,
    /// causing the image to snap back during dismiss drag.
    private var lastLayoutSize: CGSize = .zero
    override func layoutSubviews() {
        super.layoutSubviews()
        guard lastLayoutSize != bounds.size else { return }
        lastLayoutSize = bounds.size
        // Temporarily reset transform, update frame, then re-apply.
        // Setting frame while a transform is active has undefined results.
        let savedTransform = imageView.transform
        imageView.transform = .identity
        imageView.frame = bounds
        imageView.transform = savedTransform
    }

    // MARK: - Gesture Setup

    private func setupGestures() {
        let pinch = UIPinchGestureRecognizer(target: self, action: #selector(handlePinch))
        pinch.delegate = self
        addGestureRecognizer(pinch)
        // Pan gesture REMOVED — replaced by SwiftUI DragGesture on the wrapper view.
        // UIKit UIPanGestureRecognizer on UIViewRepresentable intercepts ALL touches
        // before SwiftUI TabView gets them, preventing page swipe navigation.
        // SwiftUI DragGesture cooperates with TabView natively.
        let doubleTap = UITapGestureRecognizer(target: self, action: #selector(handleDoubleTap))
        doubleTap.numberOfTapsRequired = 2
        addGestureRecognizer(doubleTap)
        let singleTap = UITapGestureRecognizer(target: self, action: #selector(handleSingleTap))
        singleTap.numberOfTapsRequired = 1
        // Single tap waits for double tap to fail so both can coexist.
        singleTap.require(toFail: doubleTap)
        addGestureRecognizer(singleTap)
    }

    // MARK: - Apply Transform

    /// Rebuilds and applies the translate+scale transform from current state,
    /// optionally inside a spring animation.
    private func applyTransform(animated: Bool = false) {
        // Guard against NaN/Infinity — prevents CoreGraphics crash and UI freeze.
        if currentScale.isNaN || currentScale.isInfinite { currentScale = 1.0 }
        if currentOffset.x.isNaN || currentOffset.x.isInfinite { currentOffset.x = 0 }
        if currentOffset.y.isNaN || currentOffset.y.isInfinite { currentOffset.y = 0 }
        if dismissOffset.isNaN || dismissOffset.isInfinite { dismissOffset = 0 }
        // Order matters: translate first, then scale about the view center.
        let transform = CGAffineTransform.identity
            .translatedBy(x: currentOffset.x, y: currentOffset.y + dismissOffset)
            .scaledBy(x: currentScale, y: currentScale)
        if animated {
            UIView.animate(
                withDuration: springDuration,
                delay: 0,
                usingSpringWithDamping: springDamping,
                initialSpringVelocity: 0,
                options: [.curveEaseOut]
            ) {
                self.imageView.transform = transform
            }
        } else {
            imageView.transform = transform
        }
    }

    // MARK: - Pinch Gesture (Centroid Zoom)

    /// Zooms about the fingers' centroid so the point under the fingers stays
    /// visually stationary while scaling.
    @objc private func handlePinch(_ gesture: UIPinchGestureRecognizer) {
        switch gesture.state {
        case .began:
            pinchStartScale = currentScale
            pinchStartOffset = currentOffset
            if gesture.numberOfTouches >= 2 {
                lastPinchCentroid = gesture.location(in: self)
            }
        case .changed:
            // Allow transient undershoot to 0.5x for a rubber-band feel.
            let newScale = min(max(pinchStartScale * gesture.scale, minScale * 0.5), maxScale)
            // Centroid-based zoom: keep the point under fingers stationary
            if gesture.numberOfTouches >= 2 {
                let centroid = gesture.location(in: self)
                let viewCenter = CGPoint(x: bounds.midX, y: bounds.midY)
                // Centroid relative to view center (transform's scaling origin).
                let gesturePoint = CGPoint(x: centroid.x - viewCenter.x, y: centroid.y - viewCenter.y)
                let safeCurrentScale = max(currentScale, 0.01)
                let scaleRatio = newScale / safeCurrentScale
                guard scaleRatio.isFinite else { break }
                currentOffset = CGPoint(
                    x: gesturePoint.x - (gesturePoint.x - currentOffset.x) * scaleRatio,
                    y: gesturePoint.y - (gesturePoint.y - currentOffset.y) * scaleRatio
                )
                lastPinchCentroid = centroid
            }
            currentScale = newScale
            onScaleChanged?(currentScale)
            applyTransform()
        case .ended, .cancelled:
            if currentScale < minScale + 0.05 {
                // Snap back to 1x
                currentScale = minScale
                currentOffset = .zero
                onScaleChanged?(minScale)
                applyTransform(animated: true)
            } else {
                clampOffset(animated: true)
            }
        default: break
        }
    }

    // MARK: - Pan Gesture (Pan when zoomed ONLY)
    // Dismiss gesture moved to SwiftUI DragGesture on ZoomableImagePage wrapper
    // to allow TabView page swipe to work.
    // NOTE(review): no recognizer is wired to this selector in setupGestures —
    // dead code unless re-attached elsewhere. Also, setting `gesture.state`
    // from outside a recognizer subclass is subclass-only API per UIKit docs;
    // confirm intent if this path is revived.
    @objc private func handlePan(_ gesture: UIPanGestureRecognizer) {
        // Only handle pan when zoomed — dismiss is handled by SwiftUI DragGesture
        guard currentScale > 1.05 else {
            gesture.state = .cancelled
            return
        }
        let translation = gesture.translation(in: self)
        switch gesture.state {
        case .began:
            panStartOffset = currentOffset
        case .changed:
            currentOffset = CGPoint(
                x: panStartOffset.x + translation.x,
                y: panStartOffset.y + translation.y
            )
            applyTransform()
        case .ended, .cancelled:
            clampOffset(animated: true)
            gestureAxisLocked = false
        default: break
        }
    }

    // MARK: - Double Tap (Zoom to tap point)

    /// Toggles between 1x and `doubleTapScale`, zooming toward the tap point.
    @objc private func handleDoubleTap(_ gesture: UITapGestureRecognizer) {
        let tapPoint = gesture.location(in: self)
        let viewCenter = CGPoint(x: bounds.midX, y: bounds.midY)
        if currentScale > 1.1 {
            // Zoom out to 1x
            currentScale = minScale
            currentOffset = .zero
            onScaleChanged?(minScale)
            applyTransform(animated: true)
        } else {
            // Zoom in to tap point at 2.5x (Android: tapX - tapX * targetScale)
            let tapX = tapPoint.x - viewCenter.x
            let tapY = tapPoint.y - viewCenter.y
            currentScale = doubleTapScale
            currentOffset = CGPoint(
                x: tapX - tapX * doubleTapScale,
                y: tapY - tapY * doubleTapScale
            )
            // Clamp before animating so the spring lands inside bounds.
            clampOffsetImmediate()
            onScaleChanged?(doubleTapScale)
            applyTransform(animated: true)
        }
    }

    // MARK: - Single Tap (Edge navigation or toggle controls)

    @objc private func handleSingleTap(_ gesture: UITapGestureRecognizer) {
        guard currentScale <= 1.05 else {
            // When zoomed, single tap always toggles controls
            onToggleControls?()
            return
        }
        let tapX = gesture.location(in: self).x
        let width = bounds.width
        let edgeZone = width * edgeTapFraction
        if tapX < edgeZone {
            onEdgeTap?(-1) // Previous
        } else if tapX > width - edgeZone {
            onEdgeTap?(1) // Next
        } else {
            onToggleControls?()
        }
    }

    // MARK: - Offset Clamping

    /// Clamps the pan offset so the scaled image cannot be dragged fully
    /// off-screen; resets offset entirely when not zoomed.
    private func clampOffset(animated: Bool) {
        guard currentScale > 1.0 else {
            currentOffset = .zero
            applyTransform(animated: animated)
            return
        }
        let clamped = clampedOffset()
        if currentOffset != clamped {
            currentOffset = clamped
            applyTransform(animated: animated)
        }
    }
    /// Clamp without re-applying the transform (caller applies it).
    private func clampOffsetImmediate() {
        currentOffset = clampedOffset()
    }
    /// Maximum travel is half the extra size gained by scaling beyond 1x.
    private func clampedOffset() -> CGPoint {
        let maxX = max(bounds.width * (currentScale - 1) / 2, 0)
        let maxY = max(bounds.height * (currentScale - 1) / 2, 0)
        return CGPoint(
            x: min(max(currentOffset.x, -maxX), maxX),
            y: min(max(currentOffset.y, -maxY), maxY)
        )
    }

    // MARK: - UIGestureRecognizerDelegate

    func gestureRecognizer(
        _ gestureRecognizer: UIGestureRecognizer,
        shouldRecognizeSimultaneouslyWith other: UIGestureRecognizer
    ) -> Bool {
        // Allow pinch + pan simultaneously (zoom + drag)
        let isPinchPan = (gestureRecognizer is UIPinchGestureRecognizer && other is UIPanGestureRecognizer) ||
            (gestureRecognizer is UIPanGestureRecognizer && other is UIPinchGestureRecognizer)
        return isPinchPan
    }
    override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
        if gestureRecognizer is UIPanGestureRecognizer {
            // Pan only when zoomed — dismiss handled by SwiftUI DragGesture
            return currentScale > 1.05
        }
        return true
    }
}

View File

@@ -288,6 +288,9 @@ private extension ChatRowView {
// Previously hidden when lastMessageFromMe (desktop parity),
// but this caused invisible unreads when user sent a reply
// without reading prior incoming messages first.
if dialog.hasMention && dialog.unreadCount > 0 && !isSyncing {
mentionBadge
}
if dialog.unreadCount > 0 && !isSyncing {
unreadBadge
}
@@ -296,6 +299,18 @@ private extension ChatRowView {
}
}
/// Telegram-style `@` mention indicator (shown left of unread count).
var mentionBadge: some View {
Text("@")
.font(.system(size: 14, weight: .medium))
.foregroundStyle(.white)
.frame(width: 20, height: 20)
.background {
Circle()
.fill(dialog.isMuted ? Color(hex: 0x787880) : RosettaColors.figmaBlue)
}
}
@ViewBuilder
var deliveryIcon: some View {
if dialog.lastMessageDelivered == .delivered && dialog.lastMessageRead {

View File

@@ -0,0 +1,283 @@
import SwiftUI
// MARK: - Wallpaper Definitions
/// A selectable chat-background choice shown in the appearance settings grid.
struct WallpaperOption: Identifiable, Equatable {
    // Stable identifier; persisted in the "rosetta_wallpaper_id" preference.
    let id: String
    // Display name shown in the picker (e.g. "Default", "None").
    let name: String
    // How this option renders.
    let style: WallpaperStyle
    enum WallpaperStyle: Equatable {
        // No wallpaper — plain background color.
        case none
        case image(String) // Asset catalog image name (supports light/dark appearance)
    }
}
extension WallpaperOption {
    /// Android parity: 3 wallpaper pairs (light/dark) from mobile-android assets.
    /// Each imageset contains light + dark appearance variants.
    /// "ChatWallpaper" = legacy default (wallpaper_light_02 + wallpaper_dark_03).
    static let allOptions: [WallpaperOption] = {
        // Image-backed entries in display order; "None" is appended last.
        let imageEntries: [(id: String, name: String, asset: String)] = [
            ("default", "Default", "ChatWallpaper"),
            ("wallpaper_1", "Pair 1", "Wallpaper1"),
            ("wallpaper_2", "Pair 2", "Wallpaper2"),
            ("wallpaper_3", "Pair 3", "Wallpaper3"),
        ]
        var options = imageEntries.map { entry in
            WallpaperOption(id: entry.id, name: entry.name, style: .image(entry.asset))
        }
        options.append(WallpaperOption(id: "none", name: "None", style: .none))
        return options
    }()
}
// MARK: - Theme Mode
/// User-selectable interface style, persisted by raw value in the
/// "rosetta_theme_mode" preference.
enum ThemeMode: String, CaseIterable {
    // Implicit raw values equal the case names ("dark", "light", "system").
    case dark
    case light
    case system

    /// Button title shown in the theme picker ("Dark" / "Light" / "System").
    var label: String {
        rawValue.capitalized
    }

    /// SF Symbol name shown next to the label.
    var iconName: String {
        switch self {
        case .dark:
            return "moon.fill"
        case .light:
            return "sun.max.fill"
        case .system:
            return "circle.lefthalf.filled"
        }
    }
}
// MARK: - Appearance View

/// Settings screen for theme mode (dark/light/system) and chat wallpaper.
/// Persists choices via @AppStorage:
/// - `rosetta_wallpaper_id`: id of the selected `WallpaperOption`
/// - `rosetta_theme_mode`: raw `ThemeMode` value (defaults to "dark")
struct AppearanceView: View {
    @AppStorage("rosetta_wallpaper_id") private var selectedWallpaperId: String = "default"
    @AppStorage("rosetta_theme_mode") private var themeModeRaw: String = "dark"

    /// Stored raw value decoded to a ThemeMode; unknown values fall back to .dark.
    private var themeMode: ThemeMode {
        ThemeMode(rawValue: themeModeRaw) ?? .dark
    }

    /// 3-column grid for the wallpaper picker.
    private let columns = Array(repeating: GridItem(.flexible(), spacing: 10), count: 3)

    var body: some View {
        ScrollView(showsIndicators: false) {
            VStack(spacing: 24) {
                chatPreviewSection
                themeSection
                wallpaperSection
            }
            .padding(.horizontal, 16)
            .padding(.top, 16)
            .padding(.bottom, 100)
        }
        .background(RosettaColors.Adaptive.background)
        .scrollContentBackground(.hidden)
        .navigationBarTitleDisplayMode(.inline)
        .toolbar {
            ToolbarItem(placement: .principal) {
                Text("Appearance")
                    .font(.system(size: 17, weight: .semibold))
                    .foregroundStyle(RosettaColors.Adaptive.text)
            }
        }
        .toolbarBackground(.hidden, for: .navigationBar)
    }

    // MARK: - Chat Preview

    /// Live preview: currently-selected wallpaper with two sample bubbles.
    private var chatPreviewSection: some View {
        VStack(spacing: 0) {
            ZStack {
                // Wallpaper background
                wallpaperPreview(for: selectedWallpaperId)
                    .frame(height: 200)
                    .clipShape(RoundedRectangle(cornerRadius: 20, style: .continuous))
                // Sample messages overlay
                VStack(spacing: 6) {
                    Spacer()
                    // Incoming message
                    HStack {
                        Text("Hey! How's it going?")
                            .font(.system(size: 15))
                            .foregroundStyle(.white)
                            .padding(.horizontal, 14)
                            .padding(.vertical, 8)
                            .background(
                                RoundedRectangle(cornerRadius: 18, style: .continuous)
                                    .fill(Color(hex: 0x2A2A2A))
                            )
                        Spacer()
                    }
                    // Outgoing message
                    HStack {
                        Spacer()
                        Text("Great, thanks!")
                            .font(.system(size: 15))
                            .foregroundStyle(.white)
                            .padding(.horizontal, 14)
                            .padding(.vertical, 8)
                            .background(
                                RoundedRectangle(cornerRadius: 18, style: .continuous)
                                    .fill(RosettaColors.primaryBlue)
                            )
                    }
                    Spacer()
                        .frame(height: 12)
                }
                .padding(.horizontal, 12)
            }
        }
    }

    // MARK: - Theme Section

    /// Segmented row of the three theme-mode buttons inside a settings card.
    private var themeSection: some View {
        VStack(alignment: .leading, spacing: 10) {
            Text("COLOR THEME")
                .font(.system(size: 13, weight: .medium))
                .foregroundStyle(RosettaColors.Adaptive.textSecondary)
                .padding(.horizontal, 4)
            SettingsCard {
                HStack(spacing: 0) {
                    ForEach(ThemeMode.allCases, id: \.self) { mode in
                        themeButton(mode)
                        if mode != ThemeMode.allCases.last {
                            Divider()
                                .frame(height: 24)
                                .foregroundStyle(RosettaColors.Adaptive.divider)
                        }
                    }
                }
                .frame(height: 52)
            }
        }
    }

    /// One theme button: persists the choice and applies it to all windows.
    private func themeButton(_ mode: ThemeMode) -> some View {
        Button {
            withAnimation(.easeInOut(duration: 0.2)) {
                themeModeRaw = mode.rawValue
            }
            applyThemeMode(mode)
        } label: {
            HStack(spacing: 8) {
                Image(systemName: mode.iconName)
                    .font(.system(size: 14))
                    .foregroundStyle(themeMode == mode ? RosettaColors.primaryBlue : RosettaColors.Adaptive.textSecondary)
                Text(mode.label)
                    .font(.system(size: 15, weight: themeMode == mode ? .semibold : .regular))
                    .foregroundStyle(themeMode == mode ? RosettaColors.primaryBlue : RosettaColors.Adaptive.text)
            }
            .frame(maxWidth: .infinity)
            .contentShape(Rectangle())
        }
        .buttonStyle(.plain)
    }

    /// Applies the chosen interface style to the windows of EVERY connected
    /// scene. FIX: previously only the first (preferably foreground-active)
    /// window scene was updated, so windows in other scenes — iPad
    /// multi-window, external displays — kept the old style; and if no scene
    /// was found the style was silently not applied at all.
    private func applyThemeMode(_ mode: ThemeMode) {
        let style: UIUserInterfaceStyle
        switch mode {
        case .dark: style = .dark
        case .light: style = .light
        case .system: style = .unspecified  // follow the system setting
        }
        for scene in UIApplication.shared.connectedScenes {
            guard let windowScene = scene as? UIWindowScene else { continue }
            for window in windowScene.windows {
                window.overrideUserInterfaceStyle = style
            }
        }
    }

    // MARK: - Wallpaper Section

    /// Grid of wallpaper options with selection highlighting.
    private var wallpaperSection: some View {
        VStack(alignment: .leading, spacing: 10) {
            Text("CHAT BACKGROUND")
                .font(.system(size: 13, weight: .medium))
                .foregroundStyle(RosettaColors.Adaptive.textSecondary)
                .padding(.horizontal, 4)
            LazyVGrid(columns: columns, spacing: 10) {
                ForEach(WallpaperOption.allOptions) { option in
                    wallpaperCell(option)
                }
            }
        }
    }

    /// A single selectable wallpaper preview cell.
    private func wallpaperCell(_ option: WallpaperOption) -> some View {
        let isSelected = selectedWallpaperId == option.id
        return Button {
            withAnimation(.easeInOut(duration: 0.2)) {
                selectedWallpaperId = option.id
            }
        } label: {
            ZStack {
                wallpaperPreview(for: option.id)
                // "None" label
                if case .none = option.style {
                    Text("None")
                        .font(.system(size: 13, weight: .medium))
                        .foregroundStyle(RosettaColors.Adaptive.textSecondary)
                }
                // Selection checkmark
                if isSelected {
                    VStack {
                        Spacer()
                        HStack {
                            Spacer()
                            Image(systemName: "checkmark.circle.fill")
                                .font(.system(size: 22))
                                .foregroundStyle(RosettaColors.primaryBlue)
                                .background(Circle().fill(.white).frame(width: 18, height: 18))
                                .padding(8)
                        }
                    }
                }
            }
            .frame(height: 140)
            .clipShape(RoundedRectangle(cornerRadius: 14, style: .continuous))
            .overlay(
                RoundedRectangle(cornerRadius: 14, style: .continuous)
                    .stroke(isSelected ? RosettaColors.primaryBlue : Color.white.opacity(0.1), lineWidth: isSelected ? 2 : 0.5)
            )
        }
        .buttonStyle(.plain)
    }

    // MARK: - Wallpaper Preview Renderer

    /// Renders the wallpaper for an option id; unknown ids fall back to the
    /// first option ("default").
    @ViewBuilder
    private func wallpaperPreview(for wallpaperId: String) -> some View {
        let option = WallpaperOption.allOptions.first(where: { $0.id == wallpaperId })
            ?? WallpaperOption.allOptions[0]
        switch option.style {
        case .none:
            RosettaColors.Adaptive.background
        case .image(let assetName):
            Image(assetName)
                .resizable()
                .aspectRatio(contentMode: .fill)
        }
    }
}

View File

@@ -6,6 +6,7 @@ enum SettingsDestination: Hashable {
case updates
case safety
case backup
case appearance
}
/// Settings screen with in-place profile editing transition.
@@ -72,6 +73,8 @@ struct SettingsView: View {
SafetyView(onLogout: onLogout)
case .backup:
BackupView()
case .appearance:
AppearanceView()
}
}
.task {
@@ -345,6 +348,7 @@ struct SettingsView: View {
accountSwitcherCard
// Desktop parity: separate cards with subtitle descriptions.
appearanceCard
updatesCard
if BiometricAuthManager.shared.isBiometricAvailable {
biometricCard
@@ -560,6 +564,27 @@ struct SettingsView: View {
// MARK: - Desktop Parity Cards
private var appearanceCard: some View {
VStack(alignment: .leading, spacing: 8) {
SettingsCard {
NavigationLink(value: SettingsDestination.appearance) {
settingsRowLabel(
icon: "paintbrush.fill",
title: "Appearance",
color: .blue
)
}
.settingsHighlight()
}
Text("Customize theme, wallpaper and chat appearance.")
.font(.system(size: 13))
.foregroundStyle(RosettaColors.Adaptive.textSecondary)
.padding(.horizontal, 16)
.padding(.bottom, 8)
}
.padding(.top, 16)
}
private var updatesCard: some View {
VStack(alignment: .leading, spacing: 8) {
SettingsCard {

View File

@@ -1,3 +1,4 @@
import AVFAudio
import CallKit
import FirebaseCore
import FirebaseCrashlytics
@@ -126,6 +127,8 @@ final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCent
fetchCompletionHandler completionHandler: @escaping (UIBackgroundFetchResult) -> Void
) {
let pushType = userInfo["type"] as? String ?? ""
let appState = application.applicationState
Logger.voip.info("Push received: type=\(pushType, privacy: .public) appState=\(appState.rawValue, privacy: .public)")
// MARK: type=read clear notifications for dialog (read on another device).
// Handle even in foreground: if user reads on Desktop, phone clears its notifications.
@@ -300,43 +303,101 @@ final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCent
// MARK: - Call Push Handler
/// Handles `type=call` push: shows incoming call notification when app is backgrounded.
/// Server sends this as a wake-up when the recipient's WebSocket is not connected.
/// No badge increment (calls don't affect unread count).
/// No dedup (calls are urgent always show notification).
/// No mute check (Android parity: calls bypass mute).
/// Handles `type=call` FCM push: triggers CallKit + sets up call state + reconnects WebSocket.
/// Android parity: `handleIncomingCallPush()` in RosettaFirebaseMessagingService.kt.
/// Previously just showed a local notification now matches VoIP push behavior.
private func handleCallPush(
userInfo: [AnyHashable: Any],
completionHandler: @escaping (UIBackgroundFetchResult) -> Void
) {
let callerKey = userInfo["dialog"] as? String
?? Self.extractSenderKey(from: userInfo)
guard !callerKey.isEmpty else {
// Dedup: if VoIP push already reported this call to CallKit, skip FCM path.
// Server sends BOTH VoIP APNs AND FCM simultaneously. Processing both creates
// two WebSocket connections every signal delivered twice audio breaks.
if CallKitManager.shared.hasPendingCall() {
Logger.voip.info("FCM call push: VoIP push already handled — skipping")
completionHandler(.noData)
return
}
let callerKey = userInfo["dialog"] as? String
?? Self.extractSenderKey(from: userInfo)
guard !callerKey.isEmpty else {
Logger.voip.warning("FCM call push: empty callerKey — ignoring")
completionHandler(.noData)
return
}
let callId = userInfo["callId"] as? String
let joinToken = userInfo["joinToken"] as? String
let shared = UserDefaults(suiteName: "group.com.rosetta.dev")
let contactNames = shared?.dictionary(forKey: "contact_display_names") as? [String: String] ?? [:]
let callerName = contactNames[callerKey]
?? Self.firstNonBlank(userInfo, keys: ["sender_name", "from_title", "sender", "title", "name"])
?? "Rosetta"
let callerName: String = {
if let cached = contactNames[callerKey], !cached.isEmpty { return cached }
if let fromPush = Self.firstNonBlank(userInfo, keys: ["sender_name", "from_title", "sender", "title", "name"]) {
return fromPush
}
if let creds = SessionCredentialsManager.shared.load(),
let dbName = Self.resolveCallerNameFromDB(callerKey: callerKey, accountKey: creds.publicKey),
!dbName.isEmpty {
return dbName
}
return "Rosetta"
}()
let content = UNMutableNotificationContent()
content.title = callerName
content.body = "Incoming call"
content.sound = .default
content.categoryIdentifier = "call"
content.userInfo = ["sender_public_key": callerKey, "sender_name": callerName, "type": "call"]
content.interruptionLevel = .timeSensitive
Logger.voip.info("FCM call push: key=\(callerKey.prefix(16), privacy: .public) name=\(callerName, privacy: .public) callId=\(callId ?? "nil", privacy: .public)")
let request = UNNotificationRequest(
identifier: "call_\(callerKey)_\(Int(Date().timeIntervalSince1970))",
content: content,
trigger: nil
)
UNUserNotificationCenter.current().add(request) { _ in
completionHandler(.newData)
// 1. Report to CallKit shows native incoming call UI (same as VoIP push path).
CallKitManager.shared.reportIncomingCallSynchronously(
callerKey: callerKey,
callerName: callerName,
callId: callId
) { error in
if let error {
Logger.voip.error("FCM call push: CallKit report failed: \(error.localizedDescription, privacy: .public)")
}
completionHandler(error == nil ? .newData : .failed)
}
// 2. Set up call state + reconnect WebSocket (same as VoIP push path).
Task { @MainActor in
// Guard: only process calls for the active account.
let activeKey = AccountManager.shared.activeAccountPublicKey ?? ""
if !activeKey.isEmpty,
let creds = SessionCredentialsManager.shared.load(),
creds.publicKey != activeKey {
Logger.voip.warning("FCM call push: ignoring — inactive account")
CallKitManager.shared.reportCallEndedByRemote(reason: .unanswered)
return
}
if CallManager.shared.ownPublicKey.isEmpty,
let creds = SessionCredentialsManager.shared.load() {
CallManager.shared.bindAccount(publicKey: creds.publicKey)
}
if !callerKey.isEmpty, CallManager.shared.uiState.phase == .idle {
CallManager.shared.setupIncomingCallFromPush(
callerKey: callerKey,
callerName: callerName,
callId: callId,
joinToken: joinToken
)
}
// Restore WebSocket so call signaling can proceed.
if ProtocolManager.shared.connectionState == .authenticated {
return
}
if ProtocolManager.shared.publicKey == nil,
let creds = SessionCredentialsManager.shared.load() {
Logger.voip.info("FCM call push: restoring session from Keychain")
ProtocolManager.shared.connect(
publicKey: creds.publicKey,
privateKeyHash: creds.privateKeyHash
)
} else {
ProtocolManager.shared.forceReconnectOnForeground()
}
}
}
@@ -450,7 +511,7 @@ final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCent
return []
}
/// Handle notification tap navigate to the sender's chat.
/// Handle notification tap navigate to the sender's chat or expand call.
/// Android parity: extracts sender key with multi-key fallback.
func userNotificationCenter(
_ center: UNUserNotificationCenter,
@@ -458,6 +519,18 @@ final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCent
withCompletionHandler completionHandler: @escaping () -> Void
) {
let userInfo = response.notification.request.content.userInfo
// Call notification tap expand call overlay (not chat).
if let pushType = userInfo["type"] as? String, pushType == "call" {
Task { @MainActor in
if CallManager.shared.uiState.phase != .idle {
CallManager.shared.expandCall()
}
}
completionHandler()
return
}
// Android parity: try multiple key names for sender identification.
let senderKey = Self.extractSenderKey(from: userInfo)
@@ -557,6 +630,20 @@ extension AppDelegate: PKPushRegistryDelegate {
}()
Logger.voip.info("VoIP resolved: key=\(callerKey.prefix(16), privacy: .public) name=\(callerName, privacy: .public) callId=\(callId ?? "nil", privacy: .public) joinTokenPresent=\((joinToken?.isEmpty == false).description, privacy: .public)")
// Telegram parity: pre-configure RTCAudioSession BEFORE reporting to CallKit.
// This tells the system what audio configuration we need (.playAndRecord,
// .voiceChat mode). Without this, CallKit may fail to deliver didActivate
// for background VoIP push calls because the audio session is in an
// unknown state (.soloAmbient). Telegram: OngoingCallThreadLocalContext.mm
// setupAudioSession() called before reportNewIncomingCall.
do {
let avSession = AVAudioSession.sharedInstance()
let options: AVAudioSession.CategoryOptions = [.allowBluetooth, .defaultToSpeaker, .mixWithOthers]
try avSession.setCategory(.playAndRecord, mode: .voiceChat, options: options)
} catch {
Logger.voip.error("Failed to pre-configure audio session: \(error.localizedDescription)")
}
// Apple REQUIREMENT: reportNewIncomingCall MUST be called SYNCHRONOUSLY.
// Using Task { @MainActor } would introduce an async hop that may be
// delayed if the main actor is busy, causing Apple to terminate the app.
@@ -587,6 +674,18 @@ extension AppDelegate: PKPushRegistryDelegate {
// and CallManager can handle the call. When app was killed, SessionManager
// has no credentials in memory load from Keychain (saved during startSession).
Task { @MainActor in
// Guard: only process calls for the ACTIVE account.
// When multiple accounts exist, VoIP token may still be registered
// for a passive account — server sends push for wrong account.
let activeKey = AccountManager.shared.activeAccountPublicKey ?? ""
if !activeKey.isEmpty,
let creds = SessionCredentialsManager.shared.load(),
creds.publicKey != activeKey {
Logger.voip.warning("VoIP push: ignoring — push woke inactive account \(creds.publicKey.prefix(8), privacy: .public), active is \(activeKey.prefix(8), privacy: .public)")
CallKitManager.shared.reportCallEndedByRemote(reason: .unanswered)
return
}
// Set up incoming call state from push payload IMMEDIATELY.
// Don't wait for the WebSocket .call signal — it's fire-and-forget
// and may have been sent before our WebSocket connected.

View File

@@ -16,6 +16,12 @@ final class NotificationService: UNNotificationServiceExtension {
private static let processedIdsKey = "nse_processed_message_ids"
/// Max dedup entries kept in the App Group — the NSE has tight memory limits.
private static let maxProcessedIds = 100
/// Tracks dialogs recently read on another device (e.g. Desktop).
/// When a READ push arrives, we store {dialogKey: timestamp}. Subsequent message
/// pushes for the same dialog within the window are suppressed — the user is actively
/// reading on Desktop, so the phone should stay silent.
private static let recentlyReadKey = "nse_recently_read_dialogs"
private static let recentlyReadWindow: TimeInterval = 30
/// Android parity: multiple key names for sender public key extraction.
/// Server sends `dialog` field (was `from`). Both kept for backward compat.
@@ -53,6 +59,17 @@ final class NotificationService: UNNotificationServiceExtension {
dialogKey = String(dialogKey.dropFirst("#group:".count))
}
// Track this dialog as "recently read on another device" (Desktop parity).
// Next message push for this dialog within 30s will be suppressed.
if !dialogKey.isEmpty, let shared {
let now = Date().timeIntervalSince1970
var recentlyRead = shared.dictionary(forKey: Self.recentlyReadKey) as? [String: Double] ?? [:]
recentlyRead[dialogKey] = now
// Evict stale entries (> 60s) to prevent unbounded growth.
recentlyRead = recentlyRead.filter { now - $0.value < 60 }
shared.set(recentlyRead, forKey: Self.recentlyReadKey)
}
// Deliver silently — no sound, no alert.
content.sound = nil
content.title = ""
@@ -136,6 +153,23 @@ final class NotificationService: UNNotificationServiceExtension {
return
}
// 3.1 Desktop-active suppression: if this dialog was read on another device
// (Desktop) within the last 30s, suppress the notification. The user is
// actively reading on Desktop — no need to buzz the phone.
if !senderKey.isEmpty {
let recentlyRead = shared.dictionary(forKey: Self.recentlyReadKey) as? [String: Double] ?? [:]
if let lastReadTime = recentlyRead[senderKey] {
let elapsed = Date().timeIntervalSince1970 - lastReadTime
if elapsed < Self.recentlyReadWindow {
content.sound = nil
content.title = ""
content.body = ""
contentHandler(content)
return
}
}
}
// 3.5 Dedup: skip badge increment if we already processed this push.
// Protects against duplicate FCM delivery (rare, but server dedup window is ~10s).
let messageId = content.userInfo["message_id"] as? String
@@ -174,7 +208,7 @@ final class NotificationService: UNNotificationServiceExtension {
updatedInfo["sender_public_key"] = senderKey
}
// 6. Resolve sender name from App Group cache (synced by DialogRepository).
// 7. Resolve sender name from App Group cache (synced by DialogRepository).
let contactNames = shared?.dictionary(forKey: "contact_display_names") as? [String: String] ?? [:]
let resolvedName = contactNames[senderKey]
?? Self.firstNonBlank(content.userInfo, keys: Self.senderNameKeyNames)
@@ -186,12 +220,12 @@ final class NotificationService: UNNotificationServiceExtension {
}
content.userInfo = updatedInfo
// 7. Ensure notification category for CarPlay parity.
// 8. Ensure notification category for CarPlay parity.
if content.categoryIdentifier.isEmpty {
content.categoryIdentifier = "message"
}
// 8. Create Communication Notification via INSendMessageIntent.
// 9. Create Communication Notification via INSendMessageIntent.
let senderName = resolvedName
?? Self.firstNonBlank(content.userInfo, keys: Self.senderNameKeyNames)
?? content.title

View File

@@ -0,0 +1,193 @@
import XCTest
@testable import Rosetta
@MainActor
final class CallDisplayNameTests: XCTestCase {

    private let ownKey = "02-own"

    /// Compressed public key fixture: 33 bytes → 66 hex chars, matching the
    /// "66-char public key" contract asserted by the tests below.
    /// Fix: the previous fixture was 65 hex chars (odd length — not decodable
    /// as bytes). Padded by one trailing character; every assertion uses
    /// `prefix(12)` or inequality against the full key, so no expected value
    /// changes.
    private let peerKey = "0263e9134d3abeb880bb5fa679954800a80d0c286c5c54c9452a996b0c7608db3b"

    override func setUp() {
        super.setUp()
        resetCallState()
    }

    override func tearDown() {
        CallManager.shared.resetForSessionEnd()
        super.tearDown()
    }

    // MARK: - Helpers

    /// Return CallManager to a clean state bound to the test account.
    /// Shared by `setUp` and by multi-scenario tests that restart mid-test.
    private func resetCallState() {
        CallManager.shared.resetForSessionEnd()
        CallManager.shared.bindAccount(publicKey: ownKey)
    }

    /// Incoming-call signal exactly as the server sends it (PacketSignalPeer,
    /// 0x1A): src/dst public keys only — there is no display-name field.
    private func makeIncomingCallPacket() -> PacketSignalPeer {
        PacketSignalPeer(
            src: peerKey,
            dst: ownKey,
            sharedPublic: "",
            signalType: .call,
            roomId: ""
        )
    }

    // MARK: - CallUiState.displayName fallback chain

    func testDisplayNamePrefersTitle() {
        // Title is the highest-priority source for the display name.
        let state = CallUiState(
            peerPublicKey: peerKey,
            peerTitle: "Alice",
            peerUsername: "alice"
        )
        XCTAssertEqual(state.displayName, "Alice")
    }

    func testDisplayNameFallsBackToUsername() {
        // Empty title → "@username" form.
        let state = CallUiState(
            peerPublicKey: peerKey,
            peerTitle: "",
            peerUsername: "alice"
        )
        XCTAssertEqual(state.displayName, "@alice")
    }

    func testDisplayNameFallsBackToKeyPrefix() {
        // No title, no username → truncated key prefix (12 chars).
        let state = CallUiState(
            peerPublicKey: peerKey,
            peerTitle: "",
            peerUsername: ""
        )
        XCTAssertEqual(state.displayName, String(peerKey.prefix(12)))
    }

    func testDisplayNameReturnsUnknownWhenEmpty() {
        // Fully empty state bottoms out at the "Unknown" placeholder.
        let state = CallUiState()
        XCTAssertEqual(state.displayName, "Unknown")
    }

    // MARK: - Outgoing call sets displayName before CallKit

    func testOutgoingCallSetsDisplayNameFromTitle() {
        let result = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey,
            title: "Alice",
            username: "alice"
        )
        XCTAssertEqual(result, .started)
        // After startOutgoingCall, uiState must already carry the resolved title.
        XCTAssertEqual(CallManager.shared.uiState.peerTitle, "Alice")
        XCTAssertEqual(CallManager.shared.uiState.displayName, "Alice")
    }

    func testOutgoingCallWithEmptyTitleUsesUsername() {
        let result = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey,
            title: "",
            username: "alice"
        )
        XCTAssertEqual(result, .started)
        XCTAssertEqual(CallManager.shared.uiState.displayName, "@alice")
    }

    func testOutgoingCallWithNoIdentityUsesKeyPrefix() {
        let result = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey,
            title: "",
            username: ""
        )
        XCTAssertEqual(result, .started)
        // Should NOT be the full key — must be the truncated prefix.
        let displayName = CallManager.shared.uiState.displayName
        XCTAssertEqual(displayName, String(peerKey.prefix(12)))
        XCTAssertNotEqual(displayName, peerKey, "Full public key must NEVER be used as display name")
    }

    // MARK: - Incoming call name hydration

    func testIncomingCallDisplayNameDefaultsToKeyPrefix() {
        // Incoming calls start with an empty title (the signal packet carries
        // no name field — see makeIncomingCallPacket).
        CallManager.shared.testHandleSignalPacket(makeIncomingCallPacket())
        XCTAssertEqual(CallManager.shared.uiState.phase, .incoming)
        // Without a DialogRepository entry the name falls back to the key prefix.
        let displayName = CallManager.shared.uiState.displayName
        XCTAssertTrue(displayName.count <= 12 || displayName == "Unknown",
                      "Incoming call display name should be short prefix, not full key: \(displayName)")
    }

    // MARK: - Server parity: signal packet has NO name field

    func testIncomingCallFromServerHasNoNameInPacket() {
        // Verify that after signal processing, displayName is NOT the full
        // public key the server addressed us with.
        CallManager.shared.testHandleSignalPacket(makeIncomingCallPacket())
        let displayName = CallManager.shared.uiState.displayName
        XCTAssertNotEqual(displayName, peerKey,
                          "Server sends no name — displayName must NOT be full 66-char public key")
        XCTAssertTrue(displayName.count < 20,
                      "Display name should be short (prefix or 'Unknown'), got: \(displayName)")
    }

    // MARK: - Outgoing call: displayName available BEFORE CallKit report

    func testDisplayNameSetBeforeCallKitReport() {
        // beginCallSession is called BEFORE startOutgoingCall(peerKey:displayName:)
        // so uiState.displayName must already be resolved when CallKit is invoked.
        let result = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey,
            title: "Bob",
            username: "bob"
        )
        XCTAssertEqual(result, .started)
        // After startOutgoingCall, peerTitle should be "Bob" (set by beginCallSession).
        XCTAssertEqual(CallManager.shared.uiState.peerTitle, "Bob")
        // CallKit was called with displayName = "Bob" (verified by the flow:
        // beginCallSession sets peerTitle → startOutgoingCall reads uiState.displayName).
    }

    // MARK: - Full key never leaks as display name

    func testFullKeyNeverUsedInAnyScenario() {
        // Scenario 1: outgoing with title.
        _ = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey, title: "Alice", username: ""
        )
        XCTAssertNotEqual(CallManager.shared.uiState.displayName, peerKey)
        resetCallState()

        // Scenario 2: outgoing with username only.
        _ = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey, title: "", username: "alice"
        )
        XCTAssertNotEqual(CallManager.shared.uiState.displayName, peerKey)
        resetCallState()

        // Scenario 3: outgoing with no identity at all.
        _ = CallManager.shared.startOutgoingCall(
            toPublicKey: peerKey, title: "", username: ""
        )
        XCTAssertNotEqual(CallManager.shared.uiState.displayName, peerKey)
        resetCallState()

        // Scenario 4: incoming via signal.
        CallManager.shared.testHandleSignalPacket(makeIncomingCallPacket())
        XCTAssertNotEqual(CallManager.shared.uiState.displayName, peerKey)
    }

    // MARK: - updateCallerName (CallKit update after hydration)

    func testUpdateCallerNameIsAvailable() {
        // Verify the method exists and doesn't crash with empty state.
        CallKitManager.shared.updateCallerName("")      // should be a no-op
        CallKitManager.shared.updateCallerName("Alice") // no UUID → no-op, but shouldn't crash
    }
}