Фикс: дубликат CallKit вызова, disconnect recovery, WebRTC packet buffering и E2EE rebind loop

This commit is contained in:
2026-04-02 15:29:46 +05:00
parent 4be6761492
commit de0818fe69
10 changed files with 863 additions and 295 deletions

View File

@@ -95,6 +95,10 @@ final class ProtocolManager: @unchecked Sendable {
private var webRTCHandlers: [UUID: (PacketWebRTC) -> Void] = [:] private var webRTCHandlers: [UUID: (PacketWebRTC) -> Void] = [:]
private var iceServersHandlers: [UUID: (PacketIceServers) -> Void] = [:] private var iceServersHandlers: [UUID: (PacketIceServers) -> Void] = [:]
/// Background task to keep WebSocket alive during brief background periods (active call).
/// iOS gives ~30s; enough for the call to survive app switching / notification interactions.
private var callBackgroundTask: UIBackgroundTaskIdentifier = .invalid
// Saved credentials for auto-reconnect // Saved credentials for auto-reconnect
private var savedPublicKey: String? private var savedPublicKey: String?
private var savedPrivateHash: String? private var savedPrivateHash: String?
@@ -185,6 +189,16 @@ final class ProtocolManager: @unchecked Sendable {
func forceReconnectOnForeground() { func forceReconnectOnForeground() {
guard savedPublicKey != nil, savedPrivateHash != nil else { return } guard savedPublicKey != nil, savedPrivateHash != nil else { return }
// During an active call the WebSocket may still be alive (background task
// keeps the process running for ~30s). Tearing it down would break signaling
// and trigger server re-delivery of .call causing endCallBecauseBusy.
// If the connection is authenticated, trust it and skip reconnect.
if CallManager.shared.uiState.phase != .idle,
connectionState == .authenticated {
Self.logger.info("⚡ Foreground reconnect skipped — active call, WS authenticated")
return
}
// Android parity: skip if handshake or device verification is in progress. // Android parity: skip if handshake or device verification is in progress.
// These are active flows that should not be interrupted. // These are active flows that should not be interrupted.
switch connectionState { switch connectionState {
@@ -209,6 +223,25 @@ final class ProtocolManager: @unchecked Sendable {
client.forceReconnect() client.forceReconnect()
} }
// MARK: - Call Background Task
/// Keeps the process alive during active calls so WebSocket survives brief background.
func beginCallBackgroundTask() {
    // No-op if a background task is already registered for this call.
    if callBackgroundTask != .invalid { return }
    // The expiration handler releases the task before iOS would terminate us.
    let taskID = UIApplication.shared.beginBackgroundTask(withName: "RosettaCall") { [weak self] in
        self?.endCallBackgroundTask()
    }
    callBackgroundTask = taskID
    Self.logger.info("📞 Background task started for call")
}
/// Releases the background task started by beginCallBackgroundTask(), if any.
func endCallBackgroundTask() {
    // Nothing registered — nothing to release.
    if callBackgroundTask == .invalid { return }
    // Reset the stored identifier before ending the task, so a re-entrant
    // call (e.g. from the expiration handler) sees .invalid and bails out.
    let taskID = callBackgroundTask
    callBackgroundTask = .invalid
    UIApplication.shared.endBackgroundTask(taskID)
    Self.logger.info("📞 Background task ended for call")
}
/// Android parity: `reconnectNowIfNeeded()` if already in an active state, /// Android parity: `reconnectNowIfNeeded()` if already in an active state,
/// skip reconnect. Otherwise reset backoff and connect immediately. /// skip reconnect. Otherwise reset backoff and connect immediately.
func reconnectIfNeeded() { func reconnectIfNeeded() {

View File

@@ -1,6 +1,7 @@
import AVFAudio import AVFAudio
import CallKit import CallKit
import os import os
import WebRTC
/// CallKit integration layer wraps CXProvider and CXCallController. /// CallKit integration layer wraps CXProvider and CXCallController.
/// Reports incoming/outgoing calls to the system so they appear in the native call UI, /// Reports incoming/outgoing calls to the system so they appear in the native call UI,
@@ -54,17 +55,43 @@ final class CallKitManager: NSObject {
nonisolated func reportIncomingCallSynchronously( nonisolated func reportIncomingCallSynchronously(
callerKey: String, callerKey: String,
callerName: String, callerName: String,
callId: String? = nil,
completion: @escaping (Error?) -> Void completion: @escaping (Error?) -> Void
) { ) {
let uuid = UUID() // Guard: if WebSocket already reported this call via reportIncomingCall(),
// skip to prevent duplicate CallKit calls. Two calls with different UUIDs
// cause CallKit (maximumCallsPerCallGroup=1) to auto-end the first one,
// which sends endCall signal to desktop and drops the call.
uuidLock.lock()
let alreadyPending = _pendingCallUUID != nil
uuidLock.unlock()
if alreadyPending {
Self.logger.info("Skipping duplicate CallKit report — call already pending from WebSocket")
completion(nil)
return
}
// Stable UUID from server callId (Telegram parity) prevents UUID mismatch
// between PushKit and WebSocket paths for the same call.
let uuid: UUID = {
if let callId, let parsed = UUID(uuidString: callId) {
return parsed
}
return UUID()
}()
// Assign UUID synchronously to prevent race with WebSocket signal. // Assign UUID synchronously to prevent race with WebSocket signal.
uuidLock.lock() uuidLock.lock()
_pendingCallUUID = uuid _pendingCallUUID = uuid
uuidLock.unlock() uuidLock.unlock()
let handleValue = callerName.isEmpty
? String(callerKey.prefix(7))
: callerName
let update = CXCallUpdate() let update = CXCallUpdate()
update.remoteHandle = CXHandle(type: .generic, value: callerKey) update.remoteHandle = CXHandle(type: .generic, value: handleValue)
update.localizedCallerName = callerName.isEmpty ? "Rosetta" : callerName update.localizedCallerName = callerName.isEmpty ? "Rosetta" : callerName
update.hasVideo = false update.hasVideo = false
update.supportsHolding = false update.supportsHolding = false
@@ -105,8 +132,21 @@ final class CallKitManager: NSObject {
let uuid = currentCallUUID ?? UUID() let uuid = currentCallUUID ?? UUID()
currentCallUUID = uuid currentCallUUID = uuid
// Sync _pendingCallUUID so VoIP push path can see we already have a call.
// Without this, VoIP push creates a SECOND CallKit call with a different UUID,
// and CallKit auto-ends the first one, which sends endCall to desktop.
uuidLock.lock()
_pendingCallUUID = uuid
uuidLock.unlock()
// Use a display-friendly handle value — CallKit shows this on the lock screen.
// Full public key is ugly; short prefix matches Android parity (7 chars).
let handleValue = callerName.isEmpty
? String(callerKey.prefix(7))
: callerName
let update = CXCallUpdate() let update = CXCallUpdate()
update.remoteHandle = CXHandle(type: .generic, value: callerKey) update.remoteHandle = CXHandle(type: .generic, value: handleValue)
update.localizedCallerName = callerName.isEmpty ? "Rosetta" : callerName update.localizedCallerName = callerName.isEmpty ? "Rosetta" : callerName
update.hasVideo = false update.hasVideo = false
update.supportsHolding = false update.supportsHolding = false
@@ -127,6 +167,28 @@ final class CallKitManager: NSObject {
} }
} }
/// Updates the caller display name on an active incoming call.
/// Called when hydratePeerIdentity resolves a name that wasn't available during
/// the initial reportIncomingCall (e.g. DB not yet loaded on cold start from VoIP push).
func updateCallerName(_ name: String) {
    guard !name.isEmpty else { return }
    // Prefer currentCallUUID; fall back to _pendingCallUUID for the PushKit race:
    // currentCallUUID is assigned asynchronously via Task { @MainActor }, while
    // _pendingCallUUID is set synchronously in reportIncomingCallSynchronously(),
    // so hydratePeerIdentity may run before currentCallUUID has been assigned.
    var callUUID = currentCallUUID
    if callUUID == nil {
        uuidLock.lock()
        callUUID = _pendingCallUUID
        uuidLock.unlock()
    }
    guard let callUUID else { return }
    let update = CXCallUpdate()
    update.localizedCallerName = name
    update.remoteHandle = CXHandle(type: .generic, value: name)
    provider.reportCall(with: callUUID, updated: update)
}
// MARK: - Outgoing Call // MARK: - Outgoing Call
func startOutgoingCall(peerKey: String) { func startOutgoingCall(peerKey: String) {
@@ -210,13 +272,16 @@ extension CallKitManager: CXProviderDelegate {
nonisolated func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) { nonisolated func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
Self.logger.info("CXAnswerCallAction") Self.logger.info("CXAnswerCallAction")
action.fulfill()
Task { @MainActor in Task { @MainActor in
let result = CallManager.shared.acceptIncomingCall() let result = CallManager.shared.acceptIncomingCall()
if result == .started { if result == .started {
action.fulfill() Self.logger.info("CXAnswerCallAction: accepted")
} else { } else {
Self.logger.warning("CXAnswerCallAction failed: \(String(describing: result))") // Phase not .incoming yet WebSocket hasn't delivered .call signal.
action.fail() // Set pending flag so handleSignalPacket(.call) auto-accepts when it arrives.
Self.logger.info("CXAnswerCallAction: pending (phase not incoming yet, waiting for WebSocket)")
CallManager.shared.pendingCallKitAccept = true
} }
} }
} }
@@ -249,9 +314,30 @@ extension CallKitManager: CXProviderDelegate {
nonisolated func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) { nonisolated func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
Self.logger.info("Audio session activated by CallKit") Self.logger.info("Audio session activated by CallKit")
let rtcSession = RTCAudioSession.sharedInstance()
// 1. Tell WebRTC the session is active (updates internal isActive flag).
rtcSession.audioSessionDidActivate(audioSession)
// 2. Configure category BEFORE enabling audio when isAudioEnabled
// becomes true, ADM may immediately init the audio unit and will
// use whatever category is currently set. Without this, it may use
// .soloAmbient (default) instead of .playAndRecord — silent audio.
rtcSession.lockForConfiguration()
try? rtcSession.setCategory(
.playAndRecord, mode: .voiceChat,
options: [.allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
)
rtcSession.unlockForConfiguration()
// 3. NOW enable audio — ADM will init with the correct .playAndRecord category.
rtcSession.isAudioEnabled = true
Task { @MainActor in
await CallManager.shared.onAudioSessionActivated()
}
} }
nonisolated func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) { nonisolated func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
Self.logger.info("Audio session deactivated by CallKit") Self.logger.info("Audio session deactivated by CallKit")
let rtcSession = RTCAudioSession.sharedInstance()
rtcSession.audioSessionDidDeactivate(audioSession)
rtcSession.isAudioEnabled = false
} }
} }

View File

@@ -1,6 +1,7 @@
import AVFAudio import AVFAudio
import CryptoKit import CryptoKit
import Foundation import Foundation
import os
import SwiftUI import SwiftUI
import UIKit import UIKit
import WebRTC import WebRTC
@@ -10,19 +11,32 @@ extension CallManager {
func handleWebRtcPacket(_ packet: PacketWebRTC) async { func handleWebRtcPacket(_ packet: PacketWebRTC) async {
guard uiState.phase == .webRtcExchange || uiState.phase == .active else { return } guard uiState.phase == .webRtcExchange || uiState.phase == .active else { return }
guard let peerConnection = self.peerConnection else { return }
// Buffer packets if peer connection not yet created (CallKit didActivate delay).
// Android doesn't need this — no CallKit; peerConnection is created immediately.
guard let peerConnection = self.peerConnection else {
callLogger.info("[Call] Buffering WebRTC packet (type=\(packet.signalType.rawValue, privacy: .public)) — peerConnection not ready")
bufferedWebRtcPackets.append(packet)
return
}
await processWebRtcPacket(packet, on: peerConnection)
}
func processWebRtcPacket(_ packet: PacketWebRTC, on peerConnection: RTCPeerConnection) async {
switch packet.signalType { switch packet.signalType {
case .answer: case .answer:
guard let answer = parseSessionDescription(from: packet.sdpOrCandidate), guard let answer = parseSessionDescription(from: packet.sdpOrCandidate),
answer.type == .answer else { answer.type == .answer else {
return return
} }
callLogger.info("[Call] Received ANSWER from SFU")
do { do {
try await setRemoteDescription(answer, on: peerConnection) try await setRemoteDescription(answer, on: peerConnection)
remoteDescriptionSet = true remoteDescriptionSet = true
await flushBufferedRemoteCandidates() await flushBufferedRemoteCandidates()
} catch { } catch {
callLogger.error("[Call] Failed to apply answer: \(error.localizedDescription, privacy: .public)")
finishCall(reason: "Failed to apply answer", notifyPeer: false) finishCall(reason: "Failed to apply answer", notifyPeer: false)
} }
case .offer: case .offer:
@@ -30,6 +44,7 @@ extension CallManager {
offer.type == .offer else { offer.type == .offer else {
return return
} }
callLogger.info("[Call] Received OFFER from SFU (renegotiation)")
do { do {
try await setRemoteDescription(offer, on: peerConnection) try await setRemoteDescription(offer, on: peerConnection)
remoteDescriptionSet = true remoteDescriptionSet = true
@@ -42,6 +57,7 @@ extension CallManager {
sdpOrCandidate: serializeSessionDescription(answer) sdpOrCandidate: serializeSessionDescription(answer)
) )
} catch { } catch {
callLogger.error("[Call] Failed to handle offer: \(error.localizedDescription, privacy: .public)")
finishCall(reason: "Failed to handle offer", notifyPeer: false) finishCall(reason: "Failed to handle offer", notifyPeer: false)
} }
case .iceCandidate: case .iceCandidate:
@@ -54,11 +70,39 @@ extension CallManager {
} }
} }
/// Called by CallKit when the audio session is activated (didActivate callback).
/// The session category is already set in didActivate (before isAudioEnabled = true).
/// This method handles deferred WebRTC setup and non-audio-session configuration.
/// MUST be async — it directly awaits ensurePeerConnectionAndOffer() to avoid
/// double-async nesting (Task inside Task) that causes race conditions.
func onAudioSessionActivated() async {
audioSessionActivated = true
callLogger.info("[Call] didActivate: phase=\(self.uiState.phase.rawValue, privacy: .public) pendingWebRtcSetup=\(self.pendingWebRtcSetup.description, privacy: .public)")
// The call may have ended before CallKit delivered the activation callback.
guard uiState.phase != .idle else { return }
// Flush deferred WebRTC setup — .createRoom arrived before didActivate.
// Direct await (no Task wrapper) eliminates the double-async race where
// remote offers arrive before the peer connection exists. The flag is
// cleared BEFORE the await so a concurrent activation cannot double-flush.
if pendingWebRtcSetup {
pendingWebRtcSetup = false
await ensurePeerConnectionAndOffer()
}
// Apply routing AFTER the peer connection is created (track now exists).
applyAudioOutputRouting()
// Proximity sensor dims the screen when the phone is held to the ear.
UIDevice.current.isProximityMonitoringEnabled = true
// Re-apply the current mute state to the local track now that audio is live.
localAudioTrack?.isEnabled = !uiState.isMuted
callLogger.info("[Call] Audio ready, track=\((self.localAudioTrack != nil).description, privacy: .public) enabled=\((!self.uiState.isMuted).description, privacy: .public)")
}
func ensurePeerConnectionAndOffer() async { func ensurePeerConnectionAndOffer() async {
// Guard: finishCall() may have run during the async gap before this Task executes. // Guard: finishCall() may have run during the async gap before this Task executes.
guard uiState.phase == .webRtcExchange else { return } guard uiState.phase == .webRtcExchange else {
callLogger.info("[Call] ensurePeerConnectionAndOffer: skipped (phase=\(self.uiState.phase.rawValue, privacy: .public))")
return
}
callLogger.info("[Call] ensurePeerConnectionAndOffer: starting")
do { do {
try configureAudioSession()
let peerConnection = try ensurePeerConnection() let peerConnection = try ensurePeerConnection()
applySenderCryptorIfPossible() applySenderCryptorIfPossible()
@@ -69,7 +113,22 @@ extension CallManager {
sdpOrCandidate: serializeSessionDescription(offer) sdpOrCandidate: serializeSessionDescription(offer)
) )
offerSent = true offerSent = true
callLogger.info("[Call] ensurePeerConnectionAndOffer: offer sent")
// Flush WebRTC packets AFTER local description is set.
// Buffered ANSWER requires local offer to be set first (signalingState must be
// have-local-offer, not stable). ICE candidates also need the remote description
// first — they'll go to bufferedRemoteCandidates via processWebRtcPacket.
if !bufferedWebRtcPackets.isEmpty {
callLogger.info("[Call] Flushing \(self.bufferedWebRtcPackets.count, privacy: .public) buffered WebRTC packets")
let packets = bufferedWebRtcPackets
bufferedWebRtcPackets.removeAll()
for packet in packets {
await processWebRtcPacket(packet, on: peerConnection)
}
}
} catch { } catch {
callLogger.error("[Call] ensurePeerConnectionAndOffer FAILED: \(error.localizedDescription, privacy: .public)")
finishCall(reason: "Failed to establish call", notifyPeer: false) finishCall(reason: "Failed to establish call", notifyPeer: false)
} }
} }
@@ -89,15 +148,20 @@ extension CallManager {
// re-enters via CallManager.endCall(). Skip if already idle or mid-finish. // re-enters via CallManager.endCall(). Skip if already idle or mid-finish.
guard !isFinishingCall, uiState.phase != .idle else { return } guard !isFinishingCall, uiState.phase != .idle else { return }
isFinishingCall = true isFinishingCall = true
pendingCallKitAccept = false
defer { isFinishingCall = false } defer { isFinishingCall = false }
print("[CallBar] finishCall(reason=\(reason ?? "nil")) phase=\(uiState.phase.rawValue) isMinimized=\(uiState.isMinimized)") callLogger.info("[Call] finishCall(reason=\(reason ?? "nil", privacy: .public)) phase=\(self.uiState.phase.rawValue, privacy: .public)")
// Log call stack to identify WHO triggered finishCall
let symbols = Thread.callStackSymbols.prefix(8).joined(separator: "\n ")
print("[CallBar] stack:\n \(symbols)")
let snapshot = uiState let snapshot = uiState
// Step 0: Cancel recovery/rebind tasks and clear packet buffer.
disconnectRecoveryTask?.cancel()
disconnectRecoveryTask = nil
e2eeRebindTask?.cancel()
e2eeRebindTask = nil
bufferedWebRtcPackets.removeAll()
// Step 1: Close WebRTC FIRST SFU sees peer disconnect immediately. // Step 1: Close WebRTC FIRST SFU sees peer disconnect immediately.
// Without this, SFU waits for ICE timeout (~30s) before releasing the room, // Without this, SFU waits for ICE timeout (~30s) before releasing the room,
// blocking new calls to the same peer. // blocking new calls to the same peer.
@@ -178,6 +242,8 @@ extension CallManager {
offerSent = false offerSent = false
remoteDescriptionSet = false remoteDescriptionSet = false
lastPeerSharedPublicHex = "" lastPeerSharedPublicHex = ""
audioSessionActivated = false
pendingWebRtcSetup = false
var finalState = CallUiState() var finalState = CallUiState()
if let reason, !reason.isEmpty { if let reason, !reason.isEmpty {
@@ -188,6 +254,7 @@ extension CallManager {
} }
deactivateAudioSession() deactivateAudioSession()
ProtocolManager.shared.endCallBackgroundTask()
} }
func ensureLocalSessionKeys() { func ensureLocalSessionKeys() {
@@ -198,6 +265,7 @@ extension CallManager {
} }
func hydratePeerIdentity(for publicKey: String) { func hydratePeerIdentity(for publicKey: String) {
let hadName = !uiState.peerTitle.isEmpty
if let dialog = DialogRepository.shared.dialogs[publicKey] { if let dialog = DialogRepository.shared.dialogs[publicKey] {
if uiState.peerTitle.isEmpty { if uiState.peerTitle.isEmpty {
uiState.peerTitle = dialog.opponentTitle uiState.peerTitle = dialog.opponentTitle
@@ -206,6 +274,11 @@ extension CallManager {
uiState.peerUsername = dialog.opponentUsername uiState.peerUsername = dialog.opponentUsername
} }
} }
// If we just resolved a name that wasn't available before (e.g. DB loaded
// after cold start), update CallKit so lock screen shows the name.
if !hadName, !uiState.peerTitle.isEmpty {
CallKitManager.shared.updateCallerName(uiState.displayName)
}
} }
func applySenderCryptorIfPossible() { func applySenderCryptorIfPossible() {
@@ -237,6 +310,8 @@ extension CallManager {
func ensurePeerConnection() throws -> RTCPeerConnection { func ensurePeerConnection() throws -> RTCPeerConnection {
if let currentPeerConnection = self.peerConnection { return currentPeerConnection } if let currentPeerConnection = self.peerConnection { return currentPeerConnection }
callLogger.info("[Call] Creating new peer connection (iceServers=\(self.iceServers.count, privacy: .public))")
if peerConnectionFactory == nil { if peerConnectionFactory == nil {
RTCPeerConnectionFactory.initialize() RTCPeerConnectionFactory.initialize()
peerConnectionFactory = RTCPeerConnectionFactory() peerConnectionFactory = RTCPeerConnectionFactory()
@@ -268,25 +343,36 @@ extension CallManager {
self.localAudioSource = audioSource self.localAudioSource = audioSource
self.localAudioTrack = audioTrack self.localAudioTrack = audioTrack
self.peerConnection = connection self.peerConnection = connection
callLogger.info("[Call] Peer connection created, audio track enabled=\((!self.uiState.isMuted).description, privacy: .public)")
return connection return connection
} }
func configureAudioSession() throws { func configureAudioSession() throws {
let session = AVAudioSession.sharedInstance() let rtcSession = RTCAudioSession.sharedInstance()
try session.setCategory( rtcSession.lockForConfiguration()
defer { rtcSession.unlockForConfiguration() }
try rtcSession.setCategory(
.playAndRecord, .playAndRecord,
mode: .voiceChat, mode: .voiceChat,
options: [.allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker] options: [.allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
) )
try session.setActive(true) // Do NOT call setActive(true) session is already activated by CallKit
// via audioSessionDidActivate(). Double activation increments WebRTC's
// internal activation count, causing deactivateAudioSession() to decrement
// to 1 instead of 0 AVAudioSession never actually deactivates.
applyAudioOutputRouting() applyAudioOutputRouting()
UIDevice.current.isProximityMonitoringEnabled = true UIDevice.current.isProximityMonitoringEnabled = true
print("[Call] AudioSession configured: category=\(session.category.rawValue) mode=\(session.mode.rawValue) sampleRate=\(session.sampleRate) outputs=\(session.currentRoute.outputs.map { "\($0.portName)(\($0.portType.rawValue))" }) inputs=\(session.currentRoute.inputs.map { "\($0.portName)(\($0.portType.rawValue))" })") let session = AVAudioSession.sharedInstance()
callLogger.info("[Call] AudioSession configured: category=\(session.category.rawValue, privacy: .public) mode=\(session.mode.rawValue, privacy: .public)")
} }
func deactivateAudioSession() { func deactivateAudioSession() {
UIDevice.current.isProximityMonitoringEnabled = false UIDevice.current.isProximityMonitoringEnabled = false
try? AVAudioSession.sharedInstance().setActive(false, options: [.notifyOthersOnDeactivation]) let rtcSession = RTCAudioSession.sharedInstance()
rtcSession.lockForConfiguration()
try? rtcSession.setActive(false)
rtcSession.unlockForConfiguration()
rtcSession.isAudioEnabled = false
} }
func applyAudioOutputRouting() { func applyAudioOutputRouting() {
@@ -438,10 +524,10 @@ extension CallManager: RTCPeerConnectionDelegate {
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {} nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {}
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) { nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
Task { @MainActor in Task { @MainActor in
print("[Call] Remote stream added: \(stream.streamId), audioTracks=\(stream.audioTracks.count), videoTracks=\(stream.videoTracks.count)") callLogger.info("[Call] Remote stream added: \(stream.streamId, privacy: .public) audioTracks=\(stream.audioTracks.count, privacy: .public)")
for audioTrack in stream.audioTracks { for audioTrack in stream.audioTracks {
audioTrack.isEnabled = true audioTrack.isEnabled = true
print("[Call] Remote audio track: \(audioTrack.trackId), enabled=\(audioTrack.isEnabled), state=\(audioTrack.readyState.rawValue)") callLogger.info("[Call] Remote audio track: \(audioTrack.trackId, privacy: .public) enabled=\(audioTrack.isEnabled.description, privacy: .public) state=\(audioTrack.readyState.rawValue, privacy: .public)")
} }
} }
} }
@@ -462,35 +548,28 @@ extension CallManager: RTCPeerConnectionDelegate {
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {} nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {}
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCPeerConnectionState) { nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCPeerConnectionState) {
print("[Call] PeerConnection state: \(newState.rawValue)") callLogger.info("[Call] PeerConnection state: \(newState.rawValue, privacy: .public)")
if newState == .connected { if newState == .connected {
Task { @MainActor in Task { @MainActor in
self.setCallActiveIfNeeded() self.setCallActiveIfNeeded()
// Log audio state
let localEnabled = self.localAudioTrack?.isEnabled ?? false let localEnabled = self.localAudioTrack?.isEnabled ?? false
let senderCount = peerConnection.senders.count let senderCount = peerConnection.senders.count
let receiverCount = peerConnection.receivers.count let receiverCount = peerConnection.receivers.count
let transceiverCount = peerConnection.transceivers.count let transceiverCount = peerConnection.transceivers.count
print("[Call] CONNECTED — localAudio=\(localEnabled), senders=\(senderCount), receivers=\(receiverCount), transceivers=\(transceiverCount)") callLogger.info("[Call] CONNECTED — localAudio=\(localEnabled.description, privacy: .public) senders=\(senderCount, privacy: .public) receivers=\(receiverCount, privacy: .public) transceivers=\(transceiverCount, privacy: .public)")
for (i, t) in peerConnection.transceivers.enumerated() { for (i, t) in peerConnection.transceivers.enumerated() {
let dir = t.direction.rawValue let dir = t.direction.rawValue
let hasSender = t.sender.track != nil let hasSender = t.sender.track != nil
let hasReceiver = t.receiver.track != nil let hasReceiver = t.receiver.track != nil
let recvEnabled = t.receiver.track?.isEnabled ?? false let recvEnabled = t.receiver.track?.isEnabled ?? false
print("[Call] Transceiver[\(i)]: dir=\(dir) hasSender=\(hasSender) hasReceiver=\(hasReceiver) recvEnabled=\(recvEnabled)") callLogger.info("[Call] Transceiver[\(i, privacy: .public)]: dir=\(dir, privacy: .public) hasSender=\(hasSender.description, privacy: .public) hasReceiver=\(hasReceiver.description, privacy: .public) recvEnabled=\(recvEnabled.description, privacy: .public)")
} }
// Log audio session
let session = AVAudioSession.sharedInstance() let session = AVAudioSession.sharedInstance()
print("[Call] AudioSession: category=\(session.category.rawValue) mode=\(session.mode.rawValue) route.outputs=\(session.currentRoute.outputs.map { $0.portName })") callLogger.info("[Call] AudioSession: category=\(session.category.rawValue, privacy: .public) mode=\(session.mode.rawValue, privacy: .public)")
}
return
}
if newState == .failed || newState == .closed || newState == .disconnected {
Task { @MainActor in
print("[CallBar] PeerConnection \(newState.rawValue) → finishCall()")
self.finishCall(reason: "Connection lost", notifyPeer: false)
} }
} }
// NOTE: .failed/.disconnected/.closed handled by ICE state handler only.
// Previously both handlers called finishCall() dual-kill on single disconnect event.
} }
nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didStartReceivingOn transceiver: RTCRtpTransceiver) { nonisolated func peerConnection(_ peerConnection: RTCPeerConnection, didStartReceivingOn transceiver: RTCRtpTransceiver) {
@@ -498,7 +577,7 @@ extension CallManager: RTCPeerConnectionDelegate {
Task { @MainActor in Task { @MainActor in
receiver.track?.isEnabled = true receiver.track?.isEnabled = true
let trackKind = transceiver.mediaType == .audio ? "audio" : "video" let trackKind = transceiver.mediaType == .audio ? "audio" : "video"
print("[Call] didStartReceivingOn: \(trackKind), receiverId=\(receiver.receiverId), trackEnabled=\(receiver.track?.isEnabled ?? false)") callLogger.info("[Call] didStartReceivingOn: \(trackKind, privacy: .public) receiverId=\(receiver.receiverId, privacy: .public) trackEnabled=\((receiver.track?.isEnabled ?? false).description, privacy: .public)")
self.attachReceiverCryptor(receiver) self.attachReceiverCryptor(receiver)
} }
} }

View File

@@ -3,9 +3,12 @@ import AVFAudio
import Combine import Combine
import CryptoKit import CryptoKit
import Foundation import Foundation
import os
import SwiftUI import SwiftUI
import WebRTC import WebRTC
let callLogger = Logger(subsystem: "com.rosetta.messenger", category: "Call")
@MainActor @MainActor
final class CallManager: NSObject, ObservableObject { final class CallManager: NSObject, ObservableObject {
@@ -43,9 +46,35 @@ final class CallManager: NSObject, ObservableObject {
var liveActivity: Activity<CallActivityAttributes>? var liveActivity: Activity<CallActivityAttributes>?
/// Re-entrancy guard: prevents CXEndCallAction endCall() finishCall() loop. /// Re-entrancy guard: prevents CXEndCallAction endCall() finishCall() loop.
var isFinishingCall = false var isFinishingCall = false
/// Pending accept: user tapped Accept on CallKit before WebSocket delivered .call signal.
/// When handleSignalPacket(.call) arrives, auto-accept if this is true.
var pendingCallKitAccept = false
/// True after CallKit fires didActivate audio session has the entitlement.
/// WebRTC peer connection MUST NOT be created before this flag is true,
/// otherwise AURemoteIO init fails with "Missing entitlement" (-12988).
var audioSessionActivated = false
/// Buffered when .createRoom arrives before didActivate. Flushed in onAudioSessionActivated().
var pendingWebRtcSetup = false
/// Buffers WebRTC packets (OFFER/ANSWER/ICE) from SFU that arrive before
/// peerConnection is created (CallKit didActivate delay). Without this, packets
/// are silently dropped. Android doesn't need this — no CallKit.
var bufferedWebRtcPackets: [PacketWebRTC] = []
/// Recovery timer for ICE .disconnected state. Android waits 15s before ending call;
/// iOS was killing instantly on any brief network hiccup.
var disconnectRecoveryTask: Task<Void, Never>?
/// Periodic E2EE rebind loop (1.5s, Android parity). SFU may create new
/// RTP senders/receivers during renegotiation that need encryptor/decryptor attachment.
var e2eeRebindTask: Task<Void, Never>?
private override init() { private override init() {
super.init() super.init()
// Tell WebRTC to NOT auto-activate audio session when audio tracks are created.
// Audio will be enabled only after CallKit's didActivate callback fires.
// Without this, WebRTC calls AVAudioSession.setActive(true) before CallKit
// grants the entitlement — -12988 "Missing entitlement" — no audio on the first call.
let rtcSession = RTCAudioSession.sharedInstance()
rtcSession.useManualAudio = true
rtcSession.isAudioEnabled = false
wireProtocolHandlers() wireProtocolHandlers()
} }
@@ -61,6 +90,38 @@ final class CallManager: NSObject, ObservableObject {
ownPublicKey = publicKey.trimmingCharacters(in: .whitespacesAndNewlines) ownPublicKey = publicKey.trimmingCharacters(in: .whitespacesAndNewlines)
} }
/// Sets up incoming call state directly from the VoIP push payload.
/// Called when the app was killed — PushKit wakes it while the WebSocket is
/// not yet connected. The .call signal may never arrive (fire-and-forget),
/// so we set up state from the push payload itself.
/// - Parameters:
///   - callerKey: Public key of the calling peer; ignored when empty.
///   - callerName: Display name from the push payload; may be empty.
func setupIncomingCallFromPush(callerKey: String, callerName: String) {
// Only valid from idle — an in-progress call must not be clobbered.
guard uiState.phase == .idle else { return }
guard !callerKey.isEmpty else { return }
callLogger.info("setupIncomingCallFromPush: callerKey=\(callerKey.prefix(12), privacy: .public) name=\(callerName, privacy: .public)")
// Don't call beginCallSession() — it calls finishCall(), which kills the
// CallKit call that PushKit just reported. Set state directly instead.
uiState = CallUiState(
phase: .incoming,
peerPublicKey: callerKey,
peerTitle: callerName,
peerUsername: ""
)
role = .callee
uiState.statusText = "Incoming call..."
// Keep the WebSocket alive while the app is backgrounded for this call.
ProtocolManager.shared.beginCallBackgroundTask()
// Resolve a better display name from the local DB if it is available.
hydratePeerIdentity(for: callerKey)
startRingTimeout()
// Auto-accept if the user already tapped Accept on CallKit before this ran.
// Happens when the app was killed: VoIP push → CallKit Accept → WebSocket
// not yet connected, so CXAnswerCallAction fired before phase was .incoming.
if pendingCallKitAccept {
pendingCallKitAccept = false
callLogger.info("setupIncomingCallFromPush: auto-accepting (pendingCallKitAccept)")
let result = acceptIncomingCall()
callLogger.info("setupIncomingCallFromPush: auto-accept result=\(String(describing: result), privacy: .public)")
}
}
func onAuthenticated() { func onAuthenticated() {
ProtocolManager.shared.requestIceServers() ProtocolManager.shared.requestIceServers()
} }
@@ -81,6 +142,7 @@ final class CallManager: NSObject, ObservableObject {
uiState.phase = .outgoing uiState.phase = .outgoing
uiState.statusText = "Calling..." uiState.statusText = "Calling..."
ProtocolManager.shared.beginCallBackgroundTask()
CallKitManager.shared.startOutgoingCall(peerKey: target) CallKitManager.shared.startOutgoingCall(peerKey: target)
ProtocolManager.shared.sendCallSignal( ProtocolManager.shared.sendCallSignal(
@@ -119,7 +181,7 @@ final class CallManager: NSObject, ObservableObject {
} }
func declineIncomingCall() { func declineIncomingCall() {
print("[CallBar] declineIncomingCall() — phase=\(uiState.phase.rawValue)") callLogger.info("[Call] declineIncomingCall phase=\(self.uiState.phase.rawValue, privacy: .public)")
guard uiState.phase == .incoming else { return } guard uiState.phase == .incoming else { return }
if ownPublicKey.isEmpty == false, uiState.peerPublicKey.isEmpty == false { if ownPublicKey.isEmpty == false, uiState.peerPublicKey.isEmpty == false {
ProtocolManager.shared.sendCallSignal( ProtocolManager.shared.sendCallSignal(
@@ -132,7 +194,7 @@ final class CallManager: NSObject, ObservableObject {
} }
func endCall() { func endCall() {
print("[CallBar] endCall() — phase=\(uiState.phase.rawValue) isMinimized=\(uiState.isMinimized)") callLogger.info("[Call] endCall phase=\(self.uiState.phase.rawValue, privacy: .public)")
finishCall(reason: nil, notifyPeer: true) finishCall(reason: nil, notifyPeer: true)
} }
@@ -142,7 +204,7 @@ final class CallManager: NSObject, ObservableObject {
localAudioTrack?.isEnabled = !nextMuted localAudioTrack?.isEnabled = !nextMuted
CallKitManager.shared.setMuted(nextMuted) CallKitManager.shared.setMuted(nextMuted)
updateLiveActivity() updateLiveActivity()
print("[Call] toggleMute: isMuted=\(nextMuted), trackEnabled=\(localAudioTrack?.isEnabled ?? false), trackState=\(localAudioTrack?.readyState.rawValue ?? -1)") callLogger.info("[Call] toggleMute isMuted=\(nextMuted.description, privacy: .public)")
} }
func toggleSpeaker() { func toggleSpeaker() {
@@ -150,14 +212,14 @@ final class CallManager: NSObject, ObservableObject {
uiState.isSpeakerOn = nextSpeaker uiState.isSpeakerOn = nextSpeaker
applyAudioOutputRouting() applyAudioOutputRouting()
let route = AVAudioSession.sharedInstance().currentRoute let route = AVAudioSession.sharedInstance().currentRoute
print("[Call] toggleSpeaker: isSpeakerOn=\(nextSpeaker), outputs=\(route.outputs.map { $0.portName })") callLogger.info("[Call] toggleSpeaker isSpeakerOn=\(nextSpeaker.description, privacy: .public)")
} }
func minimizeCall() { func minimizeCall() {
guard uiState.isVisible else { return } guard uiState.isVisible else { return }
pendingMinimizeTask?.cancel() pendingMinimizeTask?.cancel()
pendingMinimizeTask = nil pendingMinimizeTask = nil
print("[CallBar] minimizeCall() — phase=\(uiState.phase.rawValue)") callLogger.info("[Call] minimizeCall phase=\(self.uiState.phase.rawValue, privacy: .public)")
withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) { withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) {
uiState.isMinimized = true uiState.isMinimized = true
} }
@@ -167,7 +229,7 @@ final class CallManager: NSObject, ObservableObject {
pendingMinimizeTask?.cancel() pendingMinimizeTask?.cancel()
pendingMinimizeTask = nil pendingMinimizeTask = nil
guard uiState.isVisible else { return } guard uiState.isVisible else { return }
print("[CallBar] expandCall() — phase=\(uiState.phase.rawValue)") callLogger.info("[Call] expandCall phase=\(self.uiState.phase.rawValue, privacy: .public)")
withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) { withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) {
uiState.isMinimized = false uiState.isMinimized = false
} }
@@ -194,7 +256,7 @@ final class CallManager: NSObject, ObservableObject {
} }
private func handleSignalPacket(_ packet: PacketSignalPeer) { private func handleSignalPacket(_ packet: PacketSignalPeer) {
print("[CallBar] handleSignalPacket: type=\(packet.signalType) phase=\(uiState.phase.rawValue) isMinimized=\(uiState.isMinimized)") callLogger.info("[Call] handleSignalPacket: type=\(String(describing: packet.signalType), privacy: .public) phase=\(self.uiState.phase.rawValue, privacy: .public)")
switch packet.signalType { switch packet.signalType {
case .endCallBecauseBusy: case .endCallBecauseBusy:
finishCall(reason: "User is busy", notifyPeer: false, skipAttachment: true) finishCall(reason: "User is busy", notifyPeer: false, skipAttachment: true)
@@ -220,6 +282,14 @@ final class CallManager: NSObject, ObservableObject {
let incomingPeer = packet.src.trimmingCharacters(in: .whitespacesAndNewlines) let incomingPeer = packet.src.trimmingCharacters(in: .whitespacesAndNewlines)
guard incomingPeer.isEmpty == false else { return } guard incomingPeer.isEmpty == false else { return }
guard uiState.phase == .idle else { guard uiState.phase == .idle else {
// Already in a call with this peer ignore duplicate .call signal.
// Server re-delivers .call after WebSocket reconnect; without this guard,
// the code sends .endCallBecauseBusy which terminates the active call.
if incomingPeer == uiState.peerPublicKey {
callLogger.info("Ignoring duplicate .call signal — already in call with this peer (phase=\(self.uiState.phase.rawValue, privacy: .public))")
return
}
// Different peer trying to call send busy.
ProtocolManager.shared.sendCallSignal( ProtocolManager.shared.sendCallSignal(
signalType: .endCallBecauseBusy, signalType: .endCallBecauseBusy,
src: ownPublicKey, src: ownPublicKey,
@@ -230,6 +300,7 @@ final class CallManager: NSObject, ObservableObject {
beginCallSession(peerPublicKey: incomingPeer, title: "", username: "") beginCallSession(peerPublicKey: incomingPeer, title: "", username: "")
role = .callee role = .callee
uiState.phase = .incoming uiState.phase = .incoming
ProtocolManager.shared.beginCallBackgroundTask()
withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) { withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) {
uiState.isMinimized = false uiState.isMinimized = false
} }
@@ -247,6 +318,14 @@ final class CallManager: NSObject, ObservableObject {
} }
// No playRingtone / startLiveActivity CallKit handles ringtone and Dynamic Island. // No playRingtone / startLiveActivity CallKit handles ringtone and Dynamic Island.
startRingTimeout() startRingTimeout()
// Auto-accept if user already tapped Accept on CallKit before WebSocket connected.
// This happens when app was killed VoIP push CallKit Accept WebSocket reconnects.
if pendingCallKitAccept {
pendingCallKitAccept = false
let result = acceptIncomingCall()
callLogger.info("Auto-accept: result=\(String(describing: result), privacy: .public) ownKey=\(self.ownPublicKey.isEmpty ? "EMPTY" : String(self.ownPublicKey.prefix(12)), privacy: .public) phase=\(self.uiState.phase.rawValue, privacy: .public) wsState=\(String(describing: ProtocolManager.shared.connectionState), privacy: .public)")
}
case .keyExchange: case .keyExchange:
handleKeyExchange(packet) handleKeyExchange(packet)
case .createRoom: case .createRoom:
@@ -255,8 +334,17 @@ final class CallManager: NSObject, ObservableObject {
roomId = incomingRoomId roomId = incomingRoomId
uiState.phase = .webRtcExchange uiState.phase = .webRtcExchange
uiState.statusText = "Connecting..." uiState.statusText = "Connecting..."
Task { [weak self] in // Defer WebRTC peer connection setup until CallKit grants audio entitlement.
await self?.ensurePeerConnectionAndOffer() // Creating RTCPeerConnection + audio track BEFORE didActivate causes
// AURemoteIO to fail with "Missing entitlement" (-12988), poisoning audio
// for the entire call. If didActivate already fired, proceed immediately.
if audioSessionActivated {
Task { [weak self] in
await self?.ensurePeerConnectionAndOffer()
}
} else {
pendingWebRtcSetup = true
callLogger.info("[Call] Deferring WebRTC setup — waiting for CallKit didActivate")
} }
case .activeCall: case .activeCall:
break break
@@ -286,6 +374,7 @@ final class CallManager: NSObject, ObservableObject {
sharedKey = derivedSharedKey sharedKey = derivedSharedKey
uiState.keyCast = derivedSharedKey.hexString uiState.keyCast = derivedSharedKey.hexString
applySenderCryptorIfPossible() applySenderCryptorIfPossible()
startE2EERebindLoop()
cancelRingTimeout() cancelRingTimeout()
CallSoundManager.shared.stopAll() CallSoundManager.shared.stopAll()
@@ -375,18 +464,18 @@ final class CallManager: NSObject, ObservableObject {
} }
let authInfo = ActivityAuthorizationInfo() let authInfo = ActivityAuthorizationInfo()
print("[Call] LiveActivity: areActivitiesEnabled=\(authInfo.areActivitiesEnabled), frequentPushesEnabled=\(authInfo.frequentPushesEnabled)") callLogger.info("[Call] LiveActivity: areActivitiesEnabled=\(authInfo.areActivitiesEnabled.description, privacy: .public)")
guard authInfo.areActivitiesEnabled else { guard authInfo.areActivitiesEnabled else {
print("[Call] LiveActivity DISABLED by user settings") callLogger.info("[Call] LiveActivity DISABLED by user settings")
return return
} }
// Compress avatar to fit ActivityKit 4KB limit while maximizing quality // Compress avatar to fit ActivityKit 4KB limit while maximizing quality
var avatarThumb: Data? var avatarThumb: Data?
if let avatar = AvatarRepository.shared.loadAvatar(publicKey: uiState.peerPublicKey) { if let avatar = AvatarRepository.shared.loadAvatar(publicKey: uiState.peerPublicKey) {
avatarThumb = Self.compressAvatarForActivity(avatar) avatarThumb = Self.compressAvatarForActivity(avatar)
print("[Call] Avatar thumb: \(avatarThumb?.count ?? 0) bytes") callLogger.info("[Call] Avatar thumb: \(avatarThumb?.count ?? 0, privacy: .public) bytes")
} else { } else {
print("[Call] No avatar for peer") callLogger.info("[Call] No avatar for peer")
} }
let attributes = CallActivityAttributes( let attributes = CallActivityAttributes(
peerName: uiState.displayName, peerName: uiState.displayName,
@@ -399,16 +488,16 @@ final class CallManager: NSObject, ObservableObject {
isActive: uiState.phase == .active, isActive: uiState.phase == .active,
isMuted: uiState.isMuted isMuted: uiState.isMuted
) )
print("[Call] LiveActivity starting: peerName=\(uiState.displayName), isActive=\(uiState.phase == .active)") callLogger.info("[Call] LiveActivity starting: peerName=\(self.uiState.displayName, privacy: .public)")
do { do {
liveActivity = try Activity.request( liveActivity = try Activity.request(
attributes: attributes, attributes: attributes,
content: .init(state: state, staleDate: nil), content: .init(state: state, staleDate: nil),
pushType: nil pushType: nil
) )
print("[Call] LiveActivity started: id=\(liveActivity?.id ?? "nil"), state=\(String(describing: liveActivity?.activityState))") callLogger.info("[Call] LiveActivity started: id=\(self.liveActivity?.id ?? "nil", privacy: .public)")
} catch { } catch {
print("[Call] LiveActivity FAILED: \(error)") callLogger.error("[Call] LiveActivity FAILED: \(error.localizedDescription, privacy: .public)")
} }
} }
@@ -448,6 +537,26 @@ final class CallManager: NSObject, ObservableObject {
} }
} }
/// Periodic E2EE rebind loop (Android parity: 1.5s interval).
/// The SFU may create new RTP senders/receivers during renegotiation; without
/// re-attaching the encryptor/decryptor, frames from new senders stay
/// undecryptable and the call goes silent.
func startE2EERebindLoop() {
    e2eeRebindTask?.cancel()
    e2eeRebindTask = Task { @MainActor [weak self] in
        while true {
            try? await Task.sleep(for: .milliseconds(1500))
            if Task.isCancelled { return }
            guard let self else { return }

            // Rebind only while signaling/media is live and the key is derived.
            let phase = self.uiState.phase
            if phase != .webRtcExchange && phase != .active { continue }
            guard self.sharedKey != nil, let connection = self.peerConnection else { continue }

            self.applySenderCryptorIfPossible()
            connection.receivers.forEach { self.attachReceiverCryptor($0) }
        }
    }
}
func handleGeneratedCandidate(_ candidate: RTCIceCandidate) { func handleGeneratedCandidate(_ candidate: RTCIceCandidate) {
let payload: [String: Any] = [ let payload: [String: Any] = [
"candidate": candidate.sdp, "candidate": candidate.sdp,
@@ -462,17 +571,42 @@ final class CallManager: NSObject, ObservableObject {
} }
func handleIceConnectionStateChanged(_ state: RTCIceConnectionState) { func handleIceConnectionStateChanged(_ state: RTCIceConnectionState) {
print("[CallBar] ICE state changed: \(state.rawValue) phase=\(uiState.phase.rawValue) isMinimized=\(uiState.isMinimized)") callLogger.info("[Call] ICE state: \(state.rawValue, privacy: .public) phase=\(self.uiState.phase.rawValue, privacy: .public)")
switch state { switch state {
case .connected, .completed: case .connected, .completed:
disconnectRecoveryTask?.cancel()
disconnectRecoveryTask = nil
setCallActiveIfNeeded() setCallActiveIfNeeded()
case .failed, .closed, .disconnected: case .disconnected:
print("[CallBar] ICE \(state.rawValue) → finishCall()") // Temporary ICE may recover (WiFiCellular switch, brief interruption).
finishCall(reason: "Connection lost", notifyPeer: false) // Android waits 15s. Previous iOS code killed instantly unstable calls.
startDisconnectRecoveryTimer(timeout: 15)
case .failed:
// More serious unlikely to recover. Shorter timeout than .disconnected.
startDisconnectRecoveryTimer(timeout: 5)
case .closed:
finishCall(reason: "Connection closed", notifyPeer: false)
default: default:
break break
} }
} }
/// Arms a one-shot grace timer after ICE drops to .disconnected/.failed.
/// If ICE is back to connected/completed by the time the timer fires, the call
/// survives; otherwise the call is ended with "Connection lost".
/// Any previously armed timer is cancelled first.
func startDisconnectRecoveryTimer(timeout: Int) {
    disconnectRecoveryTask?.cancel()
    disconnectRecoveryTask = Task { @MainActor [weak self] in
        callLogger.info("[Call] Waiting \(timeout, privacy: .public)s for ICE recovery...")
        try? await Task.sleep(for: .seconds(timeout))
        if Task.isCancelled { return }
        guard let self else { return }

        switch self.peerConnection?.iceConnectionState {
        case .connected?, .completed?:
            callLogger.info("[Call] ICE recovered during wait — call continues")
        default:
            callLogger.info("[Call] ICE did not recover in \(timeout, privacy: .public)s — ending call")
            self.finishCall(reason: "Connection lost", notifyPeer: false)
        }
    }
}
// MARK: - Adaptive Avatar Compressor // MARK: - Adaptive Avatar Compressor
/// Compresses avatar to fit ActivityKit's ~4KB attributes limit. /// Compresses avatar to fit ActivityKit's ~4KB attributes limit.

View File

@@ -0,0 +1,92 @@
import Foundation
import Security
import os
/// Stores minimal session credentials in Keychain for background VoIP push wake-up.
/// Only `publicKey` + `privateKeyHash` are persisted — NOT the raw private key.
/// Uses `kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly` so PushKit can read
/// them even when the app was killed, as long as the device has been unlocked once.
final class SessionCredentialsManager: @unchecked Sendable {

    static let shared = SessionCredentialsManager()

    private static let logger = Logger(subsystem: "com.rosetta.messenger", category: "SessionCreds")
    private static let service = "com.rosetta.messenger.session"
    private static let account = "voip_session_credentials"

    /// Restored credential pair; mirrors what `save(publicKey:privateKeyHash:)` wrote.
    struct Credentials {
        let publicKey: String
        let privateKeyHash: String
    }

    private init() {}

    /// Base query identifying our single keychain item; shared by save/load/clear
    /// so the three operations can never drift out of sync.
    private static var baseQuery: [String: Any] {
        [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrService as String: service,
            kSecAttrAccount as String: account,
        ]
    }

    // MARK: - Save

    /// Persists `publicKey:privateKeyHash` to the Keychain, replacing any previous item.
    func save(publicKey: String, privateKeyHash: String) {
        let payload = "\(publicKey):\(privateKeyHash)"
        guard let data = payload.data(using: .utf8) else { return }

        // SecItemAdd fails with errSecDuplicateItem if an item already exists —
        // delete first (ignoring errSecItemNotFound on a fresh install).
        SecItemDelete(Self.baseQuery as CFDictionary)

        var addQuery = Self.baseQuery
        addQuery[kSecValueData as String] = data
        addQuery[kSecAttrAccessible as String] = kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly

        let status = SecItemAdd(addQuery as CFDictionary, nil)
        if status == errSecSuccess {
            Self.logger.info("Session credentials saved to Keychain")
        } else {
            Self.logger.error("Failed to save session credentials: \(status)")
        }
    }

    // MARK: - Load

    /// Restores credentials written by `save(publicKey:privateKeyHash:)`.
    /// Returns nil when the item is missing, unreadable, or malformed.
    func load() -> Credentials? {
        var query = Self.baseQuery
        query[kSecReturnData as String] = true
        query[kSecMatchLimit as String] = kSecMatchLimitOne

        var result: AnyObject?
        let status = SecItemCopyMatching(query as CFDictionary, &result)
        guard status == errSecSuccess,
              let data = result as? Data,
              let payload = String(data: data, encoding: .utf8) else {
            return nil
        }
        // Split on the FIRST colon only. The public key is expected to contain no
        // colon, but the hash segment conceivably could; the previous
        // `components(separatedBy:)` + `count == 2` check rejected such payloads.
        let parts = payload.split(separator: ":", maxSplits: 1, omittingEmptySubsequences: false)
        guard parts.count == 2, !parts[0].isEmpty, !parts[1].isEmpty else {
            return nil
        }
        return Credentials(publicKey: String(parts[0]), privateKeyHash: String(parts[1]))
    }

    // MARK: - Clear

    /// Removes the stored credentials (e.g. on logout / endSession()).
    func clear() {
        SecItemDelete(Self.baseQuery as CFDictionary)
        Self.logger.info("Session credentials cleared from Keychain")
    }
}

View File

@@ -225,6 +225,9 @@ final class SessionManager {
let hash = crypto.generatePrivateKeyHash(privateKeyHex: privateKeyHex) let hash = crypto.generatePrivateKeyHash(privateKeyHex: privateKeyHex)
privateKeyHash = hash privateKeyHash = hash
ProtocolManager.shared.connect(publicKey: account.publicKey, privateKeyHash: hash) ProtocolManager.shared.connect(publicKey: account.publicKey, privateKeyHash: hash)
// Persist minimal credentials for background VoIP push wake-up.
// PushKit can restore WebSocket connection without user interaction.
SessionCredentialsManager.shared.save(publicKey: account.publicKey, privateKeyHash: hash)
#if DEBUG #if DEBUG
Self.logger.info("⏱ CONN_PERF: connectCalled \(Int((CFAbsoluteTimeGetCurrent() - sessionStart) * 1000))ms") Self.logger.info("⏱ CONN_PERF: connectCalled \(Int((CFAbsoluteTimeGetCurrent() - sessionStart) * 1000))ms")
@@ -1116,6 +1119,7 @@ final class SessionManager {
/// Ends the session and disconnects. /// Ends the session and disconnects.
func endSession() { func endSession() {
ProtocolManager.shared.disconnect() ProtocolManager.shared.disconnect()
SessionCredentialsManager.shared.clear()
privateKeyHash = nil privateKeyHash = nil
privateKeyHex = nil privateKeyHex = nil
lastTypingSentAt.removeAll() lastTypingSentAt.removeAll()

View File

@@ -363,6 +363,13 @@ private extension UnlockView {
/// Auto-triggers biometric unlock after animations complete. /// Auto-triggers biometric unlock after animations complete.
func autoTriggerBiometric() async { func autoTriggerBiometric() async {
// Don't trigger Face ID during an active call CallKit handles the call UI.
// Face ID fails in background anyway ("Caller is not running foreground").
if CallKitManager.shared.hasPendingCall() ||
CallManager.shared.uiState.phase != .idle {
return
}
guard canUseBiometric, !biometricTriggered else { return } guard canUseBiometric, !biometricTriggered else { return }
biometricTriggered = true biometricTriggered = true

View File

@@ -41,84 +41,87 @@ struct ForwardChatPickerView: View {
} }
var body: some View { var body: some View {
VStack(spacing: 0) { ZStack {
// MARK: - Header Color.black.ignoresSafeArea()
ForwardPickerHeader( VStack(spacing: 0) {
isMultiSelect: isMultiSelect, // MARK: - Header
onClose: { dismiss() }, ForwardPickerHeader(
onSelect: { isMultiSelect: isMultiSelect,
withAnimation(.easeInOut(duration: 0.2)) { onClose: { dismiss() },
isMultiSelect = true onSelect: {
withAnimation(.easeInOut(duration: 0.2)) {
isMultiSelect = true
}
} }
} )
)
// MARK: - Search // MARK: - Search
ForwardPickerSearchBar(searchText: $searchText) ForwardPickerSearchBar(searchText: $searchText)
.padding(.horizontal, 8) .padding(.horizontal, 8)
.padding(.top, 8) .padding(.top, 8)
.padding(.bottom, 6) .padding(.bottom, 6)
// MARK: - Chat List // MARK: - Chat List
if dialogs.isEmpty && !searchText.isEmpty { if dialogs.isEmpty && !searchText.isEmpty {
VStack { VStack {
Spacer() Spacer()
Text("No chats found") Text("No chats found")
.font(.system(size: 15)) .font(.system(size: 15))
.foregroundStyle(Color(white: 0.5)) .foregroundStyle(Color(white: 0.5))
Spacer() Spacer()
} }
} else { } else {
ScrollView { ScrollView {
LazyVStack(spacing: 0) { LazyVStack(spacing: 0) {
ForEach(Array(dialogs.enumerated()), id: \.element.id) { index, dialog in ForEach(Array(dialogs.enumerated()), id: \.element.id) { index, dialog in
ForwardPickerRow( ForwardPickerRow(
dialog: dialog, dialog: dialog,
isMultiSelect: isMultiSelect, isMultiSelect: isMultiSelect,
isSelected: selectedIds.contains(dialog.opponentKey) isSelected: selectedIds.contains(dialog.opponentKey)
) { ) {
if isMultiSelect { if isMultiSelect {
withAnimation(.easeInOut(duration: 0.15)) { withAnimation(.easeInOut(duration: 0.15)) {
if selectedIds.contains(dialog.opponentKey) { if selectedIds.contains(dialog.opponentKey) {
selectedIds.remove(dialog.opponentKey) selectedIds.remove(dialog.opponentKey)
} else { } else {
selectedIds.insert(dialog.opponentKey) selectedIds.insert(dialog.opponentKey)
}
} }
} else {
onSelect([ChatRoute(dialog: dialog)])
} }
} else {
onSelect([ChatRoute(dialog: dialog)])
} }
}
if index < dialogs.count - 1 { if index < dialogs.count - 1 {
Divider() Divider()
.padding(.leading, 65) .padding(.leading, 65)
.foregroundStyle(Color(red: 0x54 / 255.0, green: 0x54 / 255.0, blue: 0x58 / 255.0).opacity(0.55)) .foregroundStyle(Color(red: 0x54 / 255.0, green: 0x54 / 255.0, blue: 0x58 / 255.0).opacity(0.55))
}
} }
} }
} }
.scrollDismissesKeyboard(.interactively)
} }
.scrollDismissesKeyboard(.interactively)
}
// MARK: - Bottom Bar (multi-select) // MARK: - Bottom Bar (multi-select)
if isMultiSelect { if isMultiSelect {
ForwardPickerBottomBar( ForwardPickerBottomBar(
selectedCount: selectedIds.count, selectedCount: selectedIds.count,
onSend: { onSend: {
let routes = dialogs let routes = dialogs
.filter { selectedIds.contains($0.opponentKey) } .filter { selectedIds.contains($0.opponentKey) }
.map { ChatRoute(dialog: $0) } .map { ChatRoute(dialog: $0) }
if !routes.isEmpty { onSelect(routes) } if !routes.isEmpty { onSelect(routes) }
} }
) )
.transition(.move(edge: .bottom).combined(with: .opacity)) .transition(.move(edge: .bottom).combined(with: .opacity))
}
} }
} }
.background(Color.black.ignoresSafeArea())
.preferredColorScheme(.dark) .preferredColorScheme(.dark)
.presentationBackground(Color.black) .presentationBackground(Color.black)
.presentationDragIndicator(.visible) .presentationDragIndicator(.hidden)
.presentationDetents([.large])
} }
} }
@@ -205,9 +208,17 @@ private struct ForwardPickerHeader: View {
if !isMultiSelect { if !isMultiSelect {
HStack { HStack {
Spacer() Spacer()
Button("Select", action: onSelect) Button(action: onSelect) {
.font(.system(size: 17, weight: .regular)) Text("Select")
.foregroundStyle(.white) .font(.system(size: 17, weight: .regular))
.foregroundStyle(.white)
.padding(.horizontal, 14)
.frame(height: 30)
.background {
Capsule()
.fill(Color(white: 0.16))
}
}
} }
} }
} }
@@ -255,7 +266,7 @@ private struct ForwardPickerSearchBar: View {
.frame(height: 44) .frame(height: 44)
.background { .background {
RoundedRectangle(cornerRadius: 22, style: .continuous) RoundedRectangle(cornerRadius: 22, style: .continuous)
.fill(Color(white: 0.14)) .fill(Color.white.opacity(0.1))
} }
} }
} }
@@ -275,7 +286,7 @@ private struct ForwardPickerRow: View {
HStack(spacing: 4) { HStack(spacing: 4) {
Text(dialog.isSavedMessages ? "Saved Messages" : dialog.opponentTitle) Text(dialog.isSavedMessages ? "Saved Messages" : dialog.opponentTitle)
.font(.system(size: 17, weight: .regular)) .font(.system(size: 17, weight: .medium))
.foregroundStyle(.white) .foregroundStyle(.white)
.lineLimit(1) .lineLimit(1)
@@ -361,7 +372,7 @@ private struct ForwardPickerBottomBar: View {
.frame(height: 42) .frame(height: 42)
.background { .background {
RoundedRectangle(cornerRadius: 21, style: .continuous) RoundedRectangle(cornerRadius: 21, style: .continuous)
.fill(Color(white: 0.14)) .fill(Color.white.opacity(0.1))
} }
// Telegram send button: 33pt circle + SVG arrow // Telegram send button: 33pt circle + SVG arrow

View File

@@ -72,10 +72,9 @@ final class ImageViewerPresenter {
// MARK: - ImageGalleryViewer // MARK: - ImageGalleryViewer
/// Telegram-style multi-photo gallery viewer with hero transition animation. /// Multi-photo gallery viewer with hero transition animation.
/// Reference: PhotosTransition/Helpers/PhotoGridView.swift hero expand/collapse pattern. /// Adapted 1:1 from PhotosTransition/Helpers/PhotoGridView.swift DetailPhotosView.
/// Android parity: `ImageViewerScreen.kt` top bar with sender/date, /// Hero positioning is per-page INSIDE ForEach (not on TabView).
/// bottom caption bar, edge-tap navigation, velocity dismiss, share/save.
struct ImageGalleryViewer: View { struct ImageGalleryViewer: View {
let state: ImageViewerState let state: ImageViewerState
@@ -85,11 +84,8 @@ struct ImageGalleryViewer: View {
@State private var showControls = true @State private var showControls = true
@State private var currentZoomScale: CGFloat = 1.0 @State private var currentZoomScale: CGFloat = 1.0
@State private var isDismissing = false @State private var isDismissing = false
/// Hero transition state: false = positioned at source frame, true = fullscreen.
@State private var isExpanded: Bool = false @State private var isExpanded: Bool = false
/// Drag offset for interactive pan-to-dismiss.
@State private var dragOffset: CGSize = .zero @State private var dragOffset: CGSize = .zero
/// Full screen dimensions (captured from geometry).
@State private var viewSize: CGSize = UIScreen.main.bounds.size @State private var viewSize: CGSize = UIScreen.main.bounds.size
private static let dateFormatter: DateFormatter = { private static let dateFormatter: DateFormatter = {
@@ -108,95 +104,69 @@ struct ImageGalleryViewer: View {
state.images.indices.contains(currentPage) ? state.images[currentPage] : nil state.images.indices.contains(currentPage) ? state.images[currentPage] : nil
} }
/// Whether the source frame is valid for hero animation (non-zero).
private var hasHeroSource: Bool {
state.sourceFrame.width > 0 && state.sourceFrame.height > 0
}
/// Hero animation spring matches PhotosTransition reference.
private var heroAnimation: Animation { private var heroAnimation: Animation {
.interpolatingSpring(duration: 0.3, bounce: 0, initialVelocity: 0) .interpolatingSpring(duration: 0.3, bounce: 0, initialVelocity: 0)
} }
/// Opacity that decreases as user drags further from center.
private var interactiveOpacity: CGFloat { private var interactiveOpacity: CGFloat {
let opacityY = abs(dragOffset.height) / (viewSize.height * 0.3) let opacityY = abs(dragOffset.height) / (viewSize.height * 0.3)
return max(1 - opacityY, 0) return isExpanded ? max(1 - opacityY, 0) : 0
} }
var body: some View { var body: some View {
let sourceFrame = state.sourceFrame let sourceFrame = state.sourceFrame
ZStack { // Hero positioning per-page inside ForEach matches reference exactly
// Background fades with hero expansion and drag progress TabView(selection: $currentPage) {
Color.black ForEach(Array(state.images.enumerated()), id: \.element.attachmentId) { index, info in
.opacity(isExpanded ? interactiveOpacity : 0) ZoomableImagePage(
attachmentId: info.attachmentId,
onDismiss: { dismissAction() },
showControls: $showControls,
currentScale: $currentZoomScale,
onEdgeTap: { direction in navigateEdgeTap(direction: direction) }
)
.frame(
width: isExpanded ? viewSize.width : sourceFrame.width,
height: isExpanded ? viewSize.height : sourceFrame.height
)
.clipped()
.offset(
x: isExpanded ? 0 : sourceFrame.minX,
y: isExpanded ? 0 : sourceFrame.minY
)
.offset(dragOffset)
.frame(
maxWidth: .infinity,
maxHeight: .infinity,
alignment: isExpanded ? .center : .topLeading
)
.tag(index)
.ignoresSafeArea() .ignoresSafeArea()
// Pager with hero positioning
TabView(selection: $currentPage) {
ForEach(Array(state.images.enumerated()), id: \.element.attachmentId) { index, info in
ZoomableImagePage(
attachmentId: info.attachmentId,
onDismiss: { dismiss() },
showControls: $showControls,
currentScale: $currentZoomScale,
onEdgeTap: { direction in
navigateEdgeTap(direction: direction)
}
)
.tag(index)
}
} }
.tabViewStyle(.page(indexDisplayMode: .never))
.scrollDisabled(currentZoomScale > 1.05 || isDismissing)
// Hero frame: source rect when collapsed, full screen when expanded
.frame(
width: isExpanded ? viewSize.width : (hasHeroSource ? sourceFrame.width : viewSize.width),
height: isExpanded ? viewSize.height : (hasHeroSource ? sourceFrame.height : viewSize.height)
)
.clipped()
.offset(
x: isExpanded ? 0 : (hasHeroSource ? sourceFrame.minX : 0),
y: isExpanded ? 0 : (hasHeroSource ? sourceFrame.minY : 0)
)
.offset(dragOffset)
.frame(
maxWidth: .infinity,
maxHeight: .infinity,
alignment: isExpanded ? .center : (hasHeroSource ? .topLeading : .center)
)
.ignoresSafeArea()
// Interactive drag gesture for hero dismiss (vertical only, when not zoomed)
.simultaneousGesture(
currentZoomScale <= 1.05 ?
DragGesture(minimumDistance: 40)
.onChanged { value in
let dy = abs(value.translation.height)
let dx = abs(value.translation.width)
guard dy > dx * 2.0 else { return }
dragOffset = .init(width: value.translation.width, height: value.translation.height)
}
.onEnded { value in
if dragOffset.height > 50 {
heroDismiss()
} else {
withAnimation(heroAnimation.speed(1.2)) {
dragOffset = .zero
}
}
}
: nil
)
// Controls overlay fades with hero expansion
controlsOverlay
.opacity(isExpanded ? 1 : 0)
.opacity(interactiveOpacity)
} }
.statusBarHidden(true) .tabViewStyle(.page(indexDisplayMode: .never))
.ignoresSafeArea()
.scrollDisabled(currentZoomScale > 1.05 || isDismissing)
.contentShape(Rectangle())
.overlay {
// Pan gesture overlay UIKit gesture for iOS 17+ compat
HeroPanGestureOverlay { gesture in
handlePanGesture(gesture)
}
.allowsHitTesting(isExpanded && currentZoomScale <= 1.05)
}
.overlay {
overlayActions
}
.background {
Color.black
.opacity(interactiveOpacity)
.opacity(isExpanded ? 1 : 0)
.ignoresSafeArea()
}
.allowsHitTesting(isExpanded) .allowsHitTesting(isExpanded)
.onGeometryChange(for: CGSize.self, of: { $0.size }) { viewSize = $0 } .statusBarHidden(true)
.task { .task {
prefetchAdjacentImages(around: state.initialIndex) prefetchAdjacentImages(around: state.initialIndex)
guard !isExpanded else { return } guard !isExpanded else { return }
@@ -209,101 +179,95 @@ struct ImageGalleryViewer: View {
} }
} }
// MARK: - Controls Overlay // MARK: - Pan Gesture
/// Drives interactive pan-to-dismiss: mirrors the finger while the pan is live,
/// then on release either dismisses (dragged more than 50pt downward) or
/// springs the image back to center.
private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
    let translation = gesture.translation(in: gesture.view)
    switch gesture.state {
    case .began, .changed:
        dragOffset = CGSize(width: translation.x, height: translation.y)
    default:
        // Ended / cancelled / failed: commit or rubber-band back.
        if dragOffset.height > 50 {
            heroDismiss()
        } else {
            withAnimation(heroAnimation.speed(1.2)) {
                dragOffset = .zero
            }
        }
    }
}
// MARK: - Overlay Actions (matches PhotosTransition/ContentView.swift OverlayActionView)
@ViewBuilder @ViewBuilder
private var controlsOverlay: some View { private var overlayActions: some View {
VStack(spacing: 0) { let overlayOpacity: CGFloat = 1 - min(abs(dragOffset.height / 30), 1)
if showControls && !isDismissing {
topBar
.transition(.move(edge: .top).combined(with: .opacity))
}
Spacer()
if showControls && !isDismissing {
bottomBar
.transition(.move(edge: .bottom).combined(with: .opacity))
}
}
.animation(.easeOut(duration: 0.2), value: showControls)
}
// MARK: - Top Bar if showControls && !isDismissing && isExpanded {
VStack {
// Top actions
HStack {
glassButton(systemName: "chevron.left") { dismissAction() }
private var topBar: some View { Spacer(minLength: 0)
HStack(spacing: 8) {
Button { dismiss() } label: {
Image(systemName: "chevron.left")
.font(.system(size: 20, weight: .medium))
.foregroundStyle(.white)
.frame(width: 44, height: 44)
}
if let info = currentInfo { if state.images.count > 1 {
VStack(alignment: .leading, spacing: 1) { glassLabel("\(currentPage + 1) / \(state.images.count)")
Text(info.senderName) }
.font(.system(size: 16, weight: .semibold))
.foregroundStyle(.white)
.lineLimit(1)
Text(Self.dateFormatter.string(from: info.timestamp))
.font(.system(size: 13))
.foregroundStyle(.white.opacity(0.7))
} }
} .overlay {
if let info = currentInfo {
Spacer() glassLabel(info.senderName)
.contentTransition(.numericText())
if state.images.count > 1 { .animation(.easeInOut, value: currentPage)
Text("\(currentPage + 1) / \(state.images.count)") .frame(maxWidth: .infinity)
.font(.system(size: 15, weight: .medium)) }
.foregroundStyle(.white.opacity(0.8))
.padding(.trailing, 8)
}
}
.padding(.horizontal, 4)
.padding(.vertical, 8)
.background(Color.black.opacity(0.5).ignoresSafeArea(edges: .top))
}
// MARK: - Bottom Bar
private var bottomBar: some View {
VStack(spacing: 0) {
if let caption = currentInfo?.caption, !caption.isEmpty {
Text(caption)
.font(.system(size: 15))
.foregroundStyle(.white)
.lineLimit(4)
.frame(maxWidth: .infinity, alignment: .leading)
.padding(.horizontal, 16)
.padding(.vertical, 12)
.background(Color.black.opacity(0.5))
}
HStack(spacing: 32) {
Button { shareCurrentImage() } label: {
Image(systemName: "square.and.arrow.up")
.font(.system(size: 20, weight: .medium))
.foregroundStyle(.white)
.frame(width: 44, height: 44)
} }
Spacer() Spacer(minLength: 0)
Button { saveCurrentImage() } label: { // Bottom actions
Image(systemName: "square.and.arrow.down") HStack {
.font(.system(size: 20, weight: .medium)) glassButton(systemName: "square.and.arrow.up.fill") { shareCurrentImage() }
.foregroundStyle(.white)
.frame(width: 44, height: 44) Spacer(minLength: 0)
glassButton(systemName: "square.and.arrow.down") { saveCurrentImage() }
} }
} }
.padding(.horizontal, 24) .padding(.horizontal, 15)
.padding(.bottom, 8) .compositingGroup()
.background(Color.black.opacity(0.5).ignoresSafeArea(edges: .bottom)) .opacity(overlayOpacity)
.environment(\.colorScheme, .dark)
.transition(.opacity)
.animation(.easeOut(duration: 0.2), value: showControls)
} }
} }
// MARK: - Edge Tap Navigation // MARK: - Glass Button / Label helpers
/// A 36×36 glass-backed circular icon button used by the viewer overlay.
/// - Parameters:
///   - systemName: SF Symbol name rendered as the button glyph.
///   - action: Closure invoked when the button is tapped.
private func glassButton(systemName: String, action: @escaping () -> Void) -> some View {
    let glyph = Image(systemName: systemName)
        .font(.title3)
        .foregroundStyle(.white)
        .frame(width: 36, height: 36)
    return Button(action: action) { glyph }
        .background { TelegramGlassCircle() }
}
/// A capsule-shaped glass label (page counter / sender name) for the overlay.
/// - Parameter text: Single-line string rendered in white callout type.
private func glassLabel(_ text: String) -> some View {
    Text(text)
        .lineLimit(1)
        .font(.callout)
        .foregroundStyle(.white)
        .padding(.vertical, 10)
        .padding(.horizontal, 15)
        .background { TelegramGlassCapsule() }
}
// MARK: - Navigation
private func navigateEdgeTap(direction: Int) { private func navigateEdgeTap(direction: Int) {
let target = currentPage + direction let target = currentPage + direction
@@ -313,8 +277,7 @@ struct ImageGalleryViewer: View {
// MARK: - Dismiss // MARK: - Dismiss
/// Unified dismiss: hero collapse when not zoomed, fade when zoomed. private func dismissAction() {
private func dismiss() {
if currentZoomScale > 1.05 { if currentZoomScale > 1.05 {
fadeDismiss() fadeDismiss()
} else { } else {
@@ -322,7 +285,6 @@ struct ImageGalleryViewer: View {
} }
} }
/// Hero collapse back to source frame.
private func heroDismiss() { private func heroDismiss() {
guard !isDismissing else { return } guard !isDismissing else { return }
isDismissing = true isDismissing = true
@@ -337,7 +299,6 @@ struct ImageGalleryViewer: View {
} }
} }
/// Fallback fade dismiss when zoomed.
private func fadeDismiss() { private func fadeDismiss() {
guard !isDismissing else { return } guard !isDismissing else { return }
isDismissing = true isDismissing = true
@@ -402,3 +363,69 @@ struct ImageGalleryViewer: View {
} }
} }
// MARK: - HeroPanGestureOverlay

/// Transparent UIKit overlay exposing a `UIPanGestureRecognizer` for the
/// vertical hero dismiss. A plain `UIViewRepresentable` is used (rather than
/// `UIGestureRecognizerRepresentable`) so the code also runs on iOS 17.
/// Mirrors `PanGesture` from the PhotosTransition reference — vertical only,
/// single touch.
private struct HeroPanGestureOverlay: UIViewRepresentable {
    var onPan: (UIPanGestureRecognizer) -> Void

    func makeCoordinator() -> Coordinator { Coordinator(onPan: onPan) }

    func makeUIView(context: Context) -> UIView {
        let host = UIView()
        host.backgroundColor = .clear

        let recognizer = UIPanGestureRecognizer(
            target: context.coordinator,
            action: #selector(Coordinator.handlePan(_:))
        )
        recognizer.minimumNumberOfTouches = 1
        recognizer.maximumNumberOfTouches = 1
        recognizer.delegate = context.coordinator
        host.addGestureRecognizer(recognizer)
        return host
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        // Keep the coordinator's callback in sync with the latest closure.
        context.coordinator.onPan = onPan
    }

    final class Coordinator: NSObject, UIGestureRecognizerDelegate {
        var onPan: (UIPanGestureRecognizer) -> Void

        init(onPan: @escaping (UIPanGestureRecognizer) -> Void) {
            self.onPan = onPan
        }

        @objc func handlePan(_ gesture: UIPanGestureRecognizer) {
            onPan(gesture)
        }

        /// Begin only when the initial velocity points predominantly downward.
        func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
            guard let pan = gestureRecognizer as? UIPanGestureRecognizer else { return false }
            let velocity = pan.velocity(in: pan.view)
            return velocity.y > abs(velocity.x)
        }

        /// Take priority over an inner scroll view only while it sits at its
        /// top edge; otherwise let the TabView / scroll gesture win.
        func gestureRecognizer(
            _ gestureRecognizer: UIGestureRecognizer,
            shouldBeRequiredToFailBy otherGestureRecognizer: UIGestureRecognizer
        ) -> Bool {
            guard let scrollView = otherGestureRecognizer.view as? UIScrollView else { return false }
            return scrollView.contentOffset.y <= 0
        }

        /// Run alongside taps, pinches, etc. — but never alongside another pan.
        func gestureRecognizer(
            _ gestureRecognizer: UIGestureRecognizer,
            shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer
        ) -> Bool {
            !(otherGestureRecognizer is UIPanGestureRecognizer)
        }
    }
}

View File

@@ -2,10 +2,16 @@ import FirebaseCore
import FirebaseCrashlytics import FirebaseCrashlytics
import FirebaseMessaging import FirebaseMessaging
import Intents import Intents
import os
import PushKit import PushKit
import SQLite3
import SwiftUI import SwiftUI
import UserNotifications import UserNotifications
// MARK: - VoIP Logging

/// Shared `os.Logger` for the VoIP push path, grouped in Console.app under
/// subsystem "com.rosetta.messenger", category "VoIP".
private extension Logger {
static let voip = Logger(subsystem: "com.rosetta.messenger", category: "VoIP")
}
// MARK: - Firebase AppDelegate // MARK: - Firebase AppDelegate
final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCenterDelegate, final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCenterDelegate,
@@ -333,6 +339,44 @@ final class AppDelegate: NSObject, UIApplicationDelegate, UNUserNotificationCent
} }
} }
// MARK: - Caller Name from SQLite (VoIP push fallback)

/// Reads the caller's display name directly from the on-disk SQLite database.
/// Used when a VoIP push arrives after the app was killed and the in-memory /
/// UserDefaults caches haven't been populated yet — the database file itself
/// persists on disk.
///
/// - Parameters:
///   - callerKey: Public key of the calling contact (`dialogs.opponent_key`).
///   - accountKey: Public key of the local account; selects the per-account DB file.
/// - Returns: `opponent_title` if non-empty, else `opponent_username`, else
///   `nil` when the dialog is unknown or the database cannot be read.
static func resolveCallerNameFromDB(callerKey: String, accountKey: String) -> String? {
    let key = accountKey.trimmingCharacters(in: .whitespacesAndNewlines)
    guard !key.isEmpty, !callerKey.isEmpty else { return nil }

    // Database filename uses the account key with non-alphanumeric scalars
    // replaced by "_" — must stay in sync with the writer's naming scheme.
    let normalized = String(key.unicodeScalars.map {
        CharacterSet.alphanumerics.contains($0) ? Character($0) : "_"
    })
    let baseURL = FileManager.default.urls(for: .applicationSupportDirectory, in: .userDomainMask).first
        ?? URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
    let dbPath = baseURL
        .appendingPathComponent("Rosetta/Database/rosetta_\(normalized).sqlite")
        .path
    guard FileManager.default.fileExists(atPath: dbPath) else { return nil }

    var db: OpaquePointer?
    // Per SQLite docs the handle must be closed even when open FAILS — a
    // partially-initialized connection may still be allocated. Closing a nil
    // handle is a harmless no-op, so the defer is installed before the guard.
    defer { sqlite3_close(db) }
    guard sqlite3_open_v2(dbPath, &db, SQLITE_OPEN_READONLY, nil) == SQLITE_OK else { return nil }

    var stmt: OpaquePointer?
    let sql = "SELECT opponent_title, opponent_username FROM dialogs WHERE opponent_key = ? LIMIT 1"
    guard sqlite3_prepare_v2(db, sql, -1, &stmt, nil) == SQLITE_OK else { return nil }
    defer { sqlite3_finalize(stmt) }

    // SQLITE_TRANSIENT makes SQLite copy the string before bind returns.
    // Passing nil (SQLITE_STATIC) here is unsafe: the C pointer produced by
    // bridging a Swift string is not guaranteed to outlive this call.
    let SQLITE_TRANSIENT = unsafeBitCast(-1, to: sqlite3_destructor_type.self)
    guard sqlite3_bind_text(stmt, 1, callerKey, -1, SQLITE_TRANSIENT) == SQLITE_OK else { return nil }

    guard sqlite3_step(stmt) == SQLITE_ROW else { return nil }
    let title = sqlite3_column_text(stmt, 0).map { String(cString: $0) } ?? ""
    let username = sqlite3_column_text(stmt, 1).map { String(cString: $0) } ?? ""
    return title.isEmpty ? (username.isEmpty ? nil : username) : title
}
// MARK: - Push Payload Helpers (Android parity) // MARK: - Push Payload Helpers (Android parity)
/// Android parity: extract sender public key from multiple possible key names. /// Android parity: extract sender public key from multiple possible key names.
@@ -484,15 +528,40 @@ extension AppDelegate: PKPushRegistryDelegate {
return return
} }
let data = payload.dictionaryPayload let data = payload.dictionaryPayload
let callerKey = data["dialog"] as? String ?? "" Logger.voip.info("VoIP push received: \(data.description, privacy: .public)")
let callerName = data["title"] as? String ?? "Rosetta" // Server sends: { "type": "CALL", "from": "<pubkey>", "callId": "<uuid>" }
// Fallback to "dialog" for backward compat with older server versions.
let callerKey = data["from"] as? String
?? data["dialog"] as? String
?? ""
let callId = data["callId"] as? String
// Resolve caller display name from multiple sources.
let callerName: String = {
// 1. Push payload (if server sends title)
if let title = data["title"] as? String, !title.isEmpty { return title }
// 2. UserDefaults (synced by DialogRepository.syncContactNamesToDefaults)
for defaults in [UserDefaults(suiteName: "group.com.rosetta.dev"), UserDefaults.standard] {
if let names = defaults?.dictionary(forKey: "contact_display_names") as? [String: String],
let name = names[callerKey], !name.isEmpty {
return name
}
}
// 3. SQLite direct read (data persists on disk even when app was killed)
if let creds = SessionCredentialsManager.shared.load() {
let name = Self.resolveCallerNameFromDB(callerKey: callerKey, accountKey: creds.publicKey)
if let name, !name.isEmpty { return name }
}
return "Rosetta"
}()
Logger.voip.info("VoIP resolved: key=\(callerKey.prefix(16), privacy: .public) name=\(callerName, privacy: .public) callId=\(callId ?? "nil", privacy: .public)")
// Apple REQUIREMENT: reportNewIncomingCall MUST be called SYNCHRONOUSLY. // Apple REQUIREMENT: reportNewIncomingCall MUST be called SYNCHRONOUSLY.
// Using Task { @MainActor } would introduce an async hop that may be // Using Task { @MainActor } would introduce an async hop that may be
// delayed if the main actor is busy, causing Apple to terminate the app. // delayed if the main actor is busy, causing Apple to terminate the app.
CallKitManager.shared.reportIncomingCallSynchronously( CallKitManager.shared.reportIncomingCallSynchronously(
callerKey: callerKey.isEmpty ? "unknown" : callerKey, callerKey: callerKey.isEmpty ? "unknown" : callerKey,
callerName: callerName callerName: callerName,
callId: callId
) { error in ) { error in
completion() completion()
@@ -503,11 +572,37 @@ extension AppDelegate: PKPushRegistryDelegate {
return return
} }
// Trigger WebSocket reconnection so the actual .call signal // Restore WebSocket connection so the .call signal packet arrives
// packet arrives and CallManager can handle the call. Without this, the // and CallManager can handle the call. When app was killed, SessionManager
// app wakes from killed state but CallManager stays idle Accept does nothing. // has no credentials in memory load from Keychain (saved during startSession).
Task { @MainActor in Task { @MainActor in
if ProtocolManager.shared.connectionState != .authenticated { // Set up incoming call state from push payload IMMEDIATELY.
// Don't wait for WebSocket .call signal — it's fire-and-forget
// and may have been sent before our WebSocket connected.
if !callerKey.isEmpty, CallManager.shared.uiState.phase == .idle {
// Ensure account is bound for acceptIncomingCall()
if CallManager.shared.ownPublicKey.isEmpty,
let creds = SessionCredentialsManager.shared.load() {
CallManager.shared.bindAccount(publicKey: creds.publicKey)
}
CallManager.shared.setupIncomingCallFromPush(
callerKey: callerKey,
callerName: callerName
)
}
// Restore WebSocket so keyExchange can be sent when user accepts.
if ProtocolManager.shared.connectionState == .authenticated {
return
}
if ProtocolManager.shared.publicKey == nil,
let creds = SessionCredentialsManager.shared.load() {
Logger.voip.info("Restoring session from Keychain for VoIP wake-up")
ProtocolManager.shared.connect(
publicKey: creds.publicKey,
privateKeyHash: creds.privateKeyHash
)
} else {
ProtocolManager.shared.forceReconnectOnForeground() ProtocolManager.shared.forceReconnectOnForeground()
} }
} }