Фикс: поднятие контента сообщений синхронно с раскрытием клавиатуры

This commit is contained in:
2026-03-23 19:46:01 +05:00
parent 0b95776968
commit 1cdd392cf3
8 changed files with 231 additions and 45 deletions

View File

@@ -27,7 +27,7 @@ private struct KeyboardSpacer: View {
// Inverted scroll: spacer at VStack START. Growing it pushes
// messages away from offset=0 visually UP. CADisplayLink
// animates keyboardPadding in sync with keyboard curve.
return composerHeight + keyboard.keyboardPadding + 4
return composerHeight + max(keyboard.keyboardPadding, keyboard.spacerPadding) + 8
}
}()
#if DEBUG
@@ -88,6 +88,7 @@ struct ChatDetailView: View {
}
@State private var messageText = ""
@State private var isMultilineInput = false
@State private var sendError: String?
@State private var isViewActive = false
// markReadTask removed — read receipts no longer sent from .onChange(of: messages.count)
@@ -1598,6 +1599,11 @@ private extension ChatDetailView {
KeyboardTracker.shared.updateFromKVO(keyboardHeight: height)
},
onUserTextInsertion: handleComposerUserTyping,
onMultilineChange: { multiline in
withAnimation(.easeInOut(duration: 0.2)) {
isMultilineInput = multiline
}
},
textColor: UIColor(RosettaColors.Adaptive.text),
placeholderColor: UIColor(RosettaColors.Adaptive.textSecondary.opacity(0.5))
)
@@ -1652,7 +1658,7 @@ private extension ChatDetailView {
}
.padding(3)
.frame(minHeight: 42, alignment: .bottom)
.background { glass(shape: .rounded(21), strokeOpacity: 0.18) }
.background { glass(shape: .rounded(isMultilineInput ? 16 : 21), strokeOpacity: 0.18) }
.padding(.leading, 6)
Button(action: trailingAction) {

View File

@@ -92,9 +92,9 @@ struct MessageAvatarView: View {
}
}
.task {
loadFromCache()
await loadFromCache()
if avatarImage == nil {
decodeBlurHash()
await decodeBlurHash()
}
}
}
@@ -169,21 +169,25 @@ struct MessageAvatarView: View {
/// Shared static cache with half-eviction (same pattern as MessageImageView).
@MainActor private static var blurHashCache: [String: UIImage] = [:]

/// Decodes the attachment's blurhash placeholder into `blurImage`.
///
/// Fast path is a synchronous hit on the shared static cache; the DCT
/// decode itself runs off the main thread via a detached task so scrolling
/// is never blocked by placeholder decoding.
/// NOTE(review): this span previously contained interleaved pre-change
/// (synchronous) and post-change (async) diff lines; only the async
/// post-change implementation is kept here.
private func decodeBlurHash() async {
    let hash = extractBlurHash(from: attachment.preview)
    guard !hash.isEmpty else { return }
    // Fast path: cache hit (synchronous)
    if let cached = Self.blurHashCache[hash] {
        blurImage = cached
        return
    }
    // Slow path: DCT decode off main thread
    let result = await Task.detached(priority: .userInitiated) {
        UIImage.fromBlurHash(hash, width: 32, height: 32)
    }.value
    // The view may have disappeared while decoding; don't publish stale state.
    guard !Task.isCancelled, let result else { return }
    // Half-eviction: once the cache exceeds 200 entries, drop 100 of them.
    if Self.blurHashCache.count > 200 {
        let keysToRemove = Array(Self.blurHashCache.keys.prefix(100))
        for key in keysToRemove { Self.blurHashCache.removeValue(forKey: key) }
    }
    Self.blurHashCache[hash] = result
    blurImage = result
}
/// Extracts the blurhash from preview string.
@@ -195,12 +199,26 @@ struct MessageAvatarView: View {
// MARK: - Download
private func loadFromCache() {
private func loadFromCache() async {
// Fast path: NSCache hit (synchronous, sub-microsecond)
if let cached = AttachmentCache.shared.loadImage(forAttachmentId: attachment.id) {
avatarImage = cached
showAvatar = true // No animation for cached — show immediately
return
}
// Slow path: disk I/O + crypto run off main thread with semaphore
// Android parity: ImageLoadSemaphore limits to 3 concurrent decode ops
let attachmentId = attachment.id
await ImageLoadLimiter.shared.acquire()
let loaded = await Task.detached(priority: .userInitiated) {
AttachmentCache.shared.loadImage(forAttachmentId: attachmentId)
}.value
await ImageLoadLimiter.shared.release()
if !Task.isCancelled, let loaded {
avatarImage = loaded
showAvatar = true
return
}
// Outgoing avatar: sender is me — load from AvatarRepository (always available locally)
if outgoing {
let myKey = SessionManager.shared.currentPublicKey
@@ -283,22 +301,22 @@ struct MessageAvatarView: View {
return nil
}
/// Parses decrypted data as an image: data URI, plain base64, or raw image bytes.
/// Android parity: `base64ToBitmap()` with subsampling to max 4096px.
///
/// Tries, in order: a `data:` URI whose payload after the comma is base64,
/// a bare base64 string, then the raw bytes themselves. All three paths go
/// through `AttachmentCache.downsampledImage(from:)` so oversized images
/// are subsampled instead of decoded at full resolution.
/// NOTE(review): stale pre-change `UIImage(data:)` diff lines from this
/// span have been removed; only the downsampling version is kept.
private func parseImageData(_ data: Data) -> UIImage? {
    if let str = String(data: data, encoding: .utf8) {
        if str.hasPrefix("data:"),
           let commaIndex = str.firstIndex(of: ",") {
            // data URI: everything after the first comma is the base64 payload.
            let base64Part = String(str[str.index(after: commaIndex)...])
            if let imageData = Data(base64Encoded: base64Part),
               let img = AttachmentCache.downsampledImage(from: imageData) {
                return img
            }
        } else if let imageData = Data(base64Encoded: str),
                  let img = AttachmentCache.downsampledImage(from: imageData) {
            return img
        }
    }
    // Fallback: treat the payload as raw image bytes.
    return AttachmentCache.downsampledImage(from: data)
}
/// Extracts the server tag from preview string.

View File

@@ -66,9 +66,9 @@ struct MessageImageView: View {
}
}
.task {
loadFromCache()
await loadFromCache()
if image == nil {
decodeBlurHash()
await decodeBlurHash()
}
}
.onReceive(NotificationCenter.default.publisher(for: .triggerAttachmentDownload)) { notif in
@@ -205,29 +205,46 @@ struct MessageImageView: View {
/// Shared static cache with half-eviction (same pattern as MessageAvatarView).
@MainActor private static var blurHashCache: [String: UIImage] = [:]

/// Decodes the attachment's blurhash placeholder into `blurImage`.
///
/// Synchronous cache hit on the fast path; the DCT decode runs on a
/// detached task off the main thread on the slow path.
/// NOTE(review): the pre-change synchronous signature and body that were
/// interleaved into this diff span have been removed; this is the
/// post-change async implementation.
private func decodeBlurHash() async {
    let hash = extractBlurHash(from: attachment.preview)
    guard !hash.isEmpty else { return }
    // Fast path: cache hit (synchronous)
    if let cached = Self.blurHashCache[hash] {
        blurImage = cached
        return
    }
    // Slow path: DCT decode off main thread
    let result = await Task.detached(priority: .userInitiated) {
        UIImage.fromBlurHash(hash, width: 32, height: 32)
    }.value
    // Don't publish a result into a view that has since gone away.
    guard !Task.isCancelled, let result else { return }
    // Half-eviction: once the cache exceeds 200 entries, drop 100 of them.
    if Self.blurHashCache.count > 200 {
        let keysToRemove = Array(Self.blurHashCache.keys.prefix(100))
        for key in keysToRemove { Self.blurHashCache.removeValue(forKey: key) }
    }
    Self.blurHashCache[hash] = result
    blurImage = result
}
// MARK: - Download
/// Loads the decrypted full image for this attachment into `image`.
///
/// Fast path: synchronous NSCache hit. Slow path: disk I/O + decryption on
/// a detached task, gated by `ImageLoadLimiter` so at most a few decode
/// operations run concurrently (Android parity: ImageLoadSemaphore, 3 ops).
/// NOTE(review): the stale pre-change synchronous signature interleaved
/// into this diff span has been removed.
private func loadFromCache() async {
    PerformanceLogger.shared.track("image.cacheLoad")
    // Fast path: NSCache hit (synchronous, sub-microsecond)
    if let cached = AttachmentCache.shared.loadImage(forAttachmentId: attachment.id) {
        image = cached
        return
    }
    // Slow path: disk I/O + crypto run off main thread with semaphore
    // Android parity: ImageLoadSemaphore limits to 3 concurrent decode ops
    let attachmentId = attachment.id
    await ImageLoadLimiter.shared.acquire()
    let loaded = await Task.detached(priority: .userInitiated) {
        AttachmentCache.shared.loadImage(forAttachmentId: attachmentId)
    }.value
    // Release unconditionally — `defer` cannot await, so keep this directly
    // after the detached load on every path.
    await ImageLoadLimiter.shared.release()
    if !Task.isCancelled, let loaded {
        image = loaded
    }
}
@@ -293,21 +310,22 @@ struct MessageImageView: View {
return nil
}
/// Android parity: `base64ToBitmap()` with subsampling to max 4096px.
///
/// Parses decrypted data as an image: a `data:` URI (base64 payload after
/// the comma), a bare base64 string, or raw image bytes — all decoded via
/// `AttachmentCache.downsampledImage(from:)` so huge images are subsampled
/// rather than fully decoded.
/// NOTE(review): stale pre-change `UIImage(data:)` lines interleaved into
/// this diff span have been removed; only the downsampling version remains.
private func parseImageData(_ data: Data) -> UIImage? {
    if let str = String(data: data, encoding: .utf8) {
        if str.hasPrefix("data:"),
           let commaIndex = str.firstIndex(of: ",") {
            // data URI: base64 payload follows the first comma.
            let base64Part = String(str[str.index(after: commaIndex)...])
            if let imageData = Data(base64Encoded: base64Part),
               let img = AttachmentCache.downsampledImage(from: imageData) {
                return img
            }
        } else if let imageData = Data(base64Encoded: str),
                  let img = AttachmentCache.downsampledImage(from: imageData) {
            return img
        }
    }
    // Fallback: treat the payload as raw image bytes.
    return AttachmentCache.downsampledImage(from: data)
}
// MARK: - Preview Parsing