feat: добавить состояние PAUSED и функции pause/resume для голосовых сообщений

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-04-11 20:36:51 +05:00
parent 5e5c4c11ac
commit fad8bfb1d1
7 changed files with 1320 additions and 74 deletions

View File

@@ -10,6 +10,7 @@ enum class AttachmentType(val value: Int) {
AVATAR(3), // Аватар пользователя
CALL(4), // Событие звонка (пропущен/принят/завершен)
VOICE(5), // Голосовое сообщение
VIDEO_CIRCLE(6), // Видео-кружок (video note)
UNKNOWN(-1); // Неизвестный тип
companion object {

View File

@@ -3705,16 +3705,32 @@ fun ChatDetailScreen(
onMediaSelected = { selectedMedia, caption ->
val imageUris =
selectedMedia.filter { !it.isVideo }.map { it.uri }
if (imageUris.isNotEmpty()) {
val videoUris =
selectedMedia.filter { it.isVideo }.map { it.uri }
if (imageUris.isNotEmpty() || videoUris.isNotEmpty()) {
showMediaPicker = false
inputFocusTrigger++
viewModel.sendImageGroupFromUris(imageUris, caption)
if (imageUris.isNotEmpty()) {
viewModel.sendImageGroupFromUris(
imageUris,
caption
)
}
if (videoUris.isNotEmpty()) {
videoUris.forEach { uri ->
viewModel.sendVideoCircleFromUri(uri)
}
}
}
},
onMediaSelectedWithCaption = { mediaItem, caption ->
showMediaPicker = false
inputFocusTrigger++
if (mediaItem.isVideo) {
viewModel.sendVideoCircleFromUri(mediaItem.uri)
} else {
viewModel.sendImageFromUri(mediaItem.uri, caption)
}
},
onOpenCamera = {
val imm =
@@ -3806,16 +3822,32 @@ fun ChatDetailScreen(
onMediaSelected = { selectedMedia, caption ->
val imageUris =
selectedMedia.filter { !it.isVideo }.map { it.uri }
if (imageUris.isNotEmpty()) {
val videoUris =
selectedMedia.filter { it.isVideo }.map { it.uri }
if (imageUris.isNotEmpty() || videoUris.isNotEmpty()) {
showMediaPicker = false
inputFocusTrigger++
viewModel.sendImageGroupFromUris(imageUris, caption)
if (imageUris.isNotEmpty()) {
viewModel.sendImageGroupFromUris(
imageUris,
caption
)
}
if (videoUris.isNotEmpty()) {
videoUris.forEach { uri ->
viewModel.sendVideoCircleFromUri(uri)
}
}
}
},
onMediaSelectedWithCaption = { mediaItem, caption ->
showMediaPicker = false
inputFocusTrigger++
if (mediaItem.isVideo) {
viewModel.sendVideoCircleFromUri(mediaItem.uri)
} else {
viewModel.sendImageFromUri(mediaItem.uri, caption)
}
},
onOpenCamera = {
val imm =

View File

@@ -3,7 +3,9 @@ package com.rosetta.messenger.ui.chats
import android.app.Application
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.MediaMetadataRetriever
import android.util.Base64
import android.webkit.MimeTypeMap
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import com.rosetta.messenger.crypto.CryptoManager
@@ -656,7 +658,8 @@ class ChatViewModel(application: Application) : AndroidViewModel(application) {
when (parseAttachmentType(attachment)) {
AttachmentType.IMAGE,
AttachmentType.FILE,
AttachmentType.AVATAR -> {
AttachmentType.AVATAR,
AttachmentType.VIDEO_CIRCLE -> {
hasMediaAttachment = true
if (attachment.optString("localUri", "").isNotBlank()) {
// Локальный URI ещё есть => загрузка/подготовка не завершена.
@@ -1626,6 +1629,8 @@ class ChatViewModel(application: Application) : AndroidViewModel(application) {
"avatar" -> AttachmentType.AVATAR.value
"call" -> AttachmentType.CALL.value
"voice" -> AttachmentType.VOICE.value
"video_circle", "videocircle", "video_note", "videonote", "round_video", "videoround", "video" ->
AttachmentType.VIDEO_CIRCLE.value
else -> -1
}
}
@@ -1796,7 +1801,8 @@ class ChatViewModel(application: Application) : AndroidViewModel(application) {
// 💾 Для IMAGE/AVATAR/VOICE - пробуем загрузить blob из файла если пустой
if ((effectiveType == AttachmentType.IMAGE ||
effectiveType == AttachmentType.AVATAR ||
effectiveType == AttachmentType.VOICE) &&
effectiveType == AttachmentType.VOICE ||
effectiveType == AttachmentType.VIDEO_CIRCLE) &&
blob.isEmpty() &&
attachmentId.isNotEmpty()
) {
@@ -2569,6 +2575,7 @@ class ChatViewModel(application: Application) : AndroidViewModel(application) {
message.attachments.any { it.type == AttachmentType.AVATAR } -> "Avatar"
message.attachments.any { it.type == AttachmentType.CALL } -> "Call"
message.attachments.any { it.type == AttachmentType.VOICE } -> "Voice message"
message.attachments.any { it.type == AttachmentType.VIDEO_CIRCLE } -> "Video message"
message.forwardedMessages.isNotEmpty() -> "Forwarded message"
message.replyData != null -> "Reply"
else -> "Pinned message"
@@ -4809,6 +4816,344 @@ class ChatViewModel(application: Application) : AndroidViewModel(application) {
}
}
/**
 * Intrinsic metadata for an outgoing video-circle (video note) attachment,
 * resolved from the source video before encryption/upload.
 */
private data class VideoCircleMeta(
    val durationSec: Int, // whole seconds, rounded up; always >= 1
    val width: Int,       // rotation-compensated display width; >= 0 (0 = unknown)
    val height: Int,      // rotation-compensated display height; >= 0 (0 = unknown)
    val mimeType: String  // resolved MIME type; falls back to "video/mp4", never blank
)
/**
 * Encodes a byte array as a lowercase hex string (two characters per byte).
 * Returns an empty string for an empty input.
 */
private fun bytesToHex(bytes: ByteArray): String =
    bytes.joinToString(separator = "") { byte ->
        // Mask to the unsigned 0..255 range, then pad to a fixed two-char width.
        (byte.toInt() and 0xFF).toString(16).padStart(2, '0')
    }
/**
 * Resolves duration, rotation-corrected dimensions and MIME type for a video URI.
 *
 * MIME resolution order: ContentResolver -> URL file extension -> "video/mp4".
 * Metadata extraction is best-effort: on any failure the defaults
 * (durationSec=1, width=0, height=0) are kept.
 */
private fun resolveVideoCircleMeta(
    application: Application,
    videoUri: android.net.Uri
): VideoCircleMeta {
    var durationSec = 1
    var width = 0
    var height = 0
    val mimeType =
        application.contentResolver.getType(videoUri)?.trim().orEmpty().ifBlank {
            val ext =
                MimeTypeMap.getFileExtensionFromUrl(videoUri.toString())
                    ?.lowercase(Locale.ROOT)
                    ?: ""
            MimeTypeMap.getSingleton().getMimeTypeFromExtension(ext) ?: "video/mp4"
        }
    runCatching {
        val retriever = MediaMetadataRetriever()
        try {
            retriever.setDataSource(application, videoUri)
            val durationMs =
                retriever.extractMetadata(
                    MediaMetadataRetriever.METADATA_KEY_DURATION
                )
                    ?.toLongOrNull()
                    ?: 0L
            val rawWidth =
                retriever.extractMetadata(
                    MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH
                )
                    ?.toIntOrNull()
                    ?: 0
            val rawHeight =
                retriever.extractMetadata(
                    MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT
                )
                    ?.toIntOrNull()
                    ?: 0
            val rotation =
                retriever.extractMetadata(
                    MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION
                )
                    ?.toIntOrNull()
                    ?: 0
            // Round duration up to whole seconds; a sub-second clip counts as 1s.
            durationSec = ((durationMs + 999L) / 1000L).toInt().coerceAtLeast(1)
            // Swap reported dimensions for 90/270-degree rotated videos so the
            // stored width/height reflect display orientation.
            val rotated = rotation == 90 || rotation == 270
            width = if (rotated) rawHeight else rawWidth
            height = if (rotated) rawWidth else rawHeight
        } finally {
            // BUG FIX: release the retriever in `finally`. Previously release()
            // was only reached on the success path, so an exception from
            // setDataSource/extractMetadata leaked the native retriever.
            runCatching { retriever.release() }
        }
    }
    return VideoCircleMeta(
        durationSec = durationSec,
        width = width.coerceAtLeast(0),
        height = height.coerceAtLeast(0),
        mimeType = mimeType
    )
}
/**
 * Reads the full video content behind [videoUri] on the IO dispatcher and
 * returns it as a lowercase hex string.
 *
 * Returns null when the stream cannot be opened, the content is empty, or
 * reading fails for any reason (best-effort, no exception escapes).
 */
private suspend fun encodeVideoUriToHex(
    application: Application,
    videoUri: android.net.Uri
): String? = withContext(Dispatchers.IO) {
    runCatching {
        application.contentResolver.openInputStream(videoUri)?.use { input ->
            input.readBytes()
                .takeIf { it.isNotEmpty() }
                ?.let(::bytesToHex)
        }
    }.getOrNull()
}
/**
 * Sends a video-circle (video note) from a URI.
 * Uses the same transport + encryption pipeline as voice attachments.
 *
 * Flow: validate -> optimistic UI message (SENDING) -> persist optimistic row ->
 * hex-encode video -> encrypt/upload via [sendVideoCircleMessageInternal].
 * Silently returns when keys are missing, a send is already in flight, or the
 * file exceeds the size limit.
 * NOTE(review): the silent size-limit return gives the user no feedback — confirm intended.
 */
fun sendVideoCircleFromUri(videoUri: android.net.Uri) {
    val recipient = opponentKey
    val sender = myPublicKey
    val privateKey = myPrivateKey
    val context = getApplication<Application>()
    // Cannot encrypt/address the message without all three keys.
    if (recipient == null || sender == null || privateKey == null) {
        return
    }
    // One in-flight send at a time; reset in the coroutine's finally below.
    if (isSending) {
        return
    }
    // Best-effort size check; 0 means "size unknown" and is allowed through.
    val fileSize = runCatching { com.rosetta.messenger.utils.MediaUtils.getFileSize(context, videoUri) }
        .getOrDefault(0L)
    val maxBytes = com.rosetta.messenger.utils.MediaUtils.MAX_FILE_SIZE_MB * 1024L * 1024L
    if (fileSize > 0L && fileSize > maxBytes) {
        return
    }
    isSending = true
    val messageId = UUID.randomUUID().toString().replace("-", "").take(32)
    val timestamp = System.currentTimeMillis()
    val attachmentId = "video_circle_$timestamp"
    val meta = resolveVideoCircleMeta(context, videoUri)
    // Preview format "durationSec::mimeType" — parsed by parseVideoCirclePreview on render.
    val preview = "${meta.durationSec}::${meta.mimeType}"
    // Optimistic message shown immediately with the local URI while upload runs.
    val optimisticMessage =
        ChatMessage(
            id = messageId,
            text = "",
            isOutgoing = true,
            timestamp = Date(timestamp),
            status = MessageStatus.SENDING,
            attachments =
                listOf(
                    MessageAttachment(
                        id = attachmentId,
                        blob = "",
                        type = AttachmentType.VIDEO_CIRCLE,
                        preview = preview,
                        width = meta.width,
                        height = meta.height,
                        localUri = videoUri.toString()
                    )
                )
        )
    addMessageSafely(optimisticMessage)
    _inputText.value = ""
    backgroundUploadScope.launch {
        // Phase 1 (best-effort): persist the optimistic row so the message
        // survives process death; failures here are deliberately ignored.
        try {
            val optimisticAttachmentsJson =
                JSONArray()
                    .apply {
                        put(
                            JSONObject().apply {
                                put("id", attachmentId)
                                put("type", AttachmentType.VIDEO_CIRCLE.value)
                                put("preview", preview)
                                put("blob", "")
                                put("width", meta.width)
                                put("height", meta.height)
                                put("localUri", videoUri.toString())
                            }
                        )
                    }
                    .toString()
            saveMessageToDatabase(
                messageId = messageId,
                text = "",
                encryptedContent = "",
                encryptedKey = "",
                timestamp = timestamp,
                isFromMe = true,
                delivered = 0,
                attachmentsJson = optimisticAttachmentsJson,
                opponentPublicKey = recipient
            )
            saveDialog("Video message", timestamp, opponentPublicKey = recipient)
        } catch (_: Exception) {
        }
        // Phase 2: read + hex-encode the video and hand off to the
        // encrypt/upload pipeline; any failure marks the message as ERROR.
        try {
            val videoHex = encodeVideoUriToHex(context, videoUri)
            if (videoHex.isNullOrBlank()) {
                withContext(Dispatchers.Main) {
                    updateMessageStatus(messageId, MessageStatus.ERROR)
                }
                return@launch
            }
            sendVideoCircleMessageInternal(
                messageId = messageId,
                attachmentId = attachmentId,
                timestamp = timestamp,
                videoHex = videoHex,
                preview = preview,
                width = meta.width,
                height = meta.height,
                recipient = recipient,
                sender = sender,
                privateKey = privateKey
            )
        } catch (_: Exception) {
            withContext(Dispatchers.Main) {
                updateMessageStatus(messageId, MessageStatus.ERROR)
            }
            updateMessageStatusInDb(messageId, DeliveryStatus.ERROR.value)
        } finally {
            // Always re-arm sending, including the early return@launch above.
            isSending = false
        }
    }
}
/**
 * Encrypts, uploads and dispatches a video-circle message.
 *
 * Steps: build per-chat encryption context -> encrypt the hex payload ->
 * upload the encrypted blob (skipped for Saved Messages, where sender ==
 * recipient) -> send the protocol packet -> cache plaintext locally ->
 * persist final attachment JSON -> update UI status.
 *
 * On failure: if the packet already reached the protocol layer the message is
 * still marked SENT (the upload/packet made it out); otherwise ERROR.
 * NOTE(review): the failure path updates only the in-memory status, not the DB
 * row — a crash after a late failure leaves delivered=0 in the DB; confirm
 * whether that is the intended retry semantics.
 */
private suspend fun sendVideoCircleMessageInternal(
    messageId: String,
    attachmentId: String,
    timestamp: Long,
    videoHex: String,
    preview: String,
    width: Int,
    height: Int,
    recipient: String,
    sender: String,
    privateKey: String
) {
    var packetSentToProtocol = false
    try {
        val application = getApplication<Application>()
        val encryptionContext =
            buildEncryptionContext(
                plaintext = "",
                recipient = recipient,
                privateKey = privateKey
            ) ?: throw IllegalStateException("Cannot resolve chat encryption context")
        val encryptedContent = encryptionContext.encryptedContent
        val encryptedKey = encryptionContext.encryptedKey
        val aesChachaKey = encryptionContext.aesChachaKey
        val privateKeyHash = CryptoManager.generatePrivateKeyHash(privateKey)
        val encryptedVideoBlob = encryptAttachmentPayload(videoHex, encryptionContext)
        // Saved Messages (self-chat) skips transport upload entirely.
        val isSavedMessages = (sender == recipient)
        val uploadTag =
            if (!isSavedMessages) {
                TransportManager.uploadFile(attachmentId, encryptedVideoBlob)
            } else {
                ""
            }
        val attachmentTransportServer =
            if (uploadTag.isNotEmpty()) {
                TransportManager.getTransportServer().orEmpty()
            } else {
                ""
            }
        // Outgoing attachment carries only transport references; blob stays empty.
        val videoAttachment =
            MessageAttachment(
                id = attachmentId,
                blob = "",
                type = AttachmentType.VIDEO_CIRCLE,
                preview = preview,
                width = width,
                height = height,
                transportTag = uploadTag,
                transportServer = attachmentTransportServer
            )
        val packet =
            PacketMessage().apply {
                fromPublicKey = sender
                toPublicKey = recipient
                content = encryptedContent
                chachaKey = encryptedKey
                this.aesChachaKey = aesChachaKey
                this.timestamp = timestamp
                this.privateKey = privateKeyHash
                this.messageId = messageId
                attachments = listOf(videoAttachment)
            }
        if (!isSavedMessages) {
            ProtocolManager.send(packet)
            packetSentToProtocol = true
        }
        // Best-effort local cache of the plaintext hex so playback does not
        // require re-downloading our own upload.
        runCatching {
            AttachmentFileManager.saveAttachment(
                context = application,
                blob = videoHex,
                attachmentId = attachmentId,
                publicKey = sender,
                privateKey = privateKey
            )
        }
        // Replace the optimistic row (localUri) with the final transport refs.
        val attachmentsJson =
            JSONArray()
                .apply {
                    put(
                        JSONObject().apply {
                            put("id", attachmentId)
                            put("type", AttachmentType.VIDEO_CIRCLE.value)
                            put("preview", preview)
                            put("blob", "")
                            put("width", width)
                            put("height", height)
                            put("transportTag", uploadTag)
                            put("transportServer", attachmentTransportServer)
                        }
                    )
                }
                .toString()
        updateMessageStatusAndAttachmentsInDb(
            messageId = messageId,
            delivered = if (isSavedMessages) 1 else 0,
            attachmentsJson = attachmentsJson
        )
        withContext(Dispatchers.Main) {
            updateMessageStatus(messageId, MessageStatus.SENT)
            updateMessageAttachments(messageId, null)
        }
        saveDialog("Video message", timestamp, opponentPublicKey = recipient)
    } catch (_: Exception) {
        if (packetSentToProtocol) {
            withContext(Dispatchers.Main) {
                updateMessageStatus(messageId, MessageStatus.SENT)
            }
        } else {
            withContext(Dispatchers.Main) {
                updateMessageStatus(messageId, MessageStatus.ERROR)
            }
        }
    }
}
/**
* 🎙️ Отправка голосового сообщения.
* blob хранится как HEX строка opus/webm байт (desktop parity).

View File

@@ -584,6 +584,7 @@ class ChatsListViewModel(application: Application) : AndroidViewModel(applicatio
3 -> "Avatar" // AttachmentType.AVATAR = 3
4 -> "Call" // AttachmentType.CALL = 4
5 -> "Voice message" // AttachmentType.VOICE = 5
6 -> "Video message" // AttachmentType.VIDEO_CIRCLE = 6
else -> if (inferredCall) "Call" else null
}
}
@@ -607,6 +608,7 @@ class ChatsListViewModel(application: Application) : AndroidViewModel(applicatio
"avatar" -> 3
"call" -> 4
"voice" -> 5
"video_circle", "videocircle", "video_note", "videonote", "round_video", "videoround", "video" -> 6
else -> -1
}
}

View File

@@ -6,10 +6,12 @@ import android.graphics.BitmapFactory
import android.graphics.Matrix
import android.media.AudioAttributes
import android.media.MediaPlayer
import android.net.Uri
import android.os.SystemClock
import android.util.Base64
import android.util.LruCache
import android.webkit.MimeTypeMap
import android.widget.VideoView
import androidx.compose.animation.core.Animatable
import androidx.compose.animation.core.LinearEasing
import androidx.compose.animation.core.RepeatMode
@@ -58,6 +60,7 @@ import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.compose.ui.viewinterop.AndroidView
import androidx.exifinterface.media.ExifInterface
import com.rosetta.messenger.R
import com.rosetta.messenger.crypto.CryptoManager
@@ -686,6 +689,19 @@ fun MessageAttachments(
messageStatus = messageStatus
)
}
AttachmentType.VIDEO_CIRCLE -> {
VideoCircleAttachment(
attachment = attachment,
chachaKey = chachaKey,
chachaKeyPlainHex = chachaKeyPlainHex,
privateKey = privateKey,
senderPublicKey = senderPublicKey,
isOutgoing = isOutgoing,
isDarkTheme = isDarkTheme,
timestamp = timestamp,
messageStatus = messageStatus
)
}
else -> {
// Desktop parity: unsupported/legacy attachment gets explicit compatibility card.
LegacyAttachmentErrorCard(isDarkTheme = isDarkTheme)
@@ -1808,6 +1824,50 @@ private fun parseVoicePreview(preview: String): Pair<Int, List<Float>> {
return duration to waves
}
/**
 * Metadata decoded from a video-circle attachment `preview` string
 * ("durationSec::mimeType").
 */
private data class VideoCirclePreviewMeta(
    val durationSec: Int,
    val mimeType: String
)

/**
 * Parses a "durationSec::mimeType" preview string.
 * Any malformed part falls back to 1 second / "video/mp4".
 */
private fun parseVideoCirclePreview(preview: String): VideoCirclePreviewMeta {
    val parts = preview.split("::", limit = 2)
    val seconds = parts.first().trim().toIntOrNull()?.coerceAtLeast(1) ?: 1
    val rawMime = parts.getOrElse(1) { "" }.trim()
    // Accept only values that at least look like a MIME type ("type/subtype").
    val mime = if ("/" in rawMime) rawMime else "video/mp4"
    return VideoCirclePreviewMeta(durationSec = seconds, mimeType = mime)
}
/**
 * Decodes a video-circle payload string into raw bytes,
 * trying hex first and falling back to base64. Null when neither decodes.
 */
private fun decodeVideoCirclePayload(data: String): ByteArray? {
    decodeHexPayload(data)?.let { return it }
    return decodeBase64Payload(data)
}
/**
 * Produces a playable Uri for a video-circle attachment.
 *
 * Prefers [localUri] when present (no decode/write). Otherwise decodes the
 * hex/base64 [payload] and materialises it as a cache file under
 * cacheDir/video_circles/<attachmentId>.<ext>. Returns null when the payload
 * cannot be decoded or the file cannot be written.
 * NOTE(review): the localUri branch does not check that the target still
 * exists — confirm callers handle a stale local Uri.
 */
private fun ensureVideoCirclePlaybackUri(
    context: android.content.Context,
    attachmentId: String,
    payload: String,
    mimeType: String,
    localUri: String = ""
): Uri? {
    if (localUri.isNotBlank()) {
        return runCatching { Uri.parse(localUri) }.getOrNull()
    }
    val decoded = decodeVideoCirclePayload(payload) ?: return null
    val ext =
        MimeTypeMap.getSingleton()
            .getExtensionFromMimeType(mimeType)
            ?.takeIf { it.isNotBlank() }
            ?: "mp4"
    val cacheDir = File(context.cacheDir, "video_circles")
    cacheDir.mkdirs()
    val target = File(cacheDir, "$attachmentId.$ext")
    val written = runCatching { target.writeBytes(decoded) }.isSuccess
    return if (written) Uri.fromFile(target) else null
}
private fun normalizeVoiceWaves(source: List<Float>, targetLength: Int): List<Float> {
if (targetLength <= 0) return emptyList()
if (source.isEmpty()) return List(targetLength) { 0f }
@@ -2320,6 +2380,373 @@ private fun VoiceAttachment(
}
}
/**
 * Renders a circular video-note ("video circle") attachment bubble.
 *
 * Handles the full client lifecycle: initial state from blob/localUri/transportTag,
 * lazy download + decrypt, materialising a playable Uri, inline VideoView playback
 * with tap-to-play/pause, and the duration + delivery-status overlay.
 *
 * NOTE(review): [timestamp] is not used in this body; [errorText] and
 * [playbackPositionMs] are tracked but not rendered in this layout — confirm
 * whether they feed a follow-up UI or are vestigial.
 */
@Composable
private fun VideoCircleAttachment(
    attachment: MessageAttachment,
    chachaKey: String,
    chachaKeyPlainHex: String,
    privateKey: String,
    senderPublicKey: String,
    isOutgoing: Boolean,
    isDarkTheme: Boolean,
    timestamp: java.util.Date,
    messageStatus: MessageStatus = MessageStatus.READ
) {
    val context = LocalContext.current
    val scope = rememberCoroutineScope()
    // "durationSec::mimeType" metadata; used for the placeholder duration and
    // for choosing the cache-file extension.
    // NOTE(review): keyed on attachment.preview but parses getPreview(attachment) —
    // confirm those are the same value.
    val previewMeta = remember(attachment.preview) { parseVideoCirclePreview(getPreview(attachment)) }
    val fallbackDurationMs = previewMeta.durationSec.coerceAtLeast(1) * 1000
    // Raw (hex/base64) payload; starts from the inline blob, replaced after download.
    var payload by
        remember(attachment.id, attachment.blob) {
            mutableStateOf(attachment.blob.trim())
        }
    // Initial status: local file or inline blob => already downloaded; a transport
    // tag => downloadable; otherwise nothing to play.
    var downloadStatus by
        remember(attachment.id, attachment.blob, attachment.transportTag, attachment.localUri) {
            mutableStateOf(
                when {
                    attachment.localUri.isNotBlank() -> DownloadStatus.DOWNLOADED
                    attachment.blob.isNotBlank() -> DownloadStatus.DOWNLOADED
                    attachment.transportTag.isNotBlank() -> DownloadStatus.NOT_DOWNLOADED
                    else -> DownloadStatus.ERROR
                }
            )
        }
    var errorText by remember { mutableStateOf("") }
    // Uri actually handed to VideoView; local Uri wins when available.
    var playbackUri by remember(attachment.id, attachment.localUri) {
        mutableStateOf(
            runCatching {
                if (attachment.localUri.isNotBlank()) Uri.parse(attachment.localUri) else null
            }.getOrNull()
        )
    }
    // Uri currently bound to the VideoView (guards against re-binding on recomposition).
    var boundUri by remember(attachment.id) { mutableStateOf<String?>(null) }
    var isPrepared by remember(attachment.id) { mutableStateOf(false) }
    var isPlaying by remember(attachment.id) { mutableStateOf(false) }
    var playbackPositionMs by remember(attachment.id) { mutableIntStateOf(0) }
    var playbackDurationMs by remember(attachment.id) { mutableIntStateOf(fallbackDurationMs) }
    var videoViewRef by remember(attachment.id) { mutableStateOf<VideoView?>(null) }
    // Materialise a playable Uri from the payload whenever it becomes available.
    LaunchedEffect(payload, attachment.localUri, attachment.id, previewMeta.mimeType) {
        if (playbackUri != null) return@LaunchedEffect
        if (attachment.localUri.isNotBlank()) {
            playbackUri = runCatching { Uri.parse(attachment.localUri) }.getOrNull()
            return@LaunchedEffect
        }
        if (payload.isBlank()) return@LaunchedEffect
        val prepared =
            ensureVideoCirclePlaybackUri(
                context = context,
                attachmentId = attachment.id,
                payload = payload,
                mimeType = previewMeta.mimeType
            )
        if (prepared != null) {
            playbackUri = prepared
            // Don't override an in-flight download/decrypt indicator.
            if (downloadStatus != DownloadStatus.DOWNLOADING &&
                downloadStatus != DownloadStatus.DECRYPTING
            ) {
                downloadStatus = DownloadStatus.DOWNLOADED
            }
            if (errorText.isNotBlank()) errorText = ""
        } else {
            downloadStatus = DownloadStatus.ERROR
            if (errorText.isBlank()) errorText = "Cannot decode video"
        }
    }
    // Poll playback position (~8 Hz) while playing.
    LaunchedEffect(isPlaying, videoViewRef) {
        val player = videoViewRef ?: return@LaunchedEffect
        while (isPlaying) {
            playbackPositionMs = runCatching { player.currentPosition }.getOrDefault(0).coerceAtLeast(0)
            delay(120)
        }
    }
    // Tear down the VideoView when this message leaves composition.
    DisposableEffect(attachment.id) {
        onDispose {
            runCatching {
                videoViewRef?.stopPlayback()
            }
            videoViewRef = null
            isPlaying = false
            isPrepared = false
            boundUri = null
        }
    }
    // Downloads + decrypts the remote blob, caches it, and prepares playback.
    val triggerDownload: () -> Unit = download@{
        if (attachment.transportTag.isBlank()) {
            downloadStatus = DownloadStatus.ERROR
            errorText = "Video is not available"
            return@download
        }
        scope.launch {
            downloadStatus = DownloadStatus.DOWNLOADING
            errorText = ""
            // Reuses the voice-attachment download/decrypt pipeline.
            val decrypted =
                downloadAndDecryptVoicePayload(
                    attachmentId = attachment.id,
                    downloadTag = attachment.transportTag,
                    chachaKey = chachaKey,
                    privateKey = privateKey,
                    transportServer = attachment.transportServer,
                    chachaKeyPlainHex = chachaKeyPlainHex
                )
            if (decrypted.isNullOrBlank()) {
                downloadStatus = DownloadStatus.ERROR
                errorText = "Failed to decrypt"
                return@launch
            }
            // Best-effort persistent cache; playback proceeds even if it fails.
            val saved =
                runCatching {
                    AttachmentFileManager.saveAttachment(
                        context = context,
                        blob = decrypted,
                        attachmentId = attachment.id,
                        publicKey = senderPublicKey,
                        privateKey = privateKey
                    )
                }
                    .getOrDefault(false)
            payload = decrypted
            playbackUri =
                ensureVideoCirclePlaybackUri(
                    context = context,
                    attachmentId = attachment.id,
                    payload = decrypted,
                    mimeType = previewMeta.mimeType
                )
            if (!saved) {
                runCatching { android.util.Log.w(TAG, "Video circle cache save failed: ${attachment.id}") }
            }
            if (playbackUri == null) {
                downloadStatus = DownloadStatus.ERROR
                errorText = "Cannot decode video"
            } else {
                downloadStatus = DownloadStatus.DOWNLOADED
            }
        }
    }
    // Tap handler: download when missing/errored, ignore while busy,
    // toggle play/pause once playable.
    val onMainAction: () -> Unit = {
        when (downloadStatus) {
            DownloadStatus.NOT_DOWNLOADED, DownloadStatus.ERROR -> triggerDownload()
            DownloadStatus.DOWNLOADING, DownloadStatus.DECRYPTING -> Unit
            DownloadStatus.DOWNLOADED, DownloadStatus.PENDING -> {
                if (playbackUri == null) {
                    triggerDownload()
                } else {
                    isPlaying = !isPlaying
                }
            }
        }
    }
    // Prefer the real media duration once prepared; otherwise the preview value.
    val durationToShowSec =
        if (isPrepared && playbackDurationMs > 0) {
            (playbackDurationMs / 1000).coerceAtLeast(1)
        } else {
            previewMeta.durationSec.coerceAtLeast(1)
        }
    val secondaryTextColor =
        if (isOutgoing) Color.White.copy(alpha = 0.82f)
        else if (isDarkTheme) Color(0xFFCCD3E0)
        else Color(0xFF5F6D82)
    // 220dp circular bubble; tap anywhere to act (no ripple).
    Box(
        modifier =
            Modifier.padding(vertical = 4.dp)
                .size(220.dp)
                .clip(CircleShape)
                .background(
                    if (isOutgoing) {
                        Color(0xFF3A9DFB)
                    } else if (isDarkTheme) {
                        Color(0xFF22252B)
                    } else {
                        Color(0xFFE8EEF7)
                    }
                )
                .clickable(
                    interactionSource = remember { MutableInteractionSource() },
                    indication = null
                ) { onMainAction() }
    ) {
        val uri = playbackUri
        if (uri != null && (downloadStatus == DownloadStatus.DOWNLOADED || downloadStatus == DownloadStatus.PENDING)) {
            // Playable: embed a classic VideoView inside Compose.
            AndroidView(
                factory = { ctx -> VideoView(ctx) },
                modifier = Modifier.fillMaxSize(),
                update = { videoView ->
                    videoViewRef = videoView
                    val targetUri = uri.toString()
                    // Bind listeners only once per Uri; `update` runs on every recomposition.
                    if (boundUri != targetUri) {
                        boundUri = targetUri
                        isPrepared = false
                        playbackPositionMs = 0
                        runCatching {
                            videoView.setVideoURI(uri)
                            videoView.setOnPreparedListener { mediaPlayer ->
                                mediaPlayer.isLooping = false
                                playbackDurationMs =
                                    mediaPlayer.duration
                                        .coerceAtLeast(fallbackDurationMs)
                                isPrepared = true
                                // User may have tapped play before prepare finished.
                                if (isPlaying) {
                                    runCatching { videoView.start() }
                                }
                            }
                            videoView.setOnCompletionListener {
                                isPlaying = false
                                playbackPositionMs = playbackDurationMs
                            }
                        }
                    }
                    // Reconcile the VideoView with the Compose isPlaying state.
                    if (isPrepared) {
                        if (isPlaying && !videoView.isPlaying) {
                            runCatching { videoView.start() }
                        } else if (!isPlaying && videoView.isPlaying) {
                            runCatching { videoView.pause() }
                        }
                    }
                }
            )
        } else {
            // Not yet playable: radial-gradient placeholder matching the theme.
            Box(
                modifier =
                    Modifier.fillMaxSize()
                        .background(
                            Brush.radialGradient(
                                colors =
                                    if (isOutgoing) {
                                        listOf(
                                            Color(0x6637A7FF),
                                            Color(0x3337A7FF),
                                            Color(0x0037A7FF)
                                        )
                                    } else if (isDarkTheme) {
                                        listOf(
                                            Color(0x553A4150),
                                            Color(0x33262C39),
                                            Color(0x00262C39)
                                        )
                                    } else {
                                        listOf(
                                            Color(0x5593B4E8),
                                            Color(0x338AB0E5),
                                            Color(0x008AB0E5)
                                        )
                                    }
                            )
                        )
            )
        }
        // Center overlay: spinner / download / error / play-pause glyph.
        Box(
            modifier =
                Modifier.align(Alignment.Center)
                    .size(52.dp)
                    .clip(CircleShape)
                    .background(Color.Black.copy(alpha = 0.38f)),
            contentAlignment = Alignment.Center
        ) {
            when (downloadStatus) {
                DownloadStatus.DOWNLOADING, DownloadStatus.DECRYPTING -> {
                    CircularProgressIndicator(
                        modifier = Modifier.size(26.dp),
                        color = Color.White,
                        strokeWidth = 2.2.dp
                    )
                }
                DownloadStatus.NOT_DOWNLOADED -> {
                    Icon(
                        painter = painterResource(R.drawable.msg_download),
                        contentDescription = null,
                        tint = Color.White,
                        modifier = Modifier.size(24.dp)
                    )
                }
                DownloadStatus.ERROR -> {
                    Icon(
                        imageVector = Icons.Default.Close,
                        contentDescription = null,
                        tint = Color(0xFFFF8A8A),
                        modifier = Modifier.size(24.dp)
                    )
                }
                else -> {
                    Icon(
                        imageVector = if (isPlaying) Icons.Default.Pause else Icons.Default.PlayArrow,
                        contentDescription = null,
                        tint = Color.White,
                        modifier = Modifier.size(28.dp)
                    )
                }
            }
        }
        // Bottom-right overlay: duration and (for outgoing) delivery-status ticks.
        Row(
            modifier =
                Modifier.align(Alignment.BottomEnd)
                    .padding(end = 10.dp, bottom = 8.dp),
            verticalAlignment = Alignment.CenterVertically,
            horizontalArrangement = Arrangement.spacedBy(4.dp)
        ) {
            Text(
                text = formatVoiceDuration(durationToShowSec),
                fontSize = 11.sp,
                color = secondaryTextColor
            )
            if (isOutgoing) {
                when (messageStatus) {
                    MessageStatus.SENDING -> {
                        Icon(
                            painter = TelegramIcons.Clock,
                            contentDescription = null,
                            tint = secondaryTextColor,
                            modifier = Modifier.size(14.dp)
                        )
                    }
                    MessageStatus.SENT,
                    MessageStatus.DELIVERED -> {
                        Icon(
                            painter = TelegramIcons.Done,
                            contentDescription = null,
                            tint = secondaryTextColor,
                            modifier = Modifier.size(14.dp)
                        )
                    }
                    MessageStatus.READ -> {
                        // Double tick: second check offset to overlap the first.
                        Box(modifier = Modifier.height(14.dp)) {
                            Icon(
                                painter = TelegramIcons.Done,
                                contentDescription = null,
                                tint = secondaryTextColor,
                                modifier = Modifier.size(14.dp)
                            )
                            Icon(
                                painter = TelegramIcons.Done,
                                contentDescription = null,
                                tint = secondaryTextColor,
                                modifier = Modifier.size(14.dp).offset(x = 4.dp)
                            )
                        }
                    }
                    MessageStatus.ERROR -> {
                        Icon(
                            imageVector = Icons.Default.Error,
                            contentDescription = null,
                            tint = Color(0xFFE55757),
                            modifier = Modifier.size(14.dp)
                        )
                    }
                }
            }
        }
    }
}
/** File attachment - Telegram style */
@Composable
fun FileAttachment(

View File

@@ -684,7 +684,18 @@ fun MessageBubble(
message.attachments.all {
it.type ==
com.rosetta.messenger.network.AttachmentType
.IMAGE
.IMAGE ||
it.type ==
com.rosetta.messenger.network
.AttachmentType
.VIDEO_CIRCLE
}
val hasOnlyVideoCircle =
hasOnlyMedia &&
message.attachments.all {
it.type ==
com.rosetta.messenger.network.AttachmentType
.VIDEO_CIRCLE
}
// Фото + caption (как в Telegram)
@@ -725,7 +736,8 @@ fun MessageBubble(
hasImageWithCaption -> PaddingValues(0.dp)
else -> PaddingValues(horizontal = 10.dp, vertical = 8.dp)
}
val bubbleBorderWidth = if (hasOnlyMedia) 1.dp else 0.dp
val bubbleBorderWidth =
if (hasOnlyMedia && !hasOnlyVideoCircle) 1.dp else 0.dp
// Telegram-style: ширина пузырька = ширина фото
// Caption переносится на новые строки, не расширяя пузырёк
@@ -743,7 +755,9 @@ fun MessageBubble(
// Вычисляем ширину фото для ограничения пузырька
val photoWidth =
if (hasImageWithCaption || hasOnlyMedia) {
if (isImageCollage) {
if (hasOnlyVideoCircle) {
220.dp
} else if (isImageCollage) {
maxCollageWidth
} else {
val firstImage =

View File

@@ -11,12 +11,15 @@ import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Close
import androidx.compose.material.icons.filled.Mic
import androidx.compose.material.icons.filled.Videocam
import androidx.compose.animation.*
import androidx.compose.animation.core.*
import androidx.compose.foundation.Canvas
import androidx.compose.foundation.background
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.gestures.awaitEachGesture
import androidx.compose.foundation.gestures.awaitFirstDown
import androidx.compose.foundation.interaction.MutableInteractionSource
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
@@ -28,6 +31,7 @@ import androidx.compose.material3.*
import androidx.compose.ui.draw.alpha
import com.rosetta.messenger.ui.icons.TelegramIcons
import androidx.compose.runtime.*
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.snapshotFlow
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
@@ -36,9 +40,12 @@ import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.TransformOrigin
import androidx.compose.ui.graphics.graphicsLayer
import androidx.compose.ui.input.pointer.pointerInput
import androidx.compose.foundation.gestures.detectTapGestures
import androidx.compose.ui.input.pointer.changedToUpIgnoreConsumed
import androidx.compose.ui.input.pointer.pointerInput
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.layout.onGloballyPositioned
import androidx.compose.ui.layout.positionInWindow
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.platform.LocalDensity
import androidx.compose.ui.platform.LocalFocusManager
@@ -73,6 +80,7 @@ import com.rosetta.messenger.ui.chats.ChatViewModel
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.distinctUntilChanged
import kotlinx.coroutines.launch
import kotlinx.coroutines.Job
import java.io.File
import java.util.Locale
import java.util.UUID
@@ -142,6 +150,19 @@ private fun formatVoiceRecordTimer(elapsedMs: Long): String {
return "$minutes:$seconds,$tenths"
}
/**
 * What the hold-to-record button captures; toggled by a short tap
 * (see toggleRecordModeByTap).
 */
private enum class RecordMode {
    VOICE, // audio voice message
    VIDEO  // circular video note
}
/**
 * UI state machine for the record gesture.
 * NOTE(review): semantics below are inferred from usage in this file — confirm.
 */
private enum class RecordUiState {
    IDLE,      // no record interaction in progress
    PRESSING,  // finger down, recording not yet started/locked
    RECORDING, // actively recording while held
    LOCKED,    // hands-free recording after slide-to-lock
    PAUSED     // recording paused (resume/send still possible)
}
@Composable
private fun RecordBlinkDot(
isDarkTheme: Boolean,
@@ -544,15 +565,93 @@ fun MessageInputBar(
var voiceRecorder by remember { mutableStateOf<MediaRecorder?>(null) }
var voiceOutputFile by remember { mutableStateOf<File?>(null) }
var isVoiceRecording by remember { mutableStateOf(false) }
var isVoiceRecordTransitioning by remember { mutableStateOf(false) }
var recordMode by rememberSaveable { mutableStateOf(RecordMode.VOICE) }
var recordUiState by remember { mutableStateOf(RecordUiState.IDLE) }
var pressStartX by remember { mutableFloatStateOf(0f) }
var pressStartY by remember { mutableFloatStateOf(0f) }
var slideDx by remember { mutableFloatStateOf(0f) }
var slideDy by remember { mutableFloatStateOf(0f) }
var pendingLongPressJob by remember { mutableStateOf<Job?>(null) }
var pendingRecordAfterPermission by remember { mutableStateOf(false) }
var voiceRecordStartedAtMs by remember { mutableLongStateOf(0L) }
var voiceElapsedMs by remember { mutableLongStateOf(0L) }
var voiceWaves by remember { mutableStateOf<List<Float>>(emptyList()) }
var isVoicePaused by remember { mutableStateOf(false) }
var voicePausedElapsedMs by remember { mutableLongStateOf(0L) }
var inputPanelHeightPx by remember { mutableIntStateOf(0) }
var inputPanelY by remember { mutableFloatStateOf(0f) }
var normalInputRowHeightPx by remember { mutableIntStateOf(0) }
var normalInputRowY by remember { mutableFloatStateOf(0f) }
var recordingInputRowHeightPx by remember { mutableIntStateOf(0) }
var recordingInputRowY by remember { mutableFloatStateOf(0f) }
// Debug logger for diagnosing input-panel "jump" issues: appends a timestamped
// line to crash_reports files. Best-effort — all I/O failures are swallowed.
fun inputJumpLog(msg: String) {
    try {
        val ts = java.text.SimpleDateFormat("HH:mm:ss.SSS", java.util.Locale.getDefault())
            .format(java.util.Date())
        val dir = java.io.File(context.filesDir, "crash_reports")
        if (!dir.exists()) dir.mkdirs()
        val line = "$ts [InputJump] $msg\n"
        // Write newest records to TOP so they are immediately visible in Crash Details preview.
        // NOTE(review): rewrites the whole file per call (O(file size)) and trims
        // history to maxChars — acceptable for a debug aid, not for hot paths.
        fun writeNewestFirst(file: java.io.File, maxChars: Int = 220_000) {
            val existing = if (file.exists()) runCatching { file.readText() }.getOrDefault("") else ""
            file.writeText(line + existing.take(maxChars))
        }
        writeNewestFirst(java.io.File(dir, "rosettadev1.txt"))
        writeNewestFirst(java.io.File(dir, "rosettadev1_input.txt"))
    } catch (_: Exception) {}
}
// One-line summary of the measured input-row heights (px plus rounded dp)
// for embedding into inputJumpLog lines.
fun inputHeightsSnapshot(): String {
    val panelDp = with(density) { inputPanelHeightPx.toDp().value.toInt() }
    val normalDp = with(density) { normalInputRowHeightPx.toDp().value.toInt() }
    val recDp = with(density) { recordingInputRowHeightPx.toDp().value.toInt() }
    return "panel=${inputPanelHeightPx}px(${panelDp}dp) normal=${normalInputRowHeightPx}px(${normalDp}dp) rec=${recordingInputRowHeightPx}px(${recDp}dp)"
}
// Single choke point for record-state transitions: no-op on same-state,
// logs every actual change with the reason and current mode.
fun setRecordUiState(newState: RecordUiState, reason: String) {
    if (recordUiState == newState) return
    val oldState = recordUiState
    recordUiState = newState
    inputJumpLog("recordState $oldState -> $newState reason=$reason mode=$recordMode")
}
// Clears press/slide tracking and cancels any pending long-press job so the
// next gesture starts from a clean slate.
fun resetGestureState() {
    slideDx = 0f
    slideDy = 0f
    pressStartX = 0f
    pressStartY = 0f
    pendingLongPressJob?.cancel()
    pendingLongPressJob = null
}
// Short tap on the record button flips between voice and video-circle capture.
fun toggleRecordModeByTap() {
    recordMode = if (recordMode == RecordMode.VOICE) RecordMode.VIDEO else RecordMode.VOICE
    inputJumpLog("recordMode toggled -> $recordMode (short tap)")
}
val shouldPinBottomForInput =
isKeyboardVisible ||
coordinator.isEmojiBoxVisible ||
isVoiceRecordTransitioning ||
recordUiState == RecordUiState.PRESSING ||
recordUiState == RecordUiState.PAUSED
val shouldAddNavBarPadding = hasNativeNavigationBar && !shouldPinBottomForInput
    // Tears down the current voice recording and resets all recording state.
    // send=true hands the captured file off for sending; send=false discards it.
    // NOTE(review): the "@@ ... @@" lines below are diff-hunk residue — source lines are
    // missing at those points; do not treat this span as compilable as-is.
    fun stopVoiceRecording(send: Boolean) {
        isVoiceRecordTransitioning = false
        inputJumpLog(
            "stopVoiceRecording begin send=$send mode=$recordMode state=$recordUiState voice=$isVoiceRecording kb=$isKeyboardVisible " +
                "emojiBox=${coordinator.isEmojiBoxVisible} panelH=$inputPanelHeightPx " +
                "normalH=$normalInputRowHeightPx recH=$recordingInputRowHeightPx"
        )
        // Capture recorder/file references before the state fields are cleared below.
        val recorder = voiceRecorder
        val outputFile = voiceOutputFile
        // Snapshot of the recorded duration; the frozen paused duration wins when paused.
        val elapsedSnapshot =
            if (voiceRecordStartedAtMs > 0L) {
                if (isVoicePaused && voicePausedElapsedMs > 0L) {
                    voicePausedElapsedMs
                // NOTE(review): this condition repeats the enclosing `voiceRecordStartedAtMs > 0L`
                // check, so the `voiceElapsedMs` else-branch below is unreachable — simplify
                // once confirmed against the full file.
                } else if (voiceRecordStartedAtMs > 0L) {
                    maxOf(voiceElapsedMs, System.currentTimeMillis() - voiceRecordStartedAtMs)
                } else {
                    voiceElapsedMs
    @@ -563,6 +662,8 @@ fun MessageInputBar(
        voiceRecorder = null
        voiceOutputFile = null
        isVoiceRecording = false
        isVoicePaused = false
        voicePausedElapsedMs = 0L
        voiceRecordStartedAtMs = 0L
        voiceElapsedMs = 0L
        voiceWaves = emptyList()
    @@ -589,10 +690,21 @@ fun MessageInputBar(
        }
        }
        // Best-effort cleanup of the temp file when the recording is not kept.
        runCatching { outputFile?.delete() }
        resetGestureState()
        setRecordUiState(RecordUiState.IDLE, "stop(send=$send)")
        inputJumpLog(
            "stopVoiceRecording end send=$send mode=$recordMode state=$recordUiState voice=$isVoiceRecording kb=$isKeyboardVisible " +
                "emojiBox=${coordinator.isEmojiBoxVisible} panelH=$inputPanelHeightPx " +
                "normalH=$normalInputRowHeightPx recH=$recordingInputRowHeightPx"
        )
    }
    // Arms the recorder, hides the keyboard/emoji overlays, and flips the UI into
    // recording mode once the overlays have had a few frames to collapse.
    // NOTE(review): the "@@ ... @@" lines below are diff-hunk residue — source lines
    // (including the MediaRecorder setup) are missing at those points.
    fun startVoiceRecording() {
        if (isVoiceRecording) return
        inputJumpLog(
            "startVoiceRecording begin mode=$recordMode state=$recordUiState kb=$isKeyboardVisible emojiBox=${coordinator.isEmojiBoxVisible} " +
                "emojiPicker=$showEmojiPicker panelH=$inputPanelHeightPx normalH=$normalInputRowHeightPx"
        )
        try {
            // Recordings land in an app-cache subdirectory, created on demand.
            val voiceDir = File(context.cacheDir, "voice_recordings").apply { mkdirs() }
    @@ -619,12 +731,39 @@ fun MessageInputBar(
            voiceRecordStartedAtMs = System.currentTimeMillis()
            voiceElapsedMs = 0L
            voiceWaves = emptyList()
            // NOTE(review): isVoiceRecording is set true both here and again inside the
            // coroutine below — likely old/new diff residue; confirm which assignment
            // the final file keeps.
            isVoiceRecording = true
            isVoiceRecordTransitioning = true
            // Dismiss the IME and emoji picker so the recording row owns the bottom inset.
            val imm = context.getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
            imm.hideSoftInputFromWindow(view.windowToken, 0)
            focusManager.clearFocus(force = true)
            if (showEmojiPicker || coordinator.isEmojiBoxVisible) {
                onToggleEmojiPicker(false)
            }
            inputJumpLog(
                "startVoiceRecording armed mode=$recordMode state=$recordUiState voice=$isVoiceRecording kb=$isKeyboardVisible " +
                    "emojiBox=${coordinator.isEmojiBoxVisible} transitioning=$isVoiceRecordTransitioning " +
                    "pinBottom=$shouldPinBottomForInput " +
                    "panelH=$inputPanelHeightPx recH=$recordingInputRowHeightPx"
            )
            scope.launch {
                // Wait up to ~12 frames (16ms each) for the keyboard/emoji box to report hidden.
                // NOTE(review): return@repeat only skips ONE iteration — it does not exit the
                // loop once the overlays are gone; the remaining iterations just spin without
                // delaying. Net behavior is equivalent to a break, but wasteful — confirm intent.
                repeat(12) {
                    if (!isKeyboardVisible && !coordinator.isEmojiBoxVisible) return@repeat
                    delay(16)
                }
                isVoiceRecording = true
                isVoiceRecordTransitioning = false
                // Promote to RECORDING only from PRESSING/IDLE, never clobbering LOCKED/PAUSED.
                if (recordUiState == RecordUiState.PRESSING || recordUiState == RecordUiState.IDLE) {
                    setRecordUiState(RecordUiState.RECORDING, "voice-recorder-started")
                }
                inputJumpLog(
                    "startVoiceRecording ui-enter mode=$recordMode state=$recordUiState voice=$isVoiceRecording kb=$isKeyboardVisible " +
                        "emojiBox=${coordinator.isEmojiBoxVisible} transitioning=$isVoiceRecordTransitioning " +
                        "panelH=$inputPanelHeightPx recH=$recordingInputRowHeightPx"
                )
            }
        } catch (_: Exception) {
            // Recorder setup failed: roll back state and surface a toast to the user.
            isVoiceRecordTransitioning = false
            stopVoiceRecording(send = false)
            android.widget.Toast.makeText(
                context,
    @@ -634,13 +773,74 @@ fun MessageInputBar(
        }
    }
fun pauseVoiceRecording() {
val recorder = voiceRecorder ?: return
if (!isVoiceRecording || isVoicePaused) return
inputJumpLog("pauseVoiceRecording mode=$recordMode state=$recordUiState")
try {
recorder.pause()
isVoicePaused = true
voicePausedElapsedMs = voiceElapsedMs
setRecordUiState(RecordUiState.PAUSED, "pause-pressed")
} catch (e: Exception) {
inputJumpLog("pauseVoiceRecording failed: ${e.message}")
}
}
fun resumeVoiceRecording() {
val recorder = voiceRecorder ?: return
if (!isVoiceRecording || !isVoicePaused) return
inputJumpLog("resumeVoiceRecording mode=$recordMode state=$recordUiState")
try {
recorder.resume()
voiceRecordStartedAtMs = System.currentTimeMillis() - voicePausedElapsedMs
isVoicePaused = false
voicePausedElapsedMs = 0L
setRecordUiState(RecordUiState.LOCKED, "resume-pressed")
} catch (e: Exception) {
inputJumpLog("resumeVoiceRecording failed: ${e.message}")
}
}
    // Debug instrumentation: emits one de-duplicated log line whenever any observed
    // input/keyboard/record value changes. Reading the values inside snapshotFlow is
    // what subscribes the flow to their changes.
    LaunchedEffect(Unit) {
        snapshotFlow {
            val kb = coordinator.keyboardHeight.value.toInt()
            val em = coordinator.emojiHeight.value.toInt()
            // Quantize window-Y positions to 0.1px so sub-pixel jitter doesn't spam the log.
            val panelY = (inputPanelY * 10f).toInt() / 10f
            val normalY = (normalInputRowY * 10f).toInt() / 10f
            val recY = (recordingInputRowY * 10f).toInt() / 10f
            // Recomputed here (rather than reading shouldPinBottomForInput) so the flow
            // observes each contributing state directly.
            val pinBottom =
                isKeyboardVisible ||
                    coordinator.isEmojiBoxVisible ||
                    isVoiceRecordTransitioning ||
                    recordUiState == RecordUiState.PRESSING ||
                    recordUiState == RecordUiState.PAUSED
            val navPad = hasNativeNavigationBar && !pinBottom
            "mode=$recordMode state=$recordUiState slideDx=${slideDx.toInt()} slideDy=${slideDy.toInt()} " +
                "voice=$isVoiceRecording kbVis=$isKeyboardVisible kbDp=$kb emojiBox=${coordinator.isEmojiBoxVisible} " +
                "emojiVisible=$showEmojiPicker emojiDp=$em suppress=$suppressKeyboard " +
                "voiceTransitioning=$isVoiceRecordTransitioning " +
                "pinBottom=$pinBottom navPad=$navPad " +
                "panelH=$inputPanelHeightPx panelY=$panelY normalH=$normalInputRowHeightPx " +
                "normalY=$normalY recH=$recordingInputRowHeightPx recY=$recY"
        }.distinctUntilChanged().collect { stateLine ->
            inputJumpLog(stateLine)
        }
    }
    // RECORD_AUDIO permission result handler. When granted and a hold-gesture is still
    // pending, recording starts immediately; otherwise the UI returns to IDLE.
    // NOTE(review): the "@@ ... @@" line below is diff-hunk residue — the tail of the
    // Toast call is missing here.
    val recordAudioPermissionLauncher =
        rememberLauncherForActivityResult(
            contract = ActivityResultContracts.RequestPermission()
        ) { granted ->
            if (granted) {
                if (pendingRecordAfterPermission) {
                    pendingRecordAfterPermission = false
                    setRecordUiState(RecordUiState.RECORDING, "audio-permission-granted")
                    startVoiceRecording()
                }
            } else {
                // Denied: clear the pending flag so a stale grant can't start recording later.
                pendingRecordAfterPermission = false
                setRecordUiState(RecordUiState.IDLE, "audio-permission-denied")
                android.widget.Toast.makeText(
                    context,
                    "Microphone permission is required for voice messages",
    @@ -649,7 +849,11 @@ fun MessageInputBar(
            }
        }
    // NOTE(review): diff residue — the next two lines show both the removed signature
    // (requestVoiceRecording) and its replacement (requestVoiceRecordingFromHold);
    // only the Boolean-returning variant should survive in the final file.
    fun requestVoiceRecording() {
    fun requestVoiceRecordingFromHold(): Boolean {
        inputJumpLog(
            "requestVoiceRecordingFromHold mode=$recordMode state=$recordUiState voice=$isVoiceRecording kb=$isKeyboardVisible " +
                "emojiBox=${coordinator.isEmojiBoxVisible} ${inputHeightsSnapshot()}"
        )
        // Check RECORD_AUDIO up front; the permission constant is in the hunk gap below.
        val granted =
            ContextCompat.checkSelfPermission(
                context,
    @@ -657,14 +861,36 @@ fun MessageInputBar(
            ) == PackageManager.PERMISSION_GRANTED
        if (granted) {
            startVoiceRecording()
            return true
        } else {
            // Remember the in-flight hold so the permission callback can resume it,
            // then launch the system permission dialog.
            pendingRecordAfterPermission = true
            recordAudioPermissionLauncher.launch(Manifest.permission.RECORD_AUDIO)
            // Returns true in both branches: the gesture is considered "handled" even
            // while waiting on the permission dialog.
            return true
        }
    }
LaunchedEffect(isVoiceRecording, voiceRecorder) {
if (!isVoiceRecording) return@LaunchedEffect
while (isVoiceRecording && voiceRecorder != null) {
    // Hold this long before a press becomes a recording; shorter presses toggle record mode.
    val holdToRecordDelayMs = 260L
    // Horizontal drag distance (leftward) that cancels an unlocked recording.
    val cancelDragThresholdPx = with(density) { 92.dp.toPx() }
    // Vertical drag distance (upward) that locks the recording hands-free.
    val lockDragThresholdPx = with(density) { 70.dp.toPx() }
fun tryStartRecordingForCurrentMode(): Boolean {
return if (recordMode == RecordMode.VOICE) {
setRecordUiState(RecordUiState.RECORDING, "hold-threshold-passed")
requestVoiceRecordingFromHold()
} else {
setRecordUiState(RecordUiState.IDLE, "video-mode-record-not-ready")
android.widget.Toast.makeText(
context,
"Video circles recording will be enabled in next step",
android.widget.Toast.LENGTH_SHORT
).show()
false
}
}
LaunchedEffect(isVoiceRecording, voiceRecorder, isVoicePaused) {
if (!isVoiceRecording || isVoicePaused) return@LaunchedEffect
while (isVoiceRecording && voiceRecorder != null && !isVoicePaused) {
if (voiceRecordStartedAtMs > 0L) {
voiceElapsedMs =
(System.currentTimeMillis() - voiceRecordStartedAtMs).coerceAtLeast(0L)
@@ -673,9 +899,9 @@ fun MessageInputBar(
}
}
LaunchedEffect(isVoiceRecording, voiceRecorder) {
if (!isVoiceRecording) return@LaunchedEffect
while (isVoiceRecording && voiceRecorder != null) {
LaunchedEffect(isVoiceRecording, voiceRecorder, isVoicePaused) {
if (!isVoiceRecording || isVoicePaused) return@LaunchedEffect
while (isVoiceRecording && voiceRecorder != null && !isVoicePaused) {
val amplitude = runCatching { voiceRecorder?.maxAmplitude ?: 0 }.getOrDefault(0)
val normalized = (amplitude.toFloat() / 32_767f).coerceIn(0f, 1f)
voiceWaves = (voiceWaves + normalized).takeLast(120)
@@ -685,8 +911,12 @@ fun MessageInputBar(
    // Composable teardown: drop any pending permission continuation and gesture state,
    // and make sure a live recording is discarded (never auto-sent) on dispose.
    DisposableEffect(Unit) {
        onDispose {
            pendingRecordAfterPermission = false
            resetGestureState()
            if (isVoiceRecording) {
                // stopVoiceRecording also resets the UI state to IDLE.
                stopVoiceRecording(send = false)
            } else {
                setRecordUiState(RecordUiState.IDLE, "dispose")
            }
        }
    }
@@ -925,19 +1155,18 @@ fun MessageInputBar(
)
)
val shouldAddNavBarPadding =
hasNativeNavigationBar &&
!isKeyboardVisible &&
!coordinator.isEmojiBoxVisible
Column(
modifier = Modifier
.fillMaxWidth()
.background(color = backgroundColor)
.padding(
bottom = if (isKeyboardVisible || coordinator.isEmojiBoxVisible) 0.dp else 16.dp
bottom = if (shouldPinBottomForInput) 0.dp else 16.dp
)
.then(if (shouldAddNavBarPadding) Modifier.navigationBarsPadding() else Modifier)
.onGloballyPositioned { coordinates ->
inputPanelHeightPx = coordinates.size.height
inputPanelY = coordinates.positionInWindow().y
}
) {
AnimatedVisibility(
visible = mentionSuggestions.isNotEmpty(),
@@ -1354,13 +1583,17 @@ fun MessageInputBar(
modifier = Modifier
.fillMaxWidth()
.heightIn(min = 48.dp)
.padding(horizontal = 12.dp, vertical = 8.dp),
.padding(horizontal = 12.dp, vertical = 8.dp)
.onGloballyPositioned { coordinates ->
recordingInputRowHeightPx = coordinates.size.height
recordingInputRowY = coordinates.positionInWindow().y
},
contentAlignment = Alignment.CenterEnd
) {
Box(
modifier = Modifier
.fillMaxWidth()
.heightIn(min = 40.dp)
.height(40.dp)
.clip(RoundedCornerShape(20.dp))
.background(recordingPanelColor)
.padding(start = 13.dp, end = 94.dp) // record panel paddings
@@ -1385,38 +1618,69 @@ fun MessageInputBar(
}
Text(
text = "CANCEL",
color = PrimaryBlue,
fontSize = 15.sp,
fontWeight = FontWeight.Bold,
modifier = Modifier
text =
if (recordUiState == RecordUiState.LOCKED) {
"CANCEL"
} else {
"Slide left to cancel • up to lock"
},
color = if (recordUiState == RecordUiState.LOCKED) PrimaryBlue else recordingTextColor.copy(alpha = 0.82f),
fontSize = if (recordUiState == RecordUiState.LOCKED) 15.sp else 13.sp,
fontWeight = if (recordUiState == RecordUiState.LOCKED) FontWeight.Bold else FontWeight.Medium,
maxLines = 1,
overflow = TextOverflow.Ellipsis,
modifier =
Modifier
.align(Alignment.Center)
.graphicsLayer {
alpha = recordUiAlpha
translationX = with(density) { recordUiShift.toPx() }
}
.clickable(
.then(
if (recordUiState == RecordUiState.LOCKED) {
Modifier.clickable(
interactionSource = remember { MutableInteractionSource() },
indication = null
) { stopVoiceRecording(send = false) }
) {
inputJumpLog(
"tap CANCEL (locked) mode=$recordMode state=$recordUiState " +
"voice=$isVoiceRecording kb=$isKeyboardVisible emojiBox=${coordinator.isEmojiBoxVisible} " +
inputHeightsSnapshot()
)
stopVoiceRecording(send = false)
}
} else {
Modifier
}
)
)
}
Box(
modifier = Modifier
.requiredSize(104.dp) // do not affect input row height
.size(40.dp)
.offset(x = 8.dp),
contentAlignment = Alignment.Center
) {
VoiceButtonBlob(
voiceLevel = voiceLevel,
isDarkTheme = isDarkTheme,
modifier = Modifier.fillMaxSize()
modifier =
Modifier
.fillMaxSize()
.graphicsLayer {
// Visual-only enlargement like Telegram record circle,
// while keeping layout hitbox at normal input size.
scaleX = 2.05f
scaleY = 2.05f
clip = false
}
)
if (recordUiState == RecordUiState.LOCKED) {
Box(
modifier = Modifier
.size(82.dp) // Telegram RecordCircle radius 41dp
.requiredSize(82.dp)
.shadow(
elevation = 10.dp,
shape = CircleShape,
@@ -1427,7 +1691,13 @@ fun MessageInputBar(
.clickable(
interactionSource = remember { MutableInteractionSource() },
indication = null
) { stopVoiceRecording(send = true) },
) {
inputJumpLog(
"tap SEND (locked) mode=$recordMode state=$recordUiState voice=$isVoiceRecording " +
"kb=$isKeyboardVisible emojiBox=${coordinator.isEmojiBoxVisible} ${inputHeightsSnapshot()}"
)
stopVoiceRecording(send = true)
},
contentAlignment = Alignment.Center
) {
Icon(
@@ -1437,6 +1707,23 @@ fun MessageInputBar(
modifier = Modifier.size(30.dp)
)
}
} else {
Box(
modifier =
Modifier
.requiredSize(82.dp)
.clip(CircleShape)
.background(PrimaryBlue.copy(alpha = 0.92f)),
contentAlignment = Alignment.Center
) {
Icon(
imageVector = if (recordMode == RecordMode.VOICE) Icons.Default.Mic else Icons.Default.Videocam,
contentDescription = null,
tint = Color.White,
modifier = Modifier.size(30.dp)
)
}
}
}
}
} else {
@@ -1444,7 +1731,11 @@ fun MessageInputBar(
modifier = Modifier
.fillMaxWidth()
.heightIn(min = 48.dp)
.padding(horizontal = 12.dp, vertical = 8.dp),
.padding(horizontal = 12.dp, vertical = 8.dp)
.onGloballyPositioned { coordinates ->
normalInputRowHeightPx = coordinates.size.height
normalInputRowY = coordinates.positionInWindow().y
},
verticalAlignment = Alignment.Bottom
) {
IconButton(
@@ -1481,6 +1772,12 @@ fun MessageInputBar(
requestFocus = hasReply,
onViewCreated = { view -> editTextView = view },
onFocusChanged = { hasFocus ->
if (hasFocus) {
inputJumpLog(
"tap INPUT focus=true voice=$isVoiceRecording kb=$isKeyboardVisible " +
"emojiBox=${coordinator.isEmojiBoxVisible} ${inputHeightsSnapshot()}"
)
}
if (hasFocus && showEmojiPicker) {
onToggleEmojiPicker(false)
}
@@ -1533,13 +1830,141 @@ fun MessageInputBar(
enter = scaleIn(tween(140)) + fadeIn(tween(140)),
exit = scaleOut(tween(100)) + fadeOut(tween(100))
) {
IconButton(
onClick = { requestVoiceRecording() },
modifier = Modifier.size(40.dp)
Box(
modifier =
Modifier
.size(40.dp)
.pointerInput(Unit) {
awaitEachGesture {
if (canSend || isSending || isVoiceRecording || isVoiceRecordTransitioning) {
return@awaitEachGesture
}
val down = awaitFirstDown(requireUnconsumed = false)
val tapSlopPx = viewConfiguration.touchSlop
var pointerIsDown = true
var maxAbsDx = 0f
var maxAbsDy = 0f
pressStartX = down.position.x
pressStartY = down.position.y
slideDx = 0f
slideDy = 0f
pendingRecordAfterPermission = false
setRecordUiState(RecordUiState.PRESSING, "mic-down")
inputJumpLog(
"mic DOWN mode=$recordMode state=$recordUiState " +
"voice=$isVoiceRecording kb=$isKeyboardVisible ${inputHeightsSnapshot()}"
)
pendingLongPressJob?.cancel()
pendingLongPressJob =
scope.launch {
delay(holdToRecordDelayMs)
if (pointerIsDown && recordUiState == RecordUiState.PRESSING) {
val started = tryStartRecordingForCurrentMode()
if (!started) {
resetGestureState()
setRecordUiState(RecordUiState.IDLE, "hold-start-failed")
}
}
}
var finished = false
while (!finished) {
val event = awaitPointerEvent()
val change = event.changes.firstOrNull { it.id == down.id }
?: event.changes.firstOrNull()
?: continue
if (change.changedToUpIgnoreConsumed()) {
pointerIsDown = false
pendingLongPressJob?.cancel()
pendingLongPressJob = null
pendingRecordAfterPermission = false
when (recordUiState) {
RecordUiState.PRESSING -> {
val movedBeyondTap =
maxAbsDx > tapSlopPx || maxAbsDy > tapSlopPx
if (!movedBeyondTap) {
toggleRecordModeByTap()
setRecordUiState(RecordUiState.IDLE, "short-tap-toggle")
} else {
setRecordUiState(RecordUiState.IDLE, "press-release-after-move")
}
}
RecordUiState.RECORDING -> {
inputJumpLog(
"mic UP -> send (unlocked) mode=$recordMode state=$recordUiState"
)
if (isVoiceRecording || voiceRecorder != null) {
stopVoiceRecording(send = true)
} else {
setRecordUiState(RecordUiState.IDLE, "release-without-recorder")
}
}
RecordUiState.LOCKED -> {
inputJumpLog(
"mic UP while LOCKED -> keep recording mode=$recordMode state=$recordUiState"
)
}
RecordUiState.PAUSED -> {
inputJumpLog(
"mic UP while PAUSED -> stay paused mode=$recordMode state=$recordUiState"
)
}
RecordUiState.IDLE -> Unit
}
resetGestureState()
finished = true
} else if (recordUiState == RecordUiState.PRESSING) {
val dx = change.position.x - pressStartX
val dy = change.position.y - pressStartY
val absDx = kotlin.math.abs(dx)
val absDy = kotlin.math.abs(dy)
if (absDx > maxAbsDx) maxAbsDx = absDx
if (absDy > maxAbsDy) maxAbsDy = absDy
} else if (
recordUiState == RecordUiState.RECORDING ||
recordUiState == RecordUiState.LOCKED
) {
val dx = change.position.x - pressStartX
val dy = change.position.y - pressStartY
slideDx = dx
slideDy = dy
if (recordUiState == RecordUiState.RECORDING) {
if (dx <= -cancelDragThresholdPx) {
inputJumpLog(
"gesture CANCEL dx=${dx.toInt()} threshold=${cancelDragThresholdPx.toInt()} mode=$recordMode"
)
stopVoiceRecording(send = false)
setRecordUiState(RecordUiState.IDLE, "slide-cancel")
resetGestureState()
finished = true
} else if (dy <= -lockDragThresholdPx) {
setRecordUiState(
RecordUiState.LOCKED,
"slide-lock dy=${dy.toInt()}"
)
}
}
}
change.consume()
}
pendingLongPressJob?.cancel()
pendingLongPressJob = null
if (recordUiState == RecordUiState.PRESSING) {
setRecordUiState(RecordUiState.IDLE, "gesture-end")
resetGestureState()
}
}
},
contentAlignment = Alignment.Center
) {
Icon(
imageVector = Icons.Default.Mic,
contentDescription = "Record voice message",
imageVector = if (recordMode == RecordMode.VOICE) Icons.Default.Mic else Icons.Default.Videocam,
contentDescription = "Record message",
tint = PrimaryBlue,
modifier = Modifier.size(24.dp)
)