бейдж упоминаний в чат-листе, прямая навигация по @mention, тап на аватарку → профиль, RequestChats на UIKit
This commit is contained in:
315
RosettaTests/SlidingWindowTests.swift
Normal file
315
RosettaTests/SlidingWindowTests.swift
Normal file
@@ -0,0 +1,315 @@
|
||||
import XCTest
|
||||
@testable import Rosetta
|
||||
|
||||
/// Tests for MessageRepository sliding window: soft cache limits, bidirectional pagination,
/// and batch re-decryption. Ensures cache doesn't grow unbounded during scroll and that
/// pagination trim removes messages from the correct edge.
@MainActor
final class SlidingWindowTests: XCTestCase {
    private var ctx: DBTestContext!
    private let opponent = "02peer_sliding_window_test"

    override func setUpWithError() throws {
        ctx = DBTestContext()
    }

    override func tearDownWithError() throws {
        ctx.teardown()
        ctx = nil
    }

    // MARK: - Helpers

    /// Inserts `count` incoming messages with sequential timestamps into DB + cache.
    private func insertMessages(count: Int, startTimestamp: Int64 = 1000) async throws {
        try await ctx.bootstrap()

        var events: [FixtureEvent] = []
        for i in 0..<count {
            events.append(.incoming(
                opponent: opponent,
                messageId: "msg-\(String(format: "%04d", i))",
                timestamp: startTimestamp + Int64(i),
                text: "Message \(i)"
            ))
        }
        try await ctx.runScenario(FixtureScenario(name: "bulk insert", events: events))
    }

    /// Loads older pages up to `maxLoads` times, stopping early once the DB is exhausted.
    /// Returns the number of non-empty pages loaded.
    /// Extracted because five tests previously duplicated this loop verbatim.
    @discardableResult
    private func paginateOlder(maxLoads: Int) -> Int {
        var loads = 0
        while loads < maxLoads {
            guard let earliest = MessageRepository.shared.messages(for: opponent).first else { break }
            let older = MessageRepository.shared.loadOlderMessages(
                for: opponent,
                beforeTimestamp: earliest.timestamp,
                beforeMessageId: earliest.id,
                limit: MessageRepository.pageSize
            )
            if older.isEmpty { break }
            loads += 1
        }
        return loads
    }

    /// Loads newer pages up to `maxLoads` times, stopping early once caught up to the latest.
    @discardableResult
    private func paginateNewer(maxLoads: Int) -> Int {
        var loads = 0
        while loads < maxLoads {
            guard let latest = MessageRepository.shared.messages(for: opponent).last else { break }
            let newer = MessageRepository.shared.loadNewerMessages(
                for: opponent,
                afterTimestamp: latest.timestamp,
                afterMessageId: latest.id,
                limit: MessageRepository.pageSize
            )
            if newer.isEmpty { break }
            loads += 1
        }
        return loads
    }

    // MARK: - maxCacheSize Sanity

    /// Verify that maxCacheSize is 200 (Telegram parity).
    func testMaxCacheSizeIs200() {
        XCTAssertEqual(MessageRepository.maxCacheSize, 200)
    }

    /// Verify that pageSize is 50 (Android parity).
    func testPageSizeIs50() {
        XCTAssertEqual(MessageRepository.pageSize, 50)
    }

    // MARK: - Initial Load

    /// Initial cache load uses pageSize (50), not maxCacheSize (200).
    /// After async initial load optimization, messages(for:) returns [] on cache miss
    /// and triggers background decryption. Poll until the cache is populated.
    func testInitialLoadUsesPageSize() async throws {
        try await insertMessages(count: 100)

        // Clear cache, force reload
        MessageRepository.shared.reset()
        try await ctx.bootstrap()

        // Trigger async initial load
        let _ = MessageRepository.shared.messages(for: opponent)

        // Wait for background decrypt to complete and update cache (up to ~1s)
        for _ in 0..<20 {
            try? await Task.sleep(for: .milliseconds(50))
            let cached = MessageRepository.shared.messages(for: opponent)
            if !cached.isEmpty {
                XCTAssertEqual(cached.count, MessageRepository.pageSize,
                    "Initial load must use pageSize (\(MessageRepository.pageSize)), got \(cached.count)")
                return
            }
        }
        XCTFail("Async initial load did not complete within 1 second")
    }

    // MARK: - loadOlderMessages Soft Trim

    /// When cache exceeds 3× maxCacheSize via loadOlderMessages,
    /// newest messages are trimmed to 2× maxCacheSize.
    func testLoadOlderTrimsCacheWhenExceeding3xMax() async throws {
        // Insert enough messages to trigger soft trim:
        // we need > 3 * 200 = 600 messages in cache.
        let totalMessages = 700
        try await insertMessages(count: totalMessages)

        // Reset and reload latest 200
        MessageRepository.shared.reloadLatest(for: opponent)
        let initial = MessageRepository.shared.messages(for: opponent)
        XCTAssertEqual(initial.count, MessageRepository.maxCacheSize)

        // Paginate up multiple times to grow cache beyond 3× maxCacheSize
        paginateOlder(maxLoads: 15)

        let finalCache = MessageRepository.shared.messages(for: opponent)
        let hardLimit = MessageRepository.maxCacheSize * 3

        XCTAssertLessThanOrEqual(finalCache.count, hardLimit,
            "Cache must not exceed 3× maxCacheSize (\(hardLimit)), got \(finalCache.count)")
    }

    /// loadOlderMessages does NOT trim when cache is within 3× maxCacheSize.
    func testLoadOlderDoesNotTrimBelowThreshold() async throws {
        try await insertMessages(count: 300)

        // Load initial 200
        MessageRepository.shared.reloadLatest(for: opponent)

        // One pagination = +50, total ~250 (below 600 threshold)
        guard let earliest = MessageRepository.shared.messages(for: opponent).first else {
            XCTFail("No messages")
            return
        }
        let older = MessageRepository.shared.loadOlderMessages(
            for: opponent,
            beforeTimestamp: earliest.timestamp,
            beforeMessageId: earliest.id,
            limit: MessageRepository.pageSize
        )
        // 300 inserted, 200 cached → an older page must exist.
        // (The original bound `older` without using it — a dead binding.)
        XCTAssertFalse(older.isEmpty, "Expected an older page: 300 in DB, 200 in cache")

        let cached = MessageRepository.shared.messages(for: opponent)
        // 200 + 50 = 250; message IDs are unique, so the cache must grow past
        // maxCacheSize while staying under the 3× soft limit.
        XCTAssertGreaterThan(cached.count, MessageRepository.maxCacheSize,
            "Cache should grow beyond maxCacheSize during pagination")
        XCTAssertLessThanOrEqual(cached.count, MessageRepository.maxCacheSize * 3,
            "Cache should stay below soft limit")
    }

    // MARK: - loadNewerMessages Soft Trim

    /// When cache exceeds 3× maxCacheSize via loadNewerMessages,
    /// oldest messages are trimmed to 2× maxCacheSize.
    func testLoadNewerTrimsCacheWhenExceeding3xMax() async throws {
        let totalMessages = 700
        try await insertMessages(count: totalMessages)

        // Load oldest 200 (simulate user scrolled all the way up):
        // reload latest, then paginate up to reach the oldest messages.
        MessageRepository.shared.reloadLatest(for: opponent)
        paginateOlder(maxLoads: 12)

        // Now paginate DOWN (loadNewerMessages) to grow cache from the other direction
        paginateNewer(maxLoads: 15)

        let finalCache = MessageRepository.shared.messages(for: opponent)
        let hardLimit = MessageRepository.maxCacheSize * 3

        XCTAssertLessThanOrEqual(finalCache.count, hardLimit,
            "Cache must not exceed 3× maxCacheSize (\(hardLimit)) after bidirectional pagination, got \(finalCache.count)")
    }

    // MARK: - reloadLatest Resets Cache

    /// reloadLatest must reset cache to exactly maxCacheSize.
    func testReloadLatestResetsCacheSize() async throws {
        try await insertMessages(count: 300)

        // reloadLatest loads maxCacheSize (200) messages
        MessageRepository.shared.reloadLatest(for: opponent)
        let initial = MessageRepository.shared.messages(for: opponent)
        XCTAssertEqual(initial.count, MessageRepository.maxCacheSize,
            "reloadLatest must load exactly maxCacheSize messages")

        // Paginate up to grow cache beyond maxCacheSize
        paginateOlder(maxLoads: 3)

        let beforeReload = MessageRepository.shared.messages(for: opponent).count
        XCTAssertGreaterThan(beforeReload, MessageRepository.maxCacheSize,
            "Cache should have grown via pagination, got \(beforeReload)")

        // Jump to bottom = reloadLatest resets cache
        MessageRepository.shared.reloadLatest(for: opponent)

        let afterReload = MessageRepository.shared.messages(for: opponent).count
        XCTAssertEqual(afterReload, MessageRepository.maxCacheSize,
            "reloadLatest must reset cache to maxCacheSize (\(MessageRepository.maxCacheSize)), got \(afterReload)")
    }

    // MARK: - Message Order Preserved

    /// Messages must stay sorted by timestamp ASC after pagination trim.
    func testMessageOrderPreservedAfterTrim() async throws {
        try await insertMessages(count: 700)

        MessageRepository.shared.reloadLatest(for: opponent)

        // Paginate up enough to trigger trim (700 > 3 × 200)
        paginateOlder(maxLoads: 15)

        let cached = MessageRepository.shared.messages(for: opponent)
        // Verify ascending timestamp order
        for i in 1..<cached.count {
            XCTAssertGreaterThanOrEqual(cached[i].timestamp, cached[i - 1].timestamp,
                "Messages must be sorted by timestamp ASC after trim. " +
                "Index \(i): \(cached[i].timestamp) < \(cached[i - 1].timestamp)")
        }
    }

    // MARK: - No Duplicate Messages After Pagination

    /// Pagination must not create duplicate message IDs in cache.
    func testNoDuplicatesAfterPagination() async throws {
        try await insertMessages(count: 400)

        MessageRepository.shared.reloadLatest(for: opponent)

        // Paginate up
        paginateOlder(maxLoads: 5)

        let cached = MessageRepository.shared.messages(for: opponent)
        let ids = cached.map(\.id)
        let uniqueIds = Set(ids)
        XCTAssertEqual(ids.count, uniqueIds.count,
            "Cache must not contain duplicate message IDs. \(ids.count) total, \(uniqueIds.count) unique")
    }

    // MARK: - In-Memory Patch (Delivery Status)

    /// Delivery status update patches cache in-memory without full refresh.
    func testDeliveryStatusPatchesInMemory() async throws {
        try await ctx.bootstrap()
        try await ctx.runScenario(FixtureScenario(name: "patch test", events: [
            .outgoing(opponent: opponent, messageId: "patch-1", timestamp: 5000, text: "hello"),
        ]))

        let before = MessageRepository.shared.messages(for: opponent)
        XCTAssertEqual(before.first?.deliveryStatus, .waiting)

        // Patch delivery status
        MessageRepository.shared.updateDeliveryStatus(messageId: "patch-1", status: .delivered)

        let after = MessageRepository.shared.messages(for: opponent)
        XCTAssertEqual(after.first?.deliveryStatus, .delivered,
            "Delivery status must be patched in-memory without full cache refresh")
    }

    // MARK: - In-Memory Patch (Read Status)

    /// markOutgoingAsRead patches cache in-memory.
    func testMarkOutgoingReadPatchesInMemory() async throws {
        try await ctx.bootstrap()
        try await ctx.runScenario(FixtureScenario(name: "read patch", events: [
            .outgoing(opponent: opponent, messageId: "read-1", timestamp: 6000, text: "hello"),
            .markDelivered(opponent: opponent, messageId: "read-1"),
        ]))

        let before = MessageRepository.shared.messages(for: opponent)
        XCTAssertFalse(before.first?.isRead ?? true)

        MessageRepository.shared.markOutgoingAsRead(opponentKey: opponent, myPublicKey: ctx.account)

        let after = MessageRepository.shared.messages(for: opponent)
        XCTAssertTrue(after.first?.isRead ?? false,
            "Read status must be patched in-memory")
    }
}
|
||||
308
RosettaTests/VoiceRecordingParityCheckerTests.swift
Normal file
308
RosettaTests/VoiceRecordingParityCheckerTests.swift
Normal file
@@ -0,0 +1,308 @@
|
||||
import CryptoKit
|
||||
import Foundation
|
||||
import XCTest
|
||||
|
||||
/// Verifies the shipped voice-recording implementation against the JSON parity
/// baseline at `docs/voice-recording-parity-baseline.json`: regex-extracted
/// constants and geometry values, the recorder state machine, accessibility and
/// animation snippets, and asset/Lottie SHA-256 hashes. Any P0/P1 finding fails
/// the test with a pretty-printed findings report.
final class VoiceRecordingParityCheckerTests: XCTestCase {

    func testVoiceRecordingParityBaselineHasNoBlockingFindings() throws {
        // Repo root = two directory levels above this test file.
        let root = URL(fileURLWithPath: #filePath)
            .deletingLastPathComponent()
            .deletingLastPathComponent()

        let baselineURL = root.appendingPathComponent("docs/voice-recording-parity-baseline.json")
        let baselineData = try Data(contentsOf: baselineURL)
        let baselineAny = try JSONSerialization.jsonObject(with: baselineData, options: [])
        guard let baseline = baselineAny as? [String: Any] else {
            XCTFail("invalid baseline format")
            return
        }

        var findings: [[String: String]] = []

        // "constants" and "geometry" share one spec schema; the original code
        // duplicated this entire loop verbatim for both sections.
        try checkRegexSpecs(baseline["constants"] as? [[String: Any]] ?? [],
                            root: root, findings: &findings)
        try checkRegexSpecs(baseline["geometry"] as? [[String: Any]] ?? [],
                            root: root, findings: &findings)

        if let flow = baseline["flow"] as? [String: Any] {
            try checkFlow(flow, root: root, findings: &findings)
        }

        // Accessibility and animation sections differ only in the finding kind.
        try checkSnippetSpecs(baseline["accessibility"] as? [[String: Any]] ?? [],
                              missingKind: "accessibility_missing", root: root, findings: &findings)
        try checkSnippetSpecs(baseline["animations"] as? [[String: Any]] ?? [],
                              missingKind: "animation_snippet_missing", root: root, findings: &findings)

        if let assets = baseline["assets"] as? [String: Any] {
            try checkAssets(assets, root: root, findings: &findings)
        }

        // Only P0/P1 findings block the build; lower severities are informational.
        let blocking = findings.filter { finding in
            let severity = finding["severity"] ?? "P3"
            return severity == "P0" || severity == "P1"
        }

        if !blocking.isEmpty {
            let data = try JSONSerialization.data(withJSONObject: blocking, options: [.prettyPrinted, .sortedKeys])
            let details = String(data: data, encoding: .utf8) ?? "<unprintable>"
            XCTFail("blocking voice parity findings:\n\(details)")
        }
    }

    // MARK: - Spec checkers

    /// Checks regex-based value specs (used by both the "constants" and
    /// "geometry" baseline sections). Records `missing_pattern` /
    /// `value_mismatch` findings, plus `telegram_reference_missing` when the
    /// optional Telegram cross-reference pattern does not match its file.
    private func checkRegexSpecs(_ specs: [[String: Any]], root: URL,
                                 findings: inout [[String: String]]) throws {
        for spec in specs {
            let id = spec["id"] as? String ?? "unknown"
            let severity = spec["severity"] as? String ?? "P1"
            let file = spec["file"] as? String ?? ""
            let pattern = spec["pattern"] as? String ?? ""
            let expected = spec["expected"] as? String ?? ""
            let rawMatch = spec["raw_match"] as? Bool ?? false

            let rosettaText = try readText(root: root, relativePath: file)
            let actual = regexCapture(text: rosettaText, pattern: pattern, rawMatch: rawMatch)
            if actual == nil {
                findings.append([
                    "severity": severity,
                    "kind": "missing_pattern",
                    "id": id,
                    "evidence": file
                ])
            } else if let actual, actual != expected {
                findings.append([
                    "severity": severity,
                    "kind": "value_mismatch",
                    "id": id,
                    "evidence": file,
                    "actual": actual,
                    "expected": expected
                ])
            }

            if let telegramFile = spec["telegram_file"] as? String,
               let telegramPattern = spec["telegram_pattern"] as? String {
                let telegramText = try readText(root: root, relativePath: telegramFile)
                if regexCapture(text: telegramText, pattern: telegramPattern, rawMatch: true) == nil {
                    findings.append([
                        "severity": severity,
                        "kind": "telegram_reference_missing",
                        "id": id,
                        "evidence": telegramFile
                    ])
                }
            }
        }
    }

    /// Checks the recorder state machine: the exact ordered list of `case`s in
    /// the state file, plus required literal transition snippets.
    private func checkFlow(_ flow: [String: Any], root: URL,
                           findings: inout [[String: String]]) throws {
        let flowSeverity = flow["severity"] as? String ?? "P1"
        let stateFile = flow["state_file"] as? String ?? ""
        let expectedStates = flow["expected_states"] as? [String] ?? []
        let stateText = try readText(root: root, relativePath: stateFile)
        let actualStates = regexMatches(text: stateText, pattern: "case\\s+([A-Za-z_][A-Za-z0-9_]*)")
        // Order matters: the comparison is against the full ordered case list.
        if actualStates != expectedStates {
            findings.append([
                "severity": flowSeverity,
                "kind": "state_machine_mismatch",
                "id": "flow_states",
                "evidence": stateFile
            ])
        }

        let requiredTransitions = flow["required_transitions"] as? [[String: Any]] ?? []
        for transition in requiredTransitions {
            let transitionId = transition["id"] as? String ?? "unknown"
            let transitionSeverity = transition["severity"] as? String ?? "P1"
            let transitionFile = transition["file"] as? String ?? ""
            let snippet = transition["snippet"] as? String ?? ""
            let transitionText = try readText(root: root, relativePath: transitionFile)
            if !transitionText.contains(snippet) {
                findings.append([
                    "severity": transitionSeverity,
                    "kind": "transition_missing",
                    "id": transitionId,
                    "evidence": transitionFile
                ])
            }
        }
    }

    /// Checks literal-snippet specs; `missingKind` is the finding kind recorded
    /// when a snippet is absent from its file.
    private func checkSnippetSpecs(_ specs: [[String: Any]], missingKind: String, root: URL,
                                   findings: inout [[String: String]]) throws {
        for spec in specs {
            let id = spec["id"] as? String ?? "unknown"
            let severity = spec["severity"] as? String ?? "P1"
            let file = spec["file"] as? String ?? ""
            let snippet = spec["snippet"] as? String ?? ""
            let text = try readText(root: root, relativePath: file)
            if !text.contains(snippet) {
                findings.append([
                    "severity": severity,
                    "kind": missingKind,
                    "id": id,
                    "evidence": file
                ])
            }
        }
    }

    /// Checks asset parity: imageset directories (existence + per-file SHA-256)
    /// and standalone Lottie files (existence + SHA-256).
    private func checkAssets(_ assets: [String: Any], root: URL,
                             findings: inout [[String: String]]) throws {
        let imagesets = assets["imagesets"] as? [[String: Any]] ?? []
        for imageset in imagesets {
            let assetId = imageset["id"] as? String ?? "unknown"
            let severity = imageset["severity"] as? String ?? "P1"
            let path = imageset["path"] as? String ?? ""
            let files = imageset["files"] as? [[String: Any]] ?? []

            let imagesetURL = root.appendingPathComponent(path)
            if !FileManager.default.fileExists(atPath: imagesetURL.path) {
                findings.append([
                    "severity": severity,
                    "kind": "asset_missing",
                    "id": assetId,
                    "evidence": path
                ])
                continue
            }

            for fileSpec in files {
                let fileName = fileSpec["name"] as? String ?? ""
                let expectedSha = fileSpec["sha256"] as? String ?? ""
                let fileURL = imagesetURL.appendingPathComponent(fileName)
                if !FileManager.default.fileExists(atPath: fileURL.path) {
                    findings.append([
                        "severity": severity,
                        "kind": "asset_file_missing",
                        "id": "\(assetId)/\(fileName)",
                        "evidence": path
                    ])
                    continue
                }

                let actualSha = try sha256(fileURL)
                if actualSha != expectedSha {
                    findings.append([
                        "severity": severity,
                        "kind": "asset_hash_mismatch",
                        "id": "\(assetId)/\(fileName)",
                        "evidence": path
                    ])
                }
            }
        }

        let lottie = assets["lottie"] as? [[String: Any]] ?? []
        for lottieSpec in lottie {
            let lottieId = lottieSpec["id"] as? String ?? "unknown"
            let severity = lottieSpec["severity"] as? String ?? "P1"
            let path = lottieSpec["path"] as? String ?? ""
            let expectedSha = lottieSpec["sha256"] as? String ?? ""
            let lottieURL = root.appendingPathComponent(path)

            if !FileManager.default.fileExists(atPath: lottieURL.path) {
                findings.append([
                    "severity": severity,
                    "kind": "lottie_missing",
                    "id": lottieId,
                    "evidence": path
                ])
                continue
            }

            let actualSha = try sha256(lottieURL)
            if actualSha != expectedSha {
                findings.append([
                    "severity": severity,
                    "kind": "lottie_hash_mismatch",
                    "id": lottieId,
                    "evidence": path
                ])
            }
        }
    }

    // MARK: - Primitives

    /// Reads a UTF-8 text file relative to the repository root.
    private func readText(root: URL, relativePath: String) throws -> String {
        let url = root.appendingPathComponent(relativePath)
        return try String(contentsOf: url, encoding: .utf8)
    }

    /// Hex-encoded SHA-256 of the file's contents.
    private func sha256(_ url: URL) throws -> String {
        let data = try Data(contentsOf: url)
        let digest = SHA256.hash(data: data)
        return digest.map { String(format: "%02x", $0) }.joined()
    }

    /// First match of `pattern` in `text`. With `rawMatch` the PATTERN string
    /// itself is returned as a presence marker (callers compare it against
    /// `expected`, so raw-match baselines must set expected == pattern);
    /// otherwise capture group 1 is returned, or nil when the pattern or its
    /// first group does not match.
    private func regexCapture(text: String, pattern: String, rawMatch: Bool) -> String? {
        guard let regex = try? NSRegularExpression(pattern: pattern, options: []) else {
            return nil
        }
        let nsText = text as NSString
        let range = NSRange(location: 0, length: nsText.length)
        guard let match = regex.firstMatch(in: text, options: [], range: range) else {
            return nil
        }
        if rawMatch {
            return pattern
        }
        guard match.numberOfRanges > 1 else {
            return nil
        }
        let captureRange = match.range(at: 1)
        guard captureRange.location != NSNotFound else {
            return nil
        }
        return nsText.substring(with: captureRange)
    }

    /// All capture-group-1 values of `pattern` across `text`, in document order.
    private func regexMatches(text: String, pattern: String) -> [String] {
        guard let regex = try? NSRegularExpression(pattern: pattern, options: []) else {
            return []
        }
        let nsText = text as NSString
        let range = NSRange(location: 0, length: nsText.length)
        return regex.matches(in: text, options: [], range: range).compactMap { match in
            guard match.numberOfRanges > 1 else { return nil }
            let captureRange = match.range(at: 1)
            guard captureRange.location != NSNotFound else { return nil }
            return nsText.substring(with: captureRange)
        }
    }
}
|
||||
150
RosettaTests/VoiceRecordingParityMathTests.swift
Normal file
150
RosettaTests/VoiceRecordingParityMathTests.swift
Normal file
@@ -0,0 +1,150 @@
|
||||
import XCTest
|
||||
@testable import Rosetta
|
||||
|
||||
/// Pure-math parity checks for the voice-recording gesture: constant values,
/// release decisions, dominant-axis resolution, normalization curves, and
/// trim/waveform geometry.
final class VoiceRecordingParityMathTests: XCTestCase {

    private typealias Constants = VoiceRecordingParityConstants
    private typealias Math = VoiceRecordingParityMath

    /// Every gesture constant must hold its reference value.
    func testParityConstants() {
        XCTAssertEqual(Constants.holdThreshold, 0.19, accuracy: 0.0001)
        XCTAssertEqual(Constants.cancelDistanceThreshold, -150)
        XCTAssertEqual(Constants.cancelHapticThreshold, -100)
        XCTAssertEqual(Constants.lockDistanceThreshold, -110)
        XCTAssertEqual(Constants.lockHapticThreshold, -60)
        XCTAssertEqual(Constants.velocityGate, -400)
        XCTAssertEqual(Constants.preHoldCancelDistance, 10)
        XCTAssertEqual(Constants.micHitInsetX, -10)
        XCTAssertEqual(Constants.locknessDivisor, 105)
        XCTAssertEqual(Constants.dragNormalizeDivisor, 300)
        XCTAssertEqual(Constants.cancelTransformThreshold, 8)
        XCTAssertEqual(Constants.sendAccessibilityHitSize, 120)
        XCTAssertEqual(Constants.minVoiceDuration, 0.5)
        XCTAssertEqual(Constants.minFreeDiskBytes, 8 * 1024 * 1024)
    }

    /// A fast fling decides the release even at small distances:
    /// horizontal → cancel, vertical → lock.
    func testReleaseDecisionVelocityGate() {
        let horizontalFling = Math.releaseDecision(velocityX: -500, velocityY: 0, distanceX: -10, distanceY: 0)
        XCTAssertEqual(horizontalFling, .cancel)

        let verticalFling = Math.releaseDecision(velocityX: 0, velocityY: -500, distanceX: 0, distanceY: -10)
        XCTAssertEqual(verticalFling, .lock)
    }

    /// With zero velocity the decision falls back to drag distance;
    /// a small drag on both axes finishes normally.
    func testReleaseDecisionDistanceFallback() {
        let farLeft = Math.releaseDecision(velocityX: 0, velocityY: 0, distanceX: -120, distanceY: 0)
        XCTAssertEqual(farLeft, .cancel)

        let farUp = Math.releaseDecision(velocityX: 0, velocityY: 0, distanceX: 0, distanceY: -80)
        XCTAssertEqual(farUp, .lock)

        let smallDrag = Math.releaseDecision(velocityX: 0, velocityY: 0, distanceX: -20, distanceY: -20)
        XCTAssertEqual(smallDrag, .finish)
    }

    /// The dominant axis keeps its distance; the other component becomes zero.
    func testDominantAxis() {
        let horizontal = Math.dominantAxisDistances(distanceX: -110, distanceY: -40)
        XCTAssertEqual(horizontal.0, -110)
        XCTAssertEqual(horizontal.1, 0)

        let vertical = Math.dominantAxisDistances(distanceX: -20, distanceY: -80)
        XCTAssertEqual(vertical.0, 0)
        XCTAssertEqual(vertical.1, -80)
    }

    /// Lockness maps drag-up distance into [0, 1]: 0 at rest,
    /// 0.5 at −52.5 (half of the 105 divisor), clamped to 1 far up.
    func testLocknessNormalization() {
        XCTAssertEqual(Math.lockness(distanceY: 0), 0)
        XCTAssertEqual(Math.lockness(distanceY: -52.5), 0.5, accuracy: 0.0001)
        XCTAssertEqual(Math.lockness(distanceY: -500), 1)
    }

    /// Drag normalization: 0 at rest, 0.5 at 150 of the 300 divisor, clamped to 1.
    func testNormalizedDrag() {
        XCTAssertEqual(Math.normalizedDrag(distance: 0), 0, accuracy: 0.0001)
        XCTAssertEqual(Math.normalizedDrag(distance: 150), 0.5, accuracy: 0.0001)
        XCTAssertEqual(Math.normalizedDrag(distance: -450), 1, accuracy: 0.0001)
    }

    /// The cancel transform kicks in strictly past −8 points.
    func testCancelTransformThreshold() {
        XCTAssertFalse(Math.shouldApplyCancelTransform(-8))
        XCTAssertTrue(Math.shouldApplyCancelTransform(-8.1))
    }

    /// Recordings shorter than 0.5 s are discarded; 0.5 s and longer are kept.
    func testShortRecordingDiscardGuard() {
        XCTAssertTrue(Math.shouldDiscard(duration: 0.49))
        XCTAssertFalse(Math.shouldDiscard(duration: 0.5))
        XCTAssertFalse(Math.shouldDiscard(duration: 1.2))
    }

    /// Minimum trim duration scales with waveform width and clamps to the
    /// total duration when the clip is short.
    func testMinTrimDurationFormula() {
        let regular = Constants.minTrimDuration(duration: 10, waveformWidth: 280)
        XCTAssertEqual(regular, 2.0, accuracy: 0.0001)

        let clamped = Constants.minTrimDuration(duration: 1, waveformWidth: 600)
        XCTAssertEqual(clamped, 1.0, accuracy: 0.0001)
    }

    /// Out-of-bounds trim ranges are clamped into [0, duration].
    func testClampTrimRange() {
        let clamped = Math.clampTrimRange((-3)...(12), duration: 8)
        XCTAssertEqual(clamped.lowerBound, 0)
        XCTAssertEqual(clamped.upperBound, 8)
    }

    /// A 2…7 s trim over a 10 s clip with 100 samples maps to samples 20..<70.
    func testWaveformSliceRange() {
        let slice = Math.waveformSliceRange(sampleCount: 100, totalDuration: 10, trimRange: 2...7)
        XCTAssertEqual(slice, 20..<70)
    }

    /// Zero samples or zero duration yield no slice.
    func testWaveformSliceRangeInvalidInputs() {
        XCTAssertNil(Math.waveformSliceRange(sampleCount: 0, totalDuration: 10, trimRange: 2...7))
        XCTAssertNil(Math.waveformSliceRange(sampleCount: 100, totalDuration: 0, trimRange: 2...7))
    }

}
|
||||
Reference in New Issue
Block a user