diff --git a/Convos/Debug View/BackupDebugView.swift b/Convos/Debug View/BackupDebugView.swift index fa493d5e1..0f5a06215 100644 --- a/Convos/Debug View/BackupDebugView.swift +++ b/Convos/Debug View/BackupDebugView.swift @@ -205,7 +205,7 @@ struct BackupDebugView: View { private func revokeVaultInstallationsAction() { let vaultKeyStore = makeVaultKeyStore() let vaultManager = session.vaultService as? VaultManager - runAction(title: "Revoke vault installations") { [environment] in + runAction(title: "Revoke all other vault installations") { [environment] in let vaultIdentity = try await vaultKeyStore.loadAny() let keepId: String? = await vaultManager?.vaultInstallationId let count = try await XMTPInstallationRevoker.revokeOtherInstallations( @@ -316,6 +316,16 @@ struct BackupDebugView: View { ) let vaultManager = session.vaultService as? VaultManager + let capturedEnvironment = environment + let revoker: RestoreInstallationRevoker = { inboxId, signingKey, keepId in + try await XMTPInstallationRevoker.revokeOtherInstallations( + inboxId: inboxId, + signingKey: signingKey, + keepInstallationId: keepId, + environment: capturedEnvironment + ) + } + return RestoreManager( vaultKeyStore: vaultKeyStore, identityStore: identityStore, @@ -323,6 +333,7 @@ struct BackupDebugView: View { archiveImporter: archiveImporter, restoreLifecycleController: session as? any RestoreLifecycleControlling, vaultManager: vaultManager, + installationRevoker: revoker, environment: environment ) } diff --git a/ConvosCore/Sources/ConvosCore/Backup/BackupManager.swift b/ConvosCore/Sources/ConvosCore/Backup/BackupManager.swift index 63d17311b..dc45eded0 100644 --- a/ConvosCore/Sources/ConvosCore/Backup/BackupManager.swift +++ b/ConvosCore/Sources/ConvosCore/Backup/BackupManager.swift @@ -1,6 +1,17 @@ import Foundation import GRDB +public enum BackupError: LocalizedError { + case broadcastKeysFailed(any Error) + + public var errorDescription: String? 
{ + switch self { + case .broadcastKeysFailed(let error): + return "Failed to broadcast conversation keys to vault: \(error.localizedDescription)" + } + } +} + public struct ConversationArchiveResult: Sendable { public let inboxId: String public let success: Bool @@ -47,6 +58,7 @@ public actor BackupManager { ) let outputURL = try writeToICloudOrLocal(bundleData: bundleData, metadata: metadata) + Log.info("[Backup] saved to \(outputURL.path)") BackupBundle.cleanup(directory: stagingDir) return outputURL } catch { @@ -64,23 +76,32 @@ public actor BackupManager { try await archiveProvider.broadcastKeysToVault() Log.info("[Backup] keys broadcast to vault") } catch { - Log.warning("[Backup] failed to broadcast keys to vault (non-fatal): \(error)") + // Fail loud: if we can't broadcast keys to the vault, the archive may not + // contain every inbox's key. That would produce a backup that silently + // cannot be fully restored. Better to surface the failure now than create + // an incomplete bundle the user trusts. + Log.error("[Backup] failed to broadcast keys to vault: \(error)") + throw BackupError.broadcastKeysFailed(error) } Log.info("[Backup] creating vault archive") try await createVaultArchive(encryptionKey: encryptionKey, in: stagingDir) + Log.info("[Backup] vault archive created") + Log.info("[Backup] creating conversation archives") let conversationResults = await createConversationArchives(in: stagingDir) + let successCount = conversationResults.filter(\.success).count let failedResults = conversationResults.filter { !$0.success } + Log.info("[Backup] conversation archives: \(successCount)/\(conversationResults.count) succeeded") if !failedResults.isEmpty { - Log.warning("Failed to archive \(failedResults.count)/\(conversationResults.count) conversation(s)") for result in failedResults { - Log.warning(" \(result.inboxId): \(result.error?.localizedDescription ?? 
"unknown error")") + Log.warning("[Backup] conversation archive failed for \(result.inboxId): \(result.error?.localizedDescription ?? "unknown error")") } } + Log.info("[Backup] copying database snapshot") try copyDatabase(to: stagingDir) + Log.info("[Backup] database snapshot copied") - let successCount = conversationResults.filter(\.success).count let metadata = BackupBundleMetadata( deviceId: DeviceInfo.deviceIdentifier, deviceName: DeviceInfo.deviceName, @@ -90,16 +111,14 @@ public actor BackupManager { try BackupBundleMetadata.write(metadata, to: stagingDir) let bundleData = try BackupBundle.pack(directory: stagingDir, encryptionKey: encryptionKey) + let bundleSizeKB = bundleData.count / 1024 + Log.info("[Backup] bundle packed: \(bundleSizeKB)KB, \(successCount) conversation(s), vault=true, db=true") return (bundleData, metadata) } - private func createVaultArchive(encryptionKey: Data, in directory: URL) async { + private func createVaultArchive(encryptionKey: Data, in directory: URL) async throws { let archivePath = BackupBundle.vaultArchivePath(in: directory) - do { - try await archiveProvider.createVaultArchive(at: archivePath, encryptionKey: encryptionKey) - } catch { - Log.warning("[Backup] vault archive creation failed (non-fatal): \(error)") - } + try await archiveProvider.createVaultArchive(at: archivePath, encryptionKey: encryptionKey) } private func createConversationArchives(in directory: URL) async -> [ConversationArchiveResult] { @@ -108,7 +127,7 @@ public actor BackupManager { let repo = InboxesRepository(databaseReader: databaseReader) inboxes = try repo.nonVaultUsedInboxes() } catch { - Log.warning("Failed to load inboxes for backup: \(error)") + Log.warning("[Backup] failed to load inboxes: \(error)") return [] } @@ -118,7 +137,7 @@ public actor BackupManager { do { identity = try await identityStore.identity(for: inbox.inboxId) } catch { - Log.warning("No identity found for inbox \(inbox.inboxId), skipping archive") + Log.warning("[Backup] no 
identity for inbox \(inbox.inboxId), skipping archive") results.append(.init(inboxId: inbox.inboxId, success: false, error: error)) continue } @@ -132,7 +151,7 @@ public actor BackupManager { ) results.append(.init(inboxId: inbox.inboxId, success: true, error: nil)) } catch { - Log.warning("Failed to archive conversation \(inbox.inboxId): \(error)") + Log.warning("[Backup] failed to archive conversation \(inbox.inboxId): \(error)") results.append(.init(inboxId: inbox.inboxId, success: false, error: error)) } } @@ -179,7 +198,7 @@ public actor BackupManager { .appendingPathComponent("backups", isDirectory: true) .appendingPathComponent(deviceId, isDirectory: true) try FileManager.default.createDirectory(at: localDir, withIntermediateDirectories: true) - Log.warning("iCloud container unavailable, backup saved locally") + Log.warning("[Backup] iCloud container unavailable, saved locally") return localDir } } diff --git a/ConvosCore/Sources/ConvosCore/Backup/ConvosRestoreArchiveImporter.swift b/ConvosCore/Sources/ConvosCore/Backup/ConvosRestoreArchiveImporter.swift index b352d2046..0aa69e1fa 100644 --- a/ConvosCore/Sources/ConvosCore/Backup/ConvosRestoreArchiveImporter.swift +++ b/ConvosCore/Sources/ConvosCore/Backup/ConvosRestoreArchiveImporter.swift @@ -13,7 +13,11 @@ public struct ConvosRestoreArchiveImporter: RestoreArchiveImporter { self.environment = environment } - public func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws { + public func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws -> String { + // RestoreManager has already staged/wiped the local XMTP DBs for conversation + // inboxes before calling us, so no existing client can be reused here. Create + // a single fresh client and import the archive into it — any prior `Client.build` + // probe would register an extra installation on the network as a side effect. 
let identity = try await identityStore.identity(for: inboxId) let api = XMTPAPIOptionsBuilder.build(environment: environment) let options = ClientOptions( @@ -23,19 +27,6 @@ public struct ConvosRestoreArchiveImporter: RestoreArchiveImporter { deviceSyncEnabled: false ) - do { - let client = try await Client.build( - publicIdentity: identity.keys.signingKey.identity, - options: options, - inboxId: inboxId - ) - try? client.dropLocalDatabaseConnection() - Log.info("[Restore] conversation XMTP DB already exists for \(inboxId), skipping archive import") - return - } catch { - Log.info("[Restore] no existing XMTP DB for \(inboxId), importing archive") - } - let client = try await Client.create( account: identity.keys.signingKey, options: options @@ -43,6 +34,22 @@ public struct ConvosRestoreArchiveImporter: RestoreArchiveImporter { defer { try? client.dropLocalDatabaseConnection() } try await client.importArchive(path: path, encryptionKey: encryptionKey) - Log.info("[Restore] conversation archive imported for \(inboxId)") + + // After archive import, the XMTP SDK's consent state for restored + // groups may be 'unknown'. shouldProcessConversation in StreamProcessor + // drops messages from groups that aren't 'allowed', so the first + // incoming message after restore would be silently lost. Set consent + // to 'allowed' for all groups in this inbox's archive. 
+ let groups = try client.conversations.listGroups() + for group in groups { + try await group.updateConsentState(state: .allowed) + } + if !groups.isEmpty { + Log.info("[Restore] set consent=allowed for \(groups.count) group(s) in \(inboxId)") + } + + let newInstallationId = client.installationID + Log.info("[Restore] conversation archive imported for \(inboxId) (new installationId=\(newInstallationId))") + return newInstallationId } } diff --git a/ConvosCore/Sources/ConvosCore/Backup/ConvosVaultArchiveImporter.swift b/ConvosCore/Sources/ConvosCore/Backup/ConvosVaultArchiveImporter.swift index 61a17f344..9f504b564 100644 --- a/ConvosCore/Sources/ConvosCore/Backup/ConvosVaultArchiveImporter.swift +++ b/ConvosCore/Sources/ConvosCore/Backup/ConvosVaultArchiveImporter.swift @@ -3,17 +3,17 @@ import Foundation @preconcurrency import XMTPiOS public struct ConvosVaultArchiveImporter: VaultArchiveImporter { - private let vaultKeyStore: VaultKeyStore private let environment: AppEnvironment public init(vaultKeyStore: VaultKeyStore, environment: AppEnvironment) { - self.vaultKeyStore = vaultKeyStore self.environment = environment } - public func importVaultArchive(from path: URL, encryptionKey: Data) async throws -> [VaultKeyEntry] { - Log.info("[Restore] loading vault identity") - let vaultIdentity = try await vaultKeyStore.loadAny() + public func importVaultArchive( + from path: URL, + encryptionKey: Data, + vaultIdentity: KeychainIdentity + ) async throws -> [VaultKeyEntry] { let api = XMTPAPIOptionsBuilder.build(environment: environment) let codecs: [any ContentCodec] = [ @@ -26,28 +26,19 @@ public struct ConvosVaultArchiveImporter: VaultArchiveImporter { TextCodec(), ] - let existingOptions = ClientOptions( - api: api, - codecs: codecs, - dbEncryptionKey: vaultIdentity.keys.databaseKey, - deviceSyncEnabled: false - ) - - if let existingClient = try? 
await Client.build( - publicIdentity: vaultIdentity.keys.signingKey.identity, - options: existingOptions, - inboxId: vaultIdentity.inboxId - ) { - Log.info("[Restore] vault XMTP DB already exists, extracting keys from existing vault") - defer { try? existingClient.dropLocalDatabaseConnection() } - try await existingClient.conversations.sync() - return try await extractKeys(from: existingClient) - } - + // Always import the archive from the backup into an isolated temp + // directory. Reusing an existing vault XMTP DB on disk is wrong + // when the keychain holds multiple vault identities (e.g. after + // iCloud Keychain sync) — loadAny() might return the restoring + // device's vault instead of the backup device's, and the existing + // DB would contain a different set of key messages. let importDir = FileManager.default.temporaryDirectory .appendingPathComponent("xmtp-vault-import-\(UUID().uuidString)") try FileManager.default.createDirectory(at: importDir, withIntermediateDirectories: true) - Log.info("[Restore] no existing vault XMTP DB, importing archive into isolated directory") + defer { + try? FileManager.default.removeItem(at: importDir) + } + Log.info("[Restore] importing vault archive into isolated directory (inboxId=\(vaultIdentity.inboxId))") let importOptions = ClientOptions( api: api, @@ -61,25 +52,25 @@ public struct ConvosVaultArchiveImporter: VaultArchiveImporter { account: vaultIdentity.keys.signingKey, options: importOptions ) + defer { try? client.dropLocalDatabaseConnection() } - Log.info("[Restore] importing vault archive (inboxId: \(client.inboxID))") + Log.info("[Restore] importing vault archive (client inboxId=\(client.inboxID))") try await client.importArchive(path: path.path, encryptionKey: encryptionKey) Log.info("[Restore] vault archive import succeeded") - let entries = try await extractKeys(from: client) - try? client.dropLocalDatabaseConnection() - try? 
FileManager.default.removeItem(at: importDir) - return entries + return try await extractKeys(from: client) } private func extractKeys(from client: Client) async throws -> [VaultKeyEntry] { - try await client.conversations.sync() + // Read groups and messages from the local DB only — no network sync. + // The archive import already populated the local DB with everything + // we need, and syncing would fail if the vault group was deactivated + // on the network by a previous restore's revocation step. let groups = try client.conversations.listGroups() Log.info("[Restore] reading messages from \(groups.count) vault group(s)") var allMessages: [DecodedMessage] = [] for group in groups { - try await group.sync() let messages = try await group.messages() allMessages.append(contentsOf: messages) } @@ -95,7 +86,7 @@ public struct ConvosVaultArchiveImporter: VaultArchiveImporter { } let entries = VaultManager.extractKeyEntries(bundles: bundles, shares: shares) - Log.info("[Restore] extracted \(entries.count) key entries") + Log.info("[Restore] extracted \(entries.count) key entries from \(bundles.count) bundle(s) and \(shares.count) share(s)") return entries } } diff --git a/ConvosCore/Sources/ConvosCore/Backup/RestoreManager.swift b/ConvosCore/Sources/ConvosCore/Backup/RestoreManager.swift index 7f125d63d..93de5aae3 100644 --- a/ConvosCore/Sources/ConvosCore/Backup/RestoreManager.swift +++ b/ConvosCore/Sources/ConvosCore/Backup/RestoreManager.swift @@ -14,11 +14,14 @@ public enum RestoreState: Sendable, Equatable { } public protocol RestoreArchiveImporter: Sendable { - func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws + /// Import a conversation archive into a fresh XMTP client and return the + /// installation id that was registered for this inbox on the network. The + /// caller uses this id as the "keeper" when revoking older installations. 
+ func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws -> String } public protocol VaultArchiveImporter: Sendable { - func importVaultArchive(from path: URL, encryptionKey: Data) async throws -> [VaultKeyEntry] + func importVaultArchive(from path: URL, encryptionKey: Data, vaultIdentity: KeychainIdentity) async throws -> [VaultKeyEntry] } public protocol RestoreLifecycleControlling: Sendable { @@ -26,6 +29,15 @@ public protocol RestoreLifecycleControlling: Sendable { func finishRestore() async } +/// Closure that revokes every installation for `inboxId` except `keepInstallationId`. +/// Return value is the number of installations revoked. Default production +/// implementation wraps `XMTPInstallationRevoker`; tests pass `nil` to skip. +public typealias RestoreInstallationRevoker = @Sendable ( + _ inboxId: String, + _ signingKey: any SigningKey, + _ keepInstallationId: String? +) async throws -> Int + public actor RestoreManager { private let vaultKeyStore: VaultKeyStore private let vaultArchiveImporter: any VaultArchiveImporter @@ -35,6 +47,7 @@ public actor RestoreManager { private let restoreLifecycleController: (any RestoreLifecycleControlling)? private let vaultManager: VaultManager? private let environment: AppEnvironment + private let installationRevoker: RestoreInstallationRevoker? public private(set) var state: RestoreState = .idle @@ -46,6 +59,7 @@ public actor RestoreManager { archiveImporter: any RestoreArchiveImporter, restoreLifecycleController: (any RestoreLifecycleControlling)? = nil, vaultManager: VaultManager? = nil, + installationRevoker: RestoreInstallationRevoker? 
= nil, environment: AppEnvironment ) { self.vaultKeyStore = vaultKeyStore @@ -58,6 +72,7 @@ public actor RestoreManager { self.archiveImporter = archiveImporter self.restoreLifecycleController = restoreLifecycleController self.vaultManager = vaultManager + self.installationRevoker = installationRevoker self.environment = environment } @@ -66,14 +81,22 @@ public actor RestoreManager { let stagingDir = try BackupBundle.createStagingDirectory() var preparedForRestore = false + // Rollback state: populated once destructive operations begin, cleared once + // the restore is committed (DB replaced + keys saved). If an error is thrown + // before commit, we use these to restore the pre-restore state of the device. + var xmtpStashDir: URL? + var preRestoreIdentities: [KeychainIdentity] = [] + var committed = false + do { Log.info("[Restore] reading bundle (\(bundleURL.lastPathComponent))") let bundleData = try Data(contentsOf: bundleURL) - let (encryptionKey, _) = try await decryptBundle( + let (encryptionKey, vaultIdentity) = try await decryptBundle( bundleData: bundleData, to: stagingDir ) + Log.info("[Restore] decrypted with vault identity inboxId=\(vaultIdentity.inboxId)") let metadata = try BackupBundleMetadata.read(from: stagingDir) Log.info("[Restore] backup v\(metadata.version) from \(metadata.deviceName) (\(metadata.createdAt))") @@ -86,24 +109,61 @@ public actor RestoreManager { } Log.info("[Restore] importing vault archive and extracting keys") - let keyEntries = try await importVaultArchive(encryptionKey: encryptionKey, in: stagingDir) + let keyEntries = try await importVaultArchive( + encryptionKey: encryptionKey, + vaultIdentity: vaultIdentity, + in: stagingDir + ) Log.info("[Restore] extracted \(keyEntries.count) key(s) from vault archive") - Log.info("[Restore] wiping local XMTP state for clean restore") - await wipeLocalXMTPState() - Log.info("[Restore] local XMTP state wiped") + if keyEntries.isEmpty, metadata.inboxCount > 0 { + Log.error("[Restore] 
backup contains \(metadata.inboxCount) conversation(s) but vault yielded 0 keys — aborting before destructive operations") + throw RestoreError.incompleteBackup(inboxCount: metadata.inboxCount) + } + + Log.info("[Restore] snapshotting existing keychain identities for rollback") + preRestoreIdentities = (try? await identityStore.loadAll()) ?? [] + Log.info("[Restore] snapshotted \(preRestoreIdentities.count) identity/identities") + + Log.info("[Restore] staging local XMTP files aside") + xmtpStashDir = try stageXMTPFiles() + Log.info("[Restore] XMTP files staged") + + Log.info("[Restore] clearing keychain identities") + do { + try await identityStore.deleteAll() + } catch { + Log.warning("[Restore] failed to clear conversation keychain identities: \(error)") + } Log.info("[Restore] saving keys to keychain") let failedKeyCount = await saveKeysToKeychain(entries: keyEntries) Log.info("[Restore] keys saved (\(failedKeyCount) failed)") + if !keyEntries.isEmpty, failedKeyCount == keyEntries.count { + // Every single key failed to save — keychain is empty and DB replace + // would leave the device unable to decrypt any restored conversation. + // Abort before touching the database. + throw RestoreError.keychainRestoreFailed + } Log.info("[Restore] replacing database") try replaceDatabase(from: stagingDir) Log.info("[Restore] database replaced") + // Commit point: DB + keychain are consistent with the restored state. + // The staged XMTP files are stale and can be discarded; past this point + // any failures are non-fatal and do not roll back. 
+ committed = true + if let stash = xmtpStashDir { + deleteStagedXMTPFiles(at: stash) + xmtpStashDir = nil + } + Log.info("[Restore] importing conversation archives") - await importConversationArchives(in: stagingDir) - Log.info("[Restore] conversation archives imported") + let importedInboxes = await importConversationArchives(in: stagingDir) + Log.info("[Restore] conversation archives imported (\(importedInboxes.count) inbox(es))") + + await revokeStaleInstallationsForRestoredInboxes(importedInboxes) Log.info("[Restore] marking all conversations inactive") let localStateWriter = ConversationLocalStateWriter(databaseWriter: databaseManager.dbWriter) @@ -128,6 +188,13 @@ public actor RestoreManager { BackupBundle.cleanup(directory: stagingDir) } catch { + if !committed { + Log.warning("[Restore] rolling back keychain and XMTP state after failure: \(error)") + await rollbackKeychain(to: preRestoreIdentities) + if let stash = xmtpStashDir { + restoreStagedXMTPFiles(from: stash) + } + } if preparedForRestore { await restoreLifecycleController?.finishRestore() } @@ -137,6 +204,86 @@ public actor RestoreManager { } } + // MARK: - Staging / rollback + + private func stageXMTPFiles() throws -> URL { + let fileManager = FileManager.default + let stashDir = fileManager.temporaryDirectory + .appendingPathComponent("xmtp-restore-stash-\(UUID().uuidString)") + try fileManager.createDirectory(at: stashDir, withIntermediateDirectories: true) + + let sourceDir = environment.defaultDatabasesDirectoryURL + guard let files = try? 
fileManager.contentsOfDirectory( + at: sourceDir, + includingPropertiesForKeys: nil, + options: [.skipsHiddenFiles] + ) else { + return stashDir + } + + var moved = 0 + for file in files where file.lastPathComponent.hasPrefix("xmtp-") && + !file.lastPathComponent.hasPrefix("xmtp-restore-stash-") { + let destination = stashDir.appendingPathComponent(file.lastPathComponent) + do { + try fileManager.moveItem(at: file, to: destination) + moved += 1 + } catch { + Log.warning("[Restore] failed to stage XMTP file \(file.lastPathComponent): \(error)") + } + } + Log.info("[Restore] staged \(moved) XMTP file(s) to \(stashDir.lastPathComponent)") + return stashDir + } + + private func restoreStagedXMTPFiles(from stashDir: URL) { + let fileManager = FileManager.default + let destinationDir = environment.defaultDatabasesDirectoryURL + + guard let files = try? fileManager.contentsOfDirectory( + at: stashDir, + includingPropertiesForKeys: nil + ) else { + try? fileManager.removeItem(at: stashDir) + return + } + + for file in files { + let destination = destinationDir.appendingPathComponent(file.lastPathComponent) + try? fileManager.removeItem(at: destination) + do { + try fileManager.moveItem(at: file, to: destination) + } catch { + Log.warning("[Restore] failed to restore staged XMTP file \(file.lastPathComponent): \(error)") + } + } + try? fileManager.removeItem(at: stashDir) + Log.info("[Restore] restored staged XMTP files") + } + + private func deleteStagedXMTPFiles(at stashDir: URL) { + try? 
FileManager.default.removeItem(at: stashDir) + } + + private func rollbackKeychain(to snapshot: [KeychainIdentity]) async { + do { + try await identityStore.deleteAll() + } catch { + Log.warning("[Restore] rollback: failed to clear keychain before restoring snapshot: \(error)") + } + for identity in snapshot { + do { + _ = try await identityStore.save( + inboxId: identity.inboxId, + clientId: identity.clientId, + keys: identity.keys + ) + } catch { + Log.warning("[Restore] rollback: failed to restore identity \(identity.inboxId): \(error)") + } + } + } + // MARK: - Vault re-creation private func reCreateVault() async { @@ -179,41 +326,6 @@ public actor RestoreManager { Log.info("[Restore.reCreateVault] === DONE ===") } - // MARK: - Wipe - - private func wipeLocalXMTPState() async { - do { - try await identityStore.deleteAll() - Log.info("[Restore] cleared conversation keychain identities") - } catch { - Log.warning("[Restore] failed to clear conversation keychain identities (non-fatal): \(error)") - } - - // Only wipe conversation XMTP databases (AppGroup container). - // The vault XMTP database (Documents) is preserved — it was already - // used to extract keys and will be reconnected after restore. - deleteXMTPFiles(in: environment.defaultDatabasesDirectoryURL) - } - - private func deleteXMTPFiles(in directory: URL) { - let fileManager = FileManager.default - guard let files = try? fileManager.contentsOfDirectory( - at: directory, - includingPropertiesForKeys: nil, - options: [.skipsHiddenFiles] - ) else { return } - - var count = 0 - for file in files where file.lastPathComponent.hasPrefix("xmtp-") { - if (try? 
fileManager.removeItem(at: file)) != nil { - count += 1 - } - } - if count > 0 { - Log.info("[Restore] deleted \(count) XMTP file(s) from \(directory.lastPathComponent)") - } - } - // MARK: - Bundle decryption private func decryptBundle( @@ -251,7 +363,11 @@ public actor RestoreManager { // MARK: - Vault archive import - private func importVaultArchive(encryptionKey: Data, in directory: URL) async throws -> [VaultKeyEntry] { + private func importVaultArchive( + encryptionKey: Data, + vaultIdentity: KeychainIdentity, + in directory: URL + ) async throws -> [VaultKeyEntry] { state = .importingVault let vaultArchivePath = BackupBundle.vaultArchivePath(in: directory) @@ -263,7 +379,11 @@ public actor RestoreManager { throw RestoreError.missingVaultArchive } - return try await vaultArchiveImporter.importVaultArchive(from: vaultArchivePath, encryptionKey: encryptionKey) + return try await vaultArchiveImporter.importVaultArchive( + from: vaultArchivePath, + encryptionKey: encryptionKey, + vaultIdentity: vaultIdentity + ) } // MARK: - Key restoration @@ -308,7 +428,11 @@ public actor RestoreManager { // MARK: - Conversation archive import - private func importConversationArchives(in directory: URL) async { + /// Returns the set of `(inboxId, newInstallationId)` pairs for every + /// conversation archive that was successfully imported. The installation id + /// is the one registered on the XMTP network during archive import — it is + /// the "keeper" for post-restore revocation of stale installations. 
+ private func importConversationArchives(in directory: URL) async -> [(inboxId: String, newInstallationId: String)] { let conversationsDir = directory .appendingPathComponent("conversations", isDirectory: true) @@ -318,11 +442,12 @@ public actor RestoreManager { includingPropertiesForKeys: nil ) else { Log.info("No conversation archives to import") - return + return [] } let archiveFiles = contents.filter { $0.pathExtension == "encrypted" } var completed = 0 + var imported: [(inboxId: String, newInstallationId: String)] = [] for archiveFile in archiveFiles { let inboxId = archiveFile.deletingPathExtension().lastPathComponent @@ -338,17 +463,55 @@ public actor RestoreManager { } do { - try await archiveImporter.importConversationArchive( + let newInstallationId = try await archiveImporter.importConversationArchive( inboxId: inboxId, path: archiveFile.path, encryptionKey: identity.keys.databaseKey ) + imported.append((inboxId: inboxId, newInstallationId: newInstallationId)) } catch { Log.warning("Failed to import conversation archive \(inboxId): \(error)") } completed += 1 } state = .importingConversations(completed: completed, total: archiveFiles.count) + return imported + } + + /// After a successful archive import on device B, every inbox has a brand + /// new installation on the network (the one we just created) alongside the + /// original installations from device A. Revoke every installation *except* + /// the one we just created so that device A flips to `stale` on its next + /// foreground cycle and stops diverging from the restored state. 
+ private func revokeStaleInstallationsForRestoredInboxes( + _ imported: [(inboxId: String, newInstallationId: String)] + ) async { + guard let installationRevoker else { + Log.info("[Restore] installationRevoker not configured, skipping post-import revocation") + return + } + guard !imported.isEmpty else { return } + + Log.info("[Restore] revoking stale installations for \(imported.count) restored inbox(es)") + for entry in imported { + let identity: KeychainIdentity + do { + identity = try await identityStore.identity(for: entry.inboxId) + } catch { + Log.warning("[Restore] cannot load identity for \(entry.inboxId), skipping revocation: \(error)") + continue + } + do { + let revoked = try await installationRevoker( + entry.inboxId, + identity.keys.signingKey, + entry.newInstallationId + ) + Log.info("[Restore] revoked \(revoked) stale installation(s) for \(entry.inboxId)") + } catch { + Log.warning("[Restore] revocation failed for \(entry.inboxId) (non-fatal): \(error)") + } + } } // MARK: - Helpers @@ -410,6 +573,8 @@ public actor RestoreManager { case decryptionFailed case missingVaultArchive case missingDatabase + case keychainRestoreFailed + case incompleteBackup(inboxCount: Int) var errorDescription: String? { switch self { @@ -421,6 +586,10 @@ public actor RestoreManager { return "Backup bundle does not contain a vault archive" case .missingDatabase: return "Backup bundle does not contain a database" + case .keychainRestoreFailed: + return "Failed to save any restored keys to the keychain" + case .incompleteBackup(let inboxCount): + return "Backup contains \(inboxCount) conversation(s) but the vault archive yielded no decryption keys. The backup may have been created before keys were broadcast to the vault." 
} } } diff --git a/ConvosCore/Sources/ConvosCore/Syncing/StreamProcessor.swift b/ConvosCore/Sources/ConvosCore/Syncing/StreamProcessor.swift index 106e0e911..2c86b42c7 100644 --- a/ConvosCore/Sources/ConvosCore/Syncing/StreamProcessor.swift +++ b/ConvosCore/Sources/ConvosCore/Syncing/StreamProcessor.swift @@ -27,9 +27,19 @@ protocol StreamProcessorProtocol: Actor { ) async func setInviteJoinErrorHandler(_ handler: (any InviteJoinErrorHandler)?) + + /// Reactivate any conversations that are present in the XMTP client's + /// group list but still marked inactive in the local DB. Called after the + /// initial sync completes to handle post-restore conversations that would + /// otherwise stay "Awaiting reconnection" until a new message arrives. + func reactivateRestoredConversations(knownGroupIds: Set) async } extension StreamProcessorProtocol { + func reactivateRestoredConversations(knownGroupIds: Set) async { + // Default no-op for conformers that don't handle post-restore reactivation. + } + func processConversation( _ conversation: XMTPiOS.Group, params: SyncClientParams @@ -303,6 +313,27 @@ actor StreamProcessor: StreamProcessorProtocol { // MARK: - Reactivation + func reactivateRestoredConversations(knownGroupIds: Set) async { + guard !knownGroupIds.isEmpty else { return } + do { + let inactiveIds: [String] = try await databaseReader.read { db in + try String.fetchAll(db, sql: """ + SELECT conversationId FROM conversationLocalState WHERE isActive = 0 + """) + } + let toReactivate = inactiveIds.filter { knownGroupIds.contains($0) } + guard !toReactivate.isEmpty else { return } + + for conversationId in toReactivate { + try await markRecentUpdatesAsReconnection(conversationId: conversationId) + try await localStateWriter.setActive(true, for: conversationId) + } + Log.info("Reactivated \(toReactivate.count) restored conversation(s) confirmed by XMTP sync") + } catch { + Log.warning("reactivateRestoredConversations failed: \(error)") + } + } + private func 
reactivateIfNeeded(conversationId: String) async { do { let isInactive = try await databaseReader.read { db in diff --git a/ConvosCore/Sources/ConvosCore/Syncing/SyncingManager.swift b/ConvosCore/Sources/ConvosCore/Syncing/SyncingManager.swift index 4aa80c9ef..bdf51a5db 100644 --- a/ConvosCore/Sources/ConvosCore/Syncing/SyncingManager.swift +++ b/ConvosCore/Sources/ConvosCore/Syncing/SyncingManager.swift @@ -460,6 +460,16 @@ actor SyncingManager: SyncingManagerProtocol { if discoveredCount > 0 { Log.info("Discovered \(discoveredCount) new conversations after sync") } + + // After restore, conversations are marked inactive ("Awaiting + // reconnection") and only reactivate on message receipt. But + // quiet conversations would stay inactive indefinitely because + // discoverNewConversations skips groups already in the DB. + // Pass the full set of XMTP-confirmed group IDs so the stream + // processor can flip any inactive conversations whose groups + // were successfully synced. + let allGroupIds = Set(groups.map(\.id)) + await streamProcessor.reactivateRestoredConversations(knownGroupIds: allGroupIds) } catch { Log.error("Failed to discover new conversations: \(error)") } diff --git a/ConvosCore/Sources/ConvosCore/Vault/VaultManager.swift b/ConvosCore/Sources/ConvosCore/Vault/VaultManager.swift index 1306bd901..fa965cfae 100644 --- a/ConvosCore/Sources/ConvosCore/Vault/VaultManager.swift +++ b/ConvosCore/Sources/ConvosCore/Vault/VaultManager.swift @@ -301,12 +301,24 @@ public actor VaultManager { Log.warning("[Vault.reCreate] failed to count vault inbox rows: \(error)") } - Log.info("[Vault.reCreate] step 1/5: revoking other installations on old vault") + Log.info("[Vault.reCreate] step 1/5: revoking all installations on old vault") if let oldInboxId, let vaultKeyStore { do { let identity = try await vaultKeyStore.load(inboxId: oldInboxId) - try await revokeAllOtherInstallations(signingKey: identity.keys.signingKey) - Log.info("[Vault.reCreate] step 1/5: revoked 
other installations on old vault inboxId=\(oldInboxId)") + // Use the stateless network-only revoker. The local vault client + // has been paused by `prepareForRestore` so its connection pool + // is disconnected; using `revokeAllOtherInstallations(signingKey:)` + // here would hit `Client error: Pool needs to reconnect before use`. + // We want to revoke every installation on the old inbox (including + // the current one, since reCreate is about abandoning the whole + // vault identity), so `keepInstallationId` is nil. + let count = try await XMTPInstallationRevoker.revokeOtherInstallations( + inboxId: oldInboxId, + signingKey: identity.keys.signingKey, + keepInstallationId: nil, + environment: environment + ) + Log.info("[Vault.reCreate] step 1/5: revoked \(count) installation(s) on old vault inboxId=\(oldInboxId)") + } catch { + Log.warning("[Vault.reCreate] step 1/5: revocation failed (non-fatal): \(error)") + } diff --git a/ConvosCore/Tests/ConvosCoreTests/RestoreManagerTests.swift b/ConvosCore/Tests/ConvosCoreTests/RestoreManagerTests.swift index 2bbe90330..3c7a27be4 100644 --- a/ConvosCore/Tests/ConvosCoreTests/RestoreManagerTests.swift +++ b/ConvosCore/Tests/ConvosCoreTests/RestoreManagerTests.swift @@ -10,11 +10,12 @@ final class MockRestoreArchiveImporter: RestoreArchiveImporter, @unchecked Sendable var importedArchives: [(inboxId: String, path: String)] = [] var failingInboxIds: Set<String> = [] - func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws { + func importConversationArchive(inboxId: String, path: String, encryptionKey: Data) async throws -> String { if failingInboxIds.contains(inboxId) { throw NSError(domain: "test", code: 1, userInfo: [NSLocalizedDescriptionKey: "simulated import failure"]) } importedArchives.append((inboxId: inboxId, path: path)) + return "mock-installation-\(inboxId)" + } } @@ -23,7 +24,7 @@ final class MockRestoreArchiveImporter: RestoreArchiveImporter, @unchecked Sendable final class
MockVaultArchiveImporter: VaultArchiveImporter, @unchecked Sendable { var keyEntriesToReturn: [VaultKeyEntry] = [] - func importVaultArchive(from path: URL, encryptionKey: Data) async throws -> [VaultKeyEntry] { + func importVaultArchive(from path: URL, encryptionKey: Data, vaultIdentity: KeychainIdentity) async throws -> [VaultKeyEntry] { keyEntriesToReturn } } diff --git a/docs/plans/backup-restore-followups.md b/docs/plans/backup-restore-followups.md new file mode 100644 index 000000000..146ff25af --- /dev/null +++ b/docs/plans/backup-restore-followups.md @@ -0,0 +1,82 @@ +# Backup & Restore Follow-ups + +Deferred work items surfaced during PR #603 (vault-backup-bundle) Macroscope review and Jarod's Loom feedback. Items here are **not** blocking #603 merge — they either belong upstack on #618 (vault-restore-flow) or are enhancements for a later PR. + +## Status legend +- **Upstack (#618)**: fix lives on `vault-restore-flow`, not `vault-backup-bundle` +- **Backlog**: enhancement, no PR assigned yet + +--- + +## 1. `ConversationsViewModel.leave()` fights `recomputeVisibleConversations()` — Upstack (#618) + +**Source:** Macroscope 🟠 High, `Convos/Conversations List/ConversationsViewModel.swift:413` + +`leave(conversation:)` mutates `conversations` directly. The `recomputeVisibleConversations()` flow (added in #618 for stale-inbox recovery) rebuilds `conversations` from `unfilteredConversations` on the next tick, so the leave is visually undone until the DB delete propagates. + +**Fix:** adopt the same `hiddenConversationIds` pattern `explodeConversation()` uses. Add the id to `hiddenConversationIds`, call `recomputeVisibleConversations()`, then clear the hidden id once the DB delete observation lands. + +**Why deferred:** `recomputeVisibleConversations` and `hiddenConversationIds` were introduced on #618. Fix belongs on that branch. + +--- + +## 2. 
`staleInboxIdsPublisher()` missing vault filter — Upstack (#618) + +**Source:** Macroscope 🟠 High, `ConvosCore/Sources/ConvosCore/Storage/Repositories/InboxesRepository.swift:96` + +Sibling publishers (`staleDeviceStatePublisher`, `anyInboxStalePublisher`) filter `isVault == false`. `staleInboxIdsPublisher` does not. A stale vault inbox would have its `inboxId` added to the hidden set and hide its conversations in the list, contradicting the rest of the stale-device logic which intentionally excludes vault inboxes. + +**Fix:** add `.filter(DBInbox.Columns.isVault == false)` to the query, matching the other two publishers. + +**Why deferred:** publisher and its call site were added on #618. + +--- + +## 3. Disk-space preflight for backup + UI surfacing — Backlog + +**Source:** Jarod Q2. + +Backup creates a staging directory with a full GRDB copy + per-inbox XMTP archives + the encrypted bundle. Peak usage is ~2–3x final bundle size. Today backups are tiny (~381KB for 7 convos, no images), so this is not urgent — but a user with a much larger DB on a near-full device could hit a mid-backup failure with no warning. + +**Proposed work:** +- Add a preflight check in `BackupManager.createBackup()` that reads free space on the staging volume and bails with a dedicated `BackupError.insufficientDiskSpace(required:available:)` before any work begins. +- Pick a conservative multiplier (e.g., estimate `3 * dbFileSize + fixedOverhead`). +- Surface available storage in `BackupDebugView` status rows (next to "Last backup"). +- Later: user-facing error copy when surfacing backups in settings. + +**Why deferred:** enhancement, current backup sizes make it a non-issue. Not a correctness bug. + +--- + +## 4. Backup size / compression evaluation — Backlog + +**Source:** Jarod Q4. + +Current observations: +- ~381KB for 7 conversations, text-only. +- Bundle is raw AES-GCM ciphertext, no compression. 
+- Media is **not** included — encrypted image refs point to external URLs with a 30-day validity. + +**Proposed work:** +- Add instrumentation (QA event or debug log) recording bundle size, DB size, and archive count per backup run. +- Collect data from dogfooding across a range of account sizes. +- Revisit compression (`Compression` framework, zlib on the tar stream before AES) if bundles exceed ~5–10MB regularly. Compression before encryption is safe here because bundles are not transmitted over an attacker-observable channel where length-based side channels matter. +- Decide whether media inclusion is in scope for a future backup version. + +**Why deferred:** current sizes don't justify the complexity. Need data first. + +--- + +## Out of scope / already handled + +- **Vault re-creation on restore** — handled on #618 via `VaultManager.reCreate` + `RestoreManager.reCreateVault`. The single-device "vault is inactive after restore" problem Louis raised is already addressed there. +- **Missing vault archive = silent data loss** — handled on #618 (`RestoreManager` throws `missingVaultArchive`). + +## Being fixed on #603 (not deferred) + +Listed here for cross-reference; these land on `vault-backup-bundle` itself: + +1. `RestoreManager` destructive-ops ordering — stage XMTP files + keychain aside, replace DB, import archives, only then delete the staged state. Covers Macroscope 🟡 `RestoreManager.swift:92` and Jarod Q1. +2. `ConvosVaultArchiveImporter.swift:47` — add `defer` cleanup on the import path. +3. `BackupManager` — fail the backup if `broadcastKeysToVault` fails (Jarod Q3, fail-loud). +4. `BackupDebugView.swift:107` — align `runAction` title with button label so the spinner renders.