diff --git a/web/redux/persist-constants.js b/web/redux/persist-constants.js
index 1c24fc4f1..cc3f20389 100644
--- a/web/redux/persist-constants.js
+++ b/web/redux/persist-constants.js
@@ -1,7 +1,8 @@
// @flow

const rootKey = 'root';
const rootKeyPrefix = 'persist:';
const completeRootKey = `${rootKeyPrefix}${rootKey}`;
+const storeVersion = 75;

-export { rootKey, rootKeyPrefix, completeRootKey };
+export { rootKey, rootKeyPrefix, completeRootKey, storeVersion };
diff --git a/web/redux/persist.js b/web/redux/persist.js
index f19fc2f7e..2a2fb12b1 100644
--- a/web/redux/persist.js
+++ b/web/redux/persist.js
@@ -1,537 +1,537 @@
// @flow

import invariant from 'invariant';
import { getStoredState, purgeStoredState } from 'redux-persist';
import storage from 'redux-persist/es/storage/index.js';
import type { PersistConfig } from 'redux-persist/src/types.js';

import {
  type ClientDBKeyserverStoreOperation,
  keyserverStoreOpsHandlers,
  type ReplaceKeyserverOperation,
} from 'lib/ops/keyserver-store-ops.js';
import type { ClientDBMessageStoreOperation } from 'lib/ops/message-store-ops.js';
import type { ClientDBThreadStoreOperation } from 'lib/ops/thread-store-ops.js';
import { patchRawThreadInfoWithSpecialRole } from 'lib/permissions/special-roles.js';
import {
  createAsyncMigrate,
  type StorageMigrationFunction,
} from 'lib/shared/create-async-migrate.js';
import { keyserverStoreTransform } from 'lib/shared/transforms/keyserver-store-transform.js';
import { messageStoreMessagesBlocklistTransform } from 'lib/shared/transforms/message-store-transform.js';
import { defaultAlertInfos } from 'lib/types/alert-types.js';
import { defaultCalendarQuery } from 'lib/types/entry-types.js';
import type { KeyserverInfo } from 'lib/types/keyserver-types.js';
import { messageTypes } from 'lib/types/message-types-enum.js';
import type { ClientDBMessageInfo } from 'lib/types/message-types.js';
import type { WebNavInfo } from 'lib/types/nav-types.js';
import { cookieTypes } from 'lib/types/session-types.js';
import { defaultConnectionInfo } from 'lib/types/socket-types.js';
import { defaultGlobalThemeInfo } from 'lib/types/theme-types.js';
import type { ClientDBThreadInfo } from 'lib/types/thread-types.js';
import { getConfig } from 'lib/utils/config.js';
import { parseCookies } from 'lib/utils/cookie-utils.js';
import { isDev } from 'lib/utils/dev-utils.js';
import {
  generateIDSchemaMigrationOpsForDrafts,
  convertDraftStoreToNewIDSchema,
} from 'lib/utils/migration-utils.js';
import { entries } from 'lib/utils/objects.js';
import {
  convertClientDBThreadInfoToRawThreadInfo,
  convertRawThreadInfoToClientDBThreadInfo,
} from 'lib/utils/thread-ops-utils.js';

import commReduxStorageEngine from './comm-redux-storage-engine.js';
import {
  handleReduxMigrationFailure,
  persistWhitelist,
} from './handle-redux-migration-failure.js';
-import { rootKey, rootKeyPrefix } from './persist-constants.js';
+import { rootKey, rootKeyPrefix, storeVersion } from './persist-constants.js';
import type { AppState } from './redux-setup.js';
import { unshimClientDB } from './unshim-utils.js';
import { authoritativeKeyserverID } from '../authoritative-keyserver.js';
import { getCommSharedWorker } from '../shared-worker/shared-worker-provider.js';
import { getOlmWasmPath } from '../shared-worker/utils/constants.js';
import { isSQLiteSupported } from '../shared-worker/utils/db-utils.js';
import { workerRequestMessageTypes } from '../types/worker-types.js';

declare var keyserverURL: string;

const legacyMigrations = {
  [1]: async (state: any) => {
    const { primaryIdentityPublicKey, ...stateWithoutPrimaryIdentityPublicKey } =
      state;
    return {
      ...stateWithoutPrimaryIdentityPublicKey,
      cryptoStore: {
        primaryAccount: null,
        primaryIdentityKeys: null,
        notificationAccount: null,
        notificationIdentityKeys: null,
      },
    };
  },
  [2]: async (state: AppState) => {
    return state;
  },
  [3]: async (state: AppState) => {
    let newState = state;
    if (state.draftStore) {
      newState = {
        ...newState,
        draftStore: convertDraftStoreToNewIDSchema(state.draftStore),
      };
    }
    const sharedWorker = await getCommSharedWorker();
    const isSupported = await sharedWorker.isSupported();
    if (!isSupported) {
      return newState;
    }
    const stores = await sharedWorker.schedule({
      type: workerRequestMessageTypes.GET_CLIENT_STORE,
    });
    invariant(stores?.store, 'Stores should exist');
    await sharedWorker.schedule({
      type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
      storeOperations: {
        draftStoreOperations: generateIDSchemaMigrationOpsForDrafts(
          stores.store.drafts,
        ),
      },
    });
    return newState;
  },
  [4]: async (state: any) => {
    const { lastCommunicatedPlatformDetails, keyserverStore, ...rest } = state;
    return {
      ...rest,
      keyserverStore: {
        ...keyserverStore,
        keyserverInfos: {
          ...keyserverStore.keyserverInfos,
          [authoritativeKeyserverID]: {
            ...keyserverStore.keyserverInfos[authoritativeKeyserverID],
            lastCommunicatedPlatformDetails,
          },
        },
      },
    };
  },
  [5]: async (state: any) => {
    const sharedWorker = await getCommSharedWorker();
    const isSupported = await sharedWorker.isSupported();
    if (!isSupported) {
      return state;
    }
    if (!state.draftStore) {
      return state;
    }
    const { drafts } = state.draftStore;
    const draftStoreOperations = [];
    for (const key in drafts) {
      const text = drafts[key];
      draftStoreOperations.push({
        type: 'update',
        payload: { key, text },
      });
    }
    await sharedWorker.schedule({
      type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
      storeOperations: { draftStoreOperations },
    });
    return state;
  },
  [6]: async (state: AppState) => ({
    ...state,
    integrityStore: { threadHashes: {}, threadHashingStatus: 'starting' },
  }),
  [7]: async (state: AppState): Promise<AppState> => {
    if (!document.cookie) {
      return state;
    }
    const params = parseCookies(document.cookie);
    let cookie = null;
    if (params[cookieTypes.USER]) {
      cookie = `${cookieTypes.USER}=${params[cookieTypes.USER]}`;
    } else if (params[cookieTypes.ANONYMOUS]) {
      cookie = `${cookieTypes.ANONYMOUS}=${params[cookieTypes.ANONYMOUS]}`;
    }
    return {
      ...state,
      keyserverStore: {
        ...state.keyserverStore,
        keyserverInfos: {
          ...state.keyserverStore.keyserverInfos,
          [authoritativeKeyserverID]: {
            ...state.keyserverStore.keyserverInfos[authoritativeKeyserverID],
            cookie,
          },
        },
      },
    };
  },
  [8]: async (state: AppState) => ({
    ...state,
    globalThemeInfo: defaultGlobalThemeInfo,
  }),
  [9]: async (state: AppState) => ({
    ...state,
    keyserverStore: {
      ...state.keyserverStore,
      keyserverInfos: {
        ...state.keyserverStore.keyserverInfos,
        [authoritativeKeyserverID]: {
          ...state.keyserverStore.keyserverInfos[authoritativeKeyserverID],
          urlPrefix: keyserverURL,
        },
      },
    },
  }),
  [10]: async (state: AppState) => {
    const { keyserverInfos } = state.keyserverStore;
    const newKeyserverInfos: { [string]: KeyserverInfo } = {};
    for (const key in keyserverInfos) {
      newKeyserverInfos[key] = {
        ...keyserverInfos[key],
        connection: { ...defaultConnectionInfo },
        updatesCurrentAsOf: 0,
        sessionID: null,
      };
    }
    return {
      ...state,
      keyserverStore: {
        ...state.keyserverStore,
        keyserverInfos: newKeyserverInfos,
      },
    };
  },
  [11]: async (state: AppState) => {
    const sharedWorker = await getCommSharedWorker();
    const isSupported = await sharedWorker.isSupported();
    if (!isSupported) {
      return state;
    }
    const replaceOps: $ReadOnlyArray<ReplaceKeyserverOperation> = entries(
      state.keyserverStore.keyserverInfos,
    ).map(([id, keyserverInfo]) => ({
      type: 'replace_keyserver',
      payload: {
        id,
        keyserverInfo,
      },
    }));
    const keyserverStoreOperations: $ReadOnlyArray<ClientDBKeyserverStoreOperation> =
      keyserverStoreOpsHandlers.convertOpsToClientDBOps([
        { type: 'remove_all_keyservers' },
        ...replaceOps,
      ]);
    try {
      await sharedWorker.schedule({
        type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
        storeOperations: { keyserverStoreOperations },
      });
      return state;
    } catch (e) {
      console.log(e);
      return handleReduxMigrationFailure(state);
    }
  },
  [12]: async (state: AppState) => {
    const sharedWorker = await getCommSharedWorker();
    const isSupported = await sharedWorker.isSupported();
    if (!isSupported) {
      return state;
    }
    const replaceOps: $ReadOnlyArray<ReplaceKeyserverOperation> = entries(
      state.keyserverStore.keyserverInfos,
    )
      .filter(([, keyserverInfo]) => !keyserverInfo.actualizedCalendarQuery)
      .map(([id, keyserverInfo]) => ({
        type: 'replace_keyserver',
        payload: {
          id,
          keyserverInfo: {
            ...keyserverInfo,
            actualizedCalendarQuery: defaultCalendarQuery(
              getConfig().platformDetails.platform,
            ),
          },
        },
      }));
    if (replaceOps.length === 0) {
      return state;
    }
    const newState = {
      ...state,
      keyserverStore: keyserverStoreOpsHandlers.processStoreOperations(
        state.keyserverStore,
        replaceOps,
      ),
    };
    const keyserverStoreOperations =
      keyserverStoreOpsHandlers.convertOpsToClientDBOps(replaceOps);
    try {
      await sharedWorker.schedule({
        type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
        storeOperations: { keyserverStoreOperations },
      });
      return newState;
    } catch (e) {
      console.log(e);
      return handleReduxMigrationFailure(newState);
    }
  },
  [13]: async (state: any) => {
    const { cryptoStore, ...rest } = state;
    const sharedWorker = await getCommSharedWorker();
    await sharedWorker.schedule({
      type: workerRequestMessageTypes.INITIALIZE_CRYPTO_ACCOUNT,
      olmWasmPath: getOlmWasmPath(),
      initialCryptoStore: cryptoStore,
    });
    return rest;
  },
  [14]: async (state: AppState) => {
    const sharedWorker = await getCommSharedWorker();
    const isSupported = await sharedWorker.isSupported();
    if (!isSupported) {
      return state;
    }
    const stores = await sharedWorker.schedule({
      type: workerRequestMessageTypes.GET_CLIENT_STORE,
    });
    const keyserversDBInfo = stores?.store?.keyservers;
    if (!keyserversDBInfo) {
      return state;
    }
    const { translateClientDBData } = keyserverStoreOpsHandlers;
    const keyservers = translateClientDBData(keyserversDBInfo);
    // There is no modification of the keyserver data, but the ops handling
    // should correctly split the data between synced and non-synced tables
    const replaceOps: $ReadOnlyArray<ReplaceKeyserverOperation> = entries(
      keyservers,
    ).map(([id, keyserverInfo]) => ({
      type: 'replace_keyserver',
      payload: {
        id,
        keyserverInfo,
      },
    }));
    const keyserverStoreOperations: $ReadOnlyArray<ClientDBKeyserverStoreOperation> =
      keyserverStoreOpsHandlers.convertOpsToClientDBOps([
        { type: 'remove_all_keyservers' },
        ...replaceOps,
      ]);
    try {
      await sharedWorker.schedule({
        type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
        storeOperations: { keyserverStoreOperations },
      });
      return state;
    } catch (e) {
      console.log(e);
      return handleReduxMigrationFailure(state);
    }
  },
  [15]: (state: any) => {
    const { notifPermissionAlertInfo, ...rest } = state;
    const newState = {
      ...rest,
      alertStore: {
        alertInfos: defaultAlertInfos,
      },
    };
    return newState;
  },
  [16]: async (state: AppState) => {
    // 1. Check if `databaseModule` is supported and early-exit if not.
    const sharedWorker = await getCommSharedWorker();
    const isDatabaseSupported = await sharedWorker.isSupported();
    if (!isDatabaseSupported) {
      return state;
    }

    // 2. Get existing `stores` from SQLite.
    const stores = await sharedWorker.schedule({
      type: workerRequestMessageTypes.GET_CLIENT_STORE,
    });
    const messages: ?$ReadOnlyArray<ClientDBMessageInfo> =
      stores?.store?.messages;
    if (messages === null || messages === undefined || messages.length === 0) {
      return state;
    }

    // 3. Filter out `UNSUPPORTED.UPDATE_RELATIONSHIP` `ClientDBMessageInfo`s.
    const unsupportedMessageIDsToRemove = messages
      .filter((message: ClientDBMessageInfo) => {
        if (parseInt(message.type) !== messageTypes.UPDATE_RELATIONSHIP) {
          return false;
        }
        if (message.content === null || message.content === undefined) {
          return false;
        }
        const { operation } = JSON.parse(message.content);
        return operation === 'farcaster_mutual';
      })
      .map(message => message.id);

    // 4. Construct `ClientDBMessageStoreOperation`s
    const messageStoreOperations: $ReadOnlyArray<ClientDBMessageStoreOperation> =
      [
        {
          type: 'remove',
          payload: { ids: unsupportedMessageIDsToRemove },
        },
      ];

    // 5. Process the constructed `messageStoreOperations`.
    await sharedWorker.schedule({
      type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
      storeOperations: { messageStoreOperations },
    });
    return state;
  },
  [17]: async (state: AppState) => {
    // 1. Check if `databaseModule` is supported and early-exit if not.
    const sharedWorker = await getCommSharedWorker();
    const isDatabaseSupported = await sharedWorker.isSupported();
    if (!isDatabaseSupported) {
      return state;
    }

    // 2. Get existing `stores` from SQLite.
    const stores = await sharedWorker.schedule({
      type: workerRequestMessageTypes.GET_CLIENT_STORE,
    });
    const threads: ?$ReadOnlyArray<ClientDBThreadInfo> = stores?.store?.threads;
    if (threads === null || threads === undefined || threads.length === 0) {
      return state;
    }

    // 3. Convert to `RawThreadInfo`, patch in `specialRole`, and convert back.
    const patchedClientDBThreadInfos: $ReadOnlyArray<ClientDBThreadInfo> =
      threads
        .map(convertClientDBThreadInfoToRawThreadInfo)
        .map(patchRawThreadInfoWithSpecialRole)
        .map(convertRawThreadInfoToClientDBThreadInfo);

    // 4. Construct operations to remove existing threads and replace them
    //    with threads that have the `specialRole` field patched in.
    const threadStoreOperations: ClientDBThreadStoreOperation[] = [];
    threadStoreOperations.push({ type: 'remove_all' });
    for (const clientDBThreadInfo: ClientDBThreadInfo of patchedClientDBThreadInfos) {
      threadStoreOperations.push({
        type: 'replace',
        payload: clientDBThreadInfo,
      });
    }

    // 5. Process the constructed `threadStoreOperations`.
    await sharedWorker.schedule({
      type: workerRequestMessageTypes.PROCESS_STORE_OPERATIONS,
      storeOperations: { threadStoreOperations },
    });
    return state;
  },
  [18]: (state: AppState) =>
    unshimClientDB(state, [messageTypes.UPDATE_RELATIONSHIP]),
};

const migrateStorageToSQLite: StorageMigrationFunction<
  WebNavInfo,
  AppState,
> = async debug => {
  const sharedWorker = await getCommSharedWorker();
  const isSupported = await sharedWorker.isSupported();
  if (!isSupported) {
    return undefined;
  }

  const oldStorage = await getStoredState({ storage, key: rootKey });
  if (!oldStorage) {
    return undefined;
  }

  purgeStoredState({ storage, key: rootKey });
  if (debug) {
    console.log('redux-persist: migrating state to SQLite storage');
  }

  const allKeys = Object.keys(oldStorage);
  const transforms = persistConfig.transforms ?? [];
  const newStorage = { ...oldStorage };
  for (const transform of transforms) {
    for (const key of allKeys) {
      const transformedStore = transform.out(newStorage[key], key, newStorage);
      newStorage[key] = transformedStore;
    }
  }
  return newStorage;
};

const migrations = {
  // This migration doesn't change the store but sets a persisted version
  // in the DB
  [75]: (state: AppState) => ({
    state,
    ops: [],
  }),
};

const persistConfig: PersistConfig = {
  keyPrefix: rootKeyPrefix,
  key: rootKey,
  storage: commReduxStorageEngine,
  whitelist: isSQLiteSupported()
    ? persistWhitelist
    : [...persistWhitelist, 'draftStore'],
  migrate: (createAsyncMigrate(
    legacyMigrations,
    { debug: isDev },
    migrations,
    (error: Error, state: AppState) => handleReduxMigrationFailure(state),
    migrateStorageToSQLite,
  ): any),
-  version: 75,
+  version: storeVersion,
  transforms: [messageStoreMessagesBlocklistTransform, keyserverStoreTransform],
};

export { persistConfig };
diff --git a/web/shared-worker/types/sqlite-query-executor.js b/web/shared-worker/types/sqlite-query-executor.js
index 6af7061b7..c0db0da71 100644
--- a/web/shared-worker/types/sqlite-query-executor.js
+++ b/web/shared-worker/types/sqlite-query-executor.js
@@ -1,189 +1,190 @@
// @flow

import type { ClientDBAuxUserInfo } from 'lib/ops/aux-user-store-ops.js';
import type { ClientDBCommunityInfo } from 'lib/ops/community-store-ops.js';
import type { ClientDBEntryInfo } from 'lib/ops/entries-store-ops.js';
import type { ClientDBIntegrityThreadHash } from 'lib/ops/integrity-store-ops.js';
import type { ClientDBKeyserverInfo } from 'lib/ops/keyserver-store-ops.js';
import type { ClientDBReport } from 'lib/ops/report-store-ops.js';
import type { ClientDBSyncedMetadataEntry } from 'lib/ops/synced-metadata-store-ops.js';
import type { ClientDBThreadActivityEntry } from 'lib/ops/thread-activity-store-ops.js';
import type { ClientDBUserInfo } from 'lib/ops/user-store-ops.js';
import type { ClientDBDraftInfo } from 'lib/types/draft-types.js';
import type {
  OutboundP2PMessage,
  InboundP2PMessage,
} from 'lib/types/sqlite-types.js';

import {
  type NullableInt,
  type NullableString,
  type WebClientDBThreadInfo,
} from './entities.js';

export type WebMessage = {
  +id: string,
  +localID: NullableString,
  +thread: string,
  +user: string,
  +type: number,
  +futureType: NullableInt,
  +content: NullableString,
  +time: string,
};

export type Media = {
  +id: string,
  +container: string,
  +thread: string,
  +uri: string,
  +type: 'photo' | 'video',
  +extras: string,
};

export type OlmPersistSession = {
  +targetDeviceID: string,
  +sessionData: string,
  +version: number,
};

declare export class SQLiteQueryExecutor {
  constructor(sqliteFilePath: string): void;

  updateDraft(key: string, text: string): void;
  moveDraft(oldKey: string, newKey: string): boolean;
  getAllDrafts(): ClientDBDraftInfo[];
  removeAllDrafts(): void;
  removeDrafts(ids: $ReadOnlyArray<string>): void;

  getAllMessagesWeb(): $ReadOnlyArray<{
    +message: WebMessage,
    +medias: $ReadOnlyArray<Media>,
  }>;
  removeAllMessages(): void;
  removeMessages(ids: $ReadOnlyArray<string>): void;
  removeMessagesForThreads(threadIDs: $ReadOnlyArray<string>): void;
  replaceMessageWeb(message: WebMessage): void;
  rekeyMessage(from: string, to: string): void;
  removeAllMedia(): void;
  removeMediaForThreads(threadIDs: $ReadOnlyArray<string>): void;
  removeMediaForMessages(msgIDs: $ReadOnlyArray<string>): void;
  removeMediaForMessage(msgID: string): void;
  replaceMedia(media: Media): void;
  rekeyMediaContainers(from: string, to: string): void;

  replaceMessageStoreThreads(
    threads: $ReadOnlyArray<{ +id: string, +startReached: number }>,
  ): void;
  removeMessageStoreThreads($ReadOnlyArray<string>): void;
  getAllMessageStoreThreads(): $ReadOnlyArray<{
    +id: string,
    +startReached: number,
  }>;
  removeAllMessageStoreThreads(): void;

  setMetadata(entryName: string, data: string): void;
  clearMetadata(entryName: string): void;
  getMetadata(entryName: string): string;

  replaceReport(report: ClientDBReport): void;
  removeReports(ids: $ReadOnlyArray<string>): void;
  removeAllReports(): void;
  getAllReports(): ClientDBReport[];

  setPersistStorageItem(key: string, item: string): void;
  removePersistStorageItem(key: string): void;
  getPersistStorageItem(key: string): string;

  replaceUser(userInfo: ClientDBUserInfo): void;
  removeUsers(ids: $ReadOnlyArray<string>): void;
  removeAllUsers(): void;
  getAllUsers(): ClientDBUserInfo[];

  replaceThreadWeb(thread: WebClientDBThreadInfo): void;
  removeThreads(ids: $ReadOnlyArray<string>): void;
  removeAllThreads(): void;
  getAllThreadsWeb(): WebClientDBThreadInfo[];

  replaceKeyserver(keyserverInfo: ClientDBKeyserverInfo): void;
  removeKeyservers(ids: $ReadOnlyArray<string>): void;
  removeAllKeyservers(): void;
  getAllKeyservers(): ClientDBKeyserverInfo[];

  replaceCommunity(communityInfo: ClientDBCommunityInfo): void;
  removeCommunities(ids: $ReadOnlyArray<string>): void;
  removeAllCommunities(): void;
  getAllCommunities(): ClientDBCommunityInfo[];

  replaceIntegrityThreadHashes(
    threadHashes: $ReadOnlyArray<ClientDBIntegrityThreadHash>,
  ): void;
  removeIntegrityThreadHashes(ids: $ReadOnlyArray<string>): void;
  removeAllIntegrityThreadHashes(): void;
  getAllIntegrityThreadHashes(): ClientDBIntegrityThreadHash[];

  replaceSyncedMetadataEntry(
    syncedMetadataEntry: ClientDBSyncedMetadataEntry,
  ): void;
  removeSyncedMetadata(names: $ReadOnlyArray<string>): void;
  removeAllSyncedMetadata(): void;
  getAllSyncedMetadata(): ClientDBSyncedMetadataEntry[];

  replaceAuxUserInfo(auxUserInfo: ClientDBAuxUserInfo): void;
  removeAuxUserInfos(ids: $ReadOnlyArray<string>): void;
  removeAllAuxUserInfos(): void;
  getAllAuxUserInfos(): ClientDBAuxUserInfo[];

  replaceThreadActivityEntry(
    threadActivityEntry: ClientDBThreadActivityEntry,
  ): void;
  removeThreadActivityEntries(ids: $ReadOnlyArray<string>): void;
  removeAllThreadActivityEntries(): void;
  getAllThreadActivityEntries(): ClientDBThreadActivityEntry[];

  replaceEntry(entryInfo: ClientDBEntryInfo): void;
  removeEntries(ids: $ReadOnlyArray<string>): void;
  removeAllEntries(): void;
  getAllEntries(): $ReadOnlyArray<ClientDBEntryInfo>;

  beginTransaction(): void;
  commitTransaction(): void;
  rollbackTransaction(): void;

  getContentAccountID(): number;
  getNotifsAccountID(): number;
  getOlmPersistAccountDataWeb(accountID: number): NullableString;
  getOlmPersistSessionsData(): $ReadOnlyArray<OlmPersistSession>;
  storeOlmPersistAccount(accountID: number, accountData: string): void;
  storeOlmPersistSession(session: OlmPersistSession): void;

  restoreFromMainCompaction(
    mainCompactionPath: string,
    mainCompactionEncryptionKey: string,
+    maxVersion: string,
  ): void;
  restoreFromBackupLog(backupLog: Uint8Array): void;

  addOutboundP2PMessages(messages: $ReadOnlyArray<OutboundP2PMessage>): void;
  removeOutboundP2PMessagesOlderThan(
    lastConfirmedMessageID: string,
    deviceID: string,
  ): void;
  removeAllOutboundP2PMessages(deviceID: string): void;
  getAllOutboundP2PMessages(): $ReadOnlyArray<OutboundP2PMessage>;
  setCiphertextForOutboundP2PMessage(
    messageID: string,
    deviceID: string,
    ciphertext: string,
  ): void;
  markOutboundP2PMessageAsSent(messageID: string, deviceID: string): void;
  addInboundP2PMessage(message: InboundP2PMessage): void;
  getAllInboundP2PMessage(): $ReadOnlyArray<InboundP2PMessage>;
  removeInboundP2PMessages(ids: $ReadOnlyArray<string>): void;

  // method is provided to manually signal that a C++ object
  // is no longer needed and can be deleted
  delete(): void;
}

export type SQLiteQueryExecutorType = typeof SQLiteQueryExecutor;
diff --git a/web/shared-worker/worker/backup.js b/web/shared-worker/worker/backup.js
index 076315dc0..74d5e57e6 100644
--- a/web/shared-worker/worker/backup.js
+++ b/web/shared-worker/worker/backup.js
@@ -1,76 +1,80 @@
// @flow

import backupService from 'lib/facts/backup-service.js';
import { decryptCommon } from 'lib/media/aes-crypto-utils-common.js';
import type { AuthMetadata } from 'lib/shared/identity-client-context.js';

import { getProcessingStoreOpsExceptionMessage } from './process-operations.js';
import {
  BackupClient,
  RequestedData,
} from '../../backup-client-wasm/wasm/backup-client-wasm.js';
-import { completeRootKey } from '../../redux/persist-constants.js';
+import {
+  completeRootKey,
+  storeVersion,
+} from '../../redux/persist-constants.js';
import type { EmscriptenModule } from '../types/module.js';
import type { SQLiteQueryExecutor } from '../types/sqlite-query-executor.js';
import { COMM_SQLITE_BACKUP_RESTORE_DATABASE_PATH } from '../utils/constants.js';
import { importDatabaseContent } from '../utils/db-utils.js';

async function restoreBackup(
  sqliteQueryExecutor: SQLiteQueryExecutor,
  dbModule: EmscriptenModule,
  authMetadata: AuthMetadata,
  backupID: string,
  backupDataKey: string,
  backupLogDataKey: string,
) {
  const decryptionKey = new TextEncoder().encode(backupLogDataKey);
  const { userID, deviceID, accessToken } = authMetadata;
  if (!userID || !deviceID || !accessToken) {
    throw new Error('Backup restore requires full authMetadata');
  }
  const userIdentity = { userID, deviceID, accessToken };

  const client = new BackupClient(backupService.url);
  const result = await client.downloadBackupData(
    {
      type: 'BackupID',
      backupID,
      userIdentity,
    },
    RequestedData.UserData,
  );

  importDatabaseContent(
    result,
    dbModule,
    COMM_SQLITE_BACKUP_RESTORE_DATABASE_PATH,
  );

  try {
    const reduxPersistData =
      sqliteQueryExecutor.getPersistStorageItem(completeRootKey);
    sqliteQueryExecutor.restoreFromMainCompaction(
      COMM_SQLITE_BACKUP_RESTORE_DATABASE_PATH,
      backupDataKey,
+      `${storeVersion ?? -1}`,
    );
    sqliteQueryExecutor.setPersistStorageItem(
      completeRootKey,
      reduxPersistData,
    );
  } catch (err) {
    throw new Error(getProcessingStoreOpsExceptionMessage(err, dbModule));
  }

  await client.downloadLogs(userIdentity, backupID, async log => {
    const content = await decryptCommon(crypto, decryptionKey, log);
    try {
      sqliteQueryExecutor.restoreFromBackupLog(content);
    } catch (err) {
      throw new Error(getProcessingStoreOpsExceptionMessage(err, dbModule));
    }
  });
}

export { restoreBackup };