diff --git a/.changeset/rare-windows-argue.md b/.changeset/rare-windows-argue.md new file mode 100644 index 000000000..d7c5548d0 --- /dev/null +++ b/.changeset/rare-windows-argue.md @@ -0,0 +1,7 @@ +--- +'@powersync/web': minor +--- + +- Fixed some edge cases where multiple tabs with OPFS can cause sync deadlocks. +- Fixed issue where calling `powerSync.close()` would cause a disconnect if using multiple tabs (the default should not be to disconnect if using multiple tabs) +- Improved shared sync implementation database delegation and opening strategy. diff --git a/.changeset/witty-steaks-worry.md b/.changeset/witty-steaks-worry.md new file mode 100644 index 000000000..f92c5e058 --- /dev/null +++ b/.changeset/witty-steaks-worry.md @@ -0,0 +1,5 @@ +--- +'@powersync/common': minor +--- + +- Improved serializing of upload and download errors for SyncStatus events. Some JS `Error`s are not cloneable, the JSON representation of a SyncStatus should now always be cloneable. diff --git a/demos/angular-supabase-todolist/package.json b/demos/angular-supabase-todolist/package.json index ce7412a5d..7fe942c75 100644 --- a/demos/angular-supabase-todolist/package.json +++ b/demos/angular-supabase-todolist/package.json @@ -23,7 +23,7 @@ "@angular/platform-browser-dynamic": "^19.2.4", "@angular/router": "^19.2.4", "@angular/service-worker": "^19.2.4", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/web": "workspace:*", "@supabase/supabase-js": "^2.44.4", "rxjs": "~7.8.1", diff --git a/demos/example-capacitor/package.json b/demos/example-capacitor/package.json index 383f9e1e0..0d95fd4eb 100644 --- a/demos/example-capacitor/package.json +++ b/demos/example-capacitor/package.json @@ -25,7 +25,7 @@ "@capacitor/core": "latest", "@capacitor/ios": "^7.4.3", "@capacitor/splash-screen": "latest", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/capacitor": "workspace:*", "@powersync/react": "workspace:*", 
"@powersync/web": "workspace:*", diff --git a/demos/example-electron/package.json b/demos/example-electron/package.json index 191664b73..1bfe60e8a 100644 --- a/demos/example-electron/package.json +++ b/demos/example-electron/package.json @@ -21,7 +21,7 @@ "dependencies": { "@emotion/react": "^11.13.0", "@emotion/styled": "^11.13.0", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/icons-material": "^5.15.16", "@mui/material": "^5.15.16", "@mui/x-data-grid": "^6.19.11", diff --git a/demos/example-nextjs/package.json b/demos/example-nextjs/package.json index c7cae9efc..b305fd9ed 100644 --- a/demos/example-nextjs/package.json +++ b/demos/example-nextjs/package.json @@ -14,7 +14,7 @@ "@emotion/react": "^11.11.4", "@emotion/styled": "^11.11.5", "@fontsource/roboto": "^5.0.13", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@lexical/react": "^0.15.0", "@mui/icons-material": "^5.15.18", "@mui/material": "^5.15.18", diff --git a/demos/react-multi-client/package.json b/demos/react-multi-client/package.json index f66dfde76..710e02634 100644 --- a/demos/react-multi-client/package.json +++ b/demos/react-multi-client/package.json @@ -10,7 +10,7 @@ "test:build": "pnpm build" }, "dependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/react": "workspace:*", "@powersync/web": "workspace:*", "@supabase/supabase-js": "^2.43.1", diff --git a/demos/react-native-web-supabase-todolist/package.json b/demos/react-native-web-supabase-todolist/package.json index f1d1a5097..2914bbf97 100644 --- a/demos/react-native-web-supabase-todolist/package.json +++ b/demos/react-native-web-supabase-todolist/package.json @@ -14,7 +14,7 @@ "@expo/metro-runtime": "^4.0.1", "@expo/vector-icons": "^14.0.2", "@journeyapps/react-native-quick-sqlite": "^2.5.0", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/attachments": "workspace:*", "@powersync/react": 
"workspace:*", "@powersync/react-native": "workspace:*", diff --git a/demos/react-supabase-todolist-optional-sync/package.json b/demos/react-supabase-todolist-optional-sync/package.json index 3d6ae3db5..54dc834b7 100644 --- a/demos/react-supabase-todolist-optional-sync/package.json +++ b/demos/react-supabase-todolist-optional-sync/package.json @@ -11,7 +11,7 @@ "dependencies": { "@emotion/react": "11.11.4", "@emotion/styled": "11.11.5", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/icons-material": "^5.15.12", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", diff --git a/demos/react-supabase-todolist-sync-streams/package.json b/demos/react-supabase-todolist-sync-streams/package.json index d35763305..8f21faeb5 100644 --- a/demos/react-supabase-todolist-sync-streams/package.json +++ b/demos/react-supabase-todolist-sync-streams/package.json @@ -11,7 +11,7 @@ "dependencies": { "@emotion/react": "11.11.4", "@emotion/styled": "11.11.5", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/icons-material": "^5.15.12", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", diff --git a/demos/react-supabase-todolist-tanstackdb/package.json b/demos/react-supabase-todolist-tanstackdb/package.json index ce552097e..3358cec9e 100644 --- a/demos/react-supabase-todolist-tanstackdb/package.json +++ b/demos/react-supabase-todolist-tanstackdb/package.json @@ -11,7 +11,7 @@ "dependencies": { "@emotion/react": "11.11.4", "@emotion/styled": "11.11.5", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/icons-material": "^5.15.12", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", diff --git a/demos/react-supabase-todolist/package.json b/demos/react-supabase-todolist/package.json index d35763305..8f21faeb5 100644 --- a/demos/react-supabase-todolist/package.json +++ b/demos/react-supabase-todolist/package.json @@ -11,7 +11,7 @@ "dependencies": { "@emotion/react": 
"11.11.4", "@emotion/styled": "11.11.5", - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/icons-material": "^5.15.12", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", diff --git a/demos/react-supabase-todolist/src/components/providers/SystemProvider.tsx b/demos/react-supabase-todolist/src/components/providers/SystemProvider.tsx index 8a3f3c209..5aae4f4e2 100644 --- a/demos/react-supabase-todolist/src/components/providers/SystemProvider.tsx +++ b/demos/react-supabase-todolist/src/components/providers/SystemProvider.tsx @@ -3,7 +3,14 @@ import { AppSchema, ListRecord, LISTS_TABLE, TODOS_TABLE } from '@/library/power import { SupabaseConnector } from '@/library/powersync/SupabaseConnector'; import { CircularProgress } from '@mui/material'; import { PowerSyncContext } from '@powersync/react'; -import { createBaseLogger, DifferentialWatchedQuery, LogLevel, PowerSyncDatabase } from '@powersync/web'; +import { + createBaseLogger, + DifferentialWatchedQuery, + LogLevel, + PowerSyncDatabase, + WASQLiteOpenFactory, + WASQLiteVFS +} from '@powersync/web'; import React, { Suspense } from 'react'; import { NavigationPanelContextProvider } from '../navigation/NavigationPanelContext'; @@ -12,8 +19,15 @@ export const useSupabase = () => React.useContext(SupabaseContext); export const db = new PowerSyncDatabase({ schema: AppSchema, - database: { - dbFilename: 'example.db' + database: new WASQLiteOpenFactory({ + dbFilename: 'example.db', + vfs: WASQLiteVFS.OPFSCoopSyncVFS, + flags: { + enableMultiTabs: typeof SharedWorker !== 'undefined' + } + }), + flags: { + enableMultiTabs: typeof SharedWorker !== 'undefined' } }); diff --git a/demos/yjs-react-supabase-text-collab/package.json b/demos/yjs-react-supabase-text-collab/package.json index c3765dab3..84cfc38ec 100644 --- a/demos/yjs-react-supabase-text-collab/package.json +++ b/demos/yjs-react-supabase-text-collab/package.json @@ -9,7 +9,7 @@ "start": "pnpm build && pnpm preview" }, 
"dependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", "@powersync/react": "workspace:*", diff --git a/packages/common/src/client/ConnectionManager.ts b/packages/common/src/client/ConnectionManager.ts index 859add550..0d877d7e2 100644 --- a/packages/common/src/client/ConnectionManager.ts +++ b/packages/common/src/client/ConnectionManager.ts @@ -1,4 +1,5 @@ import { ILogger } from 'js-logger'; +import { SyncStatus } from '../db/crud/SyncStatus.js'; import { BaseListener, BaseObserver } from '../utils/BaseObserver.js'; import { PowerSyncBackendConnector } from './connection/PowerSyncBackendConnector.js'; import { @@ -13,7 +14,6 @@ import { SyncStreamSubscribeOptions, SyncStreamSubscription } from './sync/sync-streams.js'; -import { SyncStatus } from '../db/crud/SyncStatus.js'; /** * @internal diff --git a/packages/common/src/client/sync/stream/AbstractStreamingSyncImplementation.ts b/packages/common/src/client/sync/stream/AbstractStreamingSyncImplementation.ts index ab3e37c7c..78955858b 100644 --- a/packages/common/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +++ b/packages/common/src/client/sync/stream/AbstractStreamingSyncImplementation.ts @@ -16,7 +16,7 @@ import { import { CrudEntry } from '../bucket/CrudEntry.js'; import { SyncDataBucket } from '../bucket/SyncDataBucket.js'; import { AbstractRemote, FetchStrategy, SyncStreamOptions } from './AbstractRemote.js'; -import { coreStatusToJs, EstablishSyncStream, Instruction, SyncPriorityStatus } from './core-instruction.js'; +import { EstablishSyncStream, Instruction, coreStatusToJs } from './core-instruction.js'; import { BucketRequest, CrudUploadNotification, @@ -1223,7 +1223,6 @@ The next upload iteration will be delayed.`); resolve(); return; } - const { retryDelayMs } = this.options; let timeoutId: ReturnType | undefined; diff --git a/packages/common/src/db/crud/SyncStatus.ts 
b/packages/common/src/db/crud/SyncStatus.ts index f5686c037..5b8c45990 100644 --- a/packages/common/src/db/crud/SyncStatus.ts +++ b/packages/common/src/db/crud/SyncStatus.ts @@ -1,7 +1,7 @@ -import { CoreStreamSubscription } from '../../client/sync/stream/core-instruction.js'; import { SyncClientImplementation } from '../../client/sync/stream/AbstractStreamingSyncImplementation.js'; -import { InternalProgressInformation, ProgressWithOperations, SyncProgress } from './SyncProgress.js'; +import { CoreStreamSubscription } from '../../client/sync/stream/core-instruction.js'; import { SyncStreamDescription, SyncSubscriptionDescription } from '../../client/sync/sync-streams.js'; +import { InternalProgressInformation, ProgressWithOperations, SyncProgress } from './SyncProgress.js'; export type SyncDataFlowStatus = Partial<{ downloading: boolean; @@ -250,13 +250,28 @@ export class SyncStatus { return { connected: this.connected, connecting: this.connecting, - dataFlow: this.dataFlowStatus, + dataFlow: { + ...this.dataFlowStatus, + uploadError: this.serializeError(this.dataFlowStatus.uploadError), + downloadError: this.serializeError(this.dataFlowStatus.downloadError) + }, lastSyncedAt: this.lastSyncedAt, hasSynced: this.hasSynced, priorityStatusEntries: this.priorityStatusEntries }; } + protected serializeError(error?: Error) { + if (typeof error == 'undefined') { + return undefined; + } + return { + name: error.name, + message: error.message, + stack: error.stack + }; + } + private static comparePriorities(a: SyncPriorityStatus, b: SyncPriorityStatus) { return b.priority - a.priority; // Reverse because higher priorities have lower numbers } diff --git a/packages/drizzle-driver/package.json b/packages/drizzle-driver/package.json index 7ebf9aa0c..66c24d5fa 100644 --- a/packages/drizzle-driver/package.json +++ b/packages/drizzle-driver/package.json @@ -47,7 +47,7 @@ "drizzle-orm": "<1.0.0" }, "devDependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + 
"@journeyapps/wa-sqlite": "^1.4.1", "@powersync/web": "workspace:*", "@types/node": "^20.17.6", "drizzle-orm": "^0.44.7", @@ -55,4 +55,4 @@ "vite-plugin-top-level-await": "^1.4.4", "vite-plugin-wasm": "^3.3.0" } -} \ No newline at end of file +} diff --git a/packages/kysely-driver/package.json b/packages/kysely-driver/package.json index 31353349c..852db67fb 100644 --- a/packages/kysely-driver/package.json +++ b/packages/kysely-driver/package.json @@ -49,7 +49,7 @@ "kysely": "^0.28.0" }, "devDependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/web": "workspace:*", "@types/node": "^20.17.6", "vite": "^6.1.0", diff --git a/packages/web/package.json b/packages/web/package.json index 7152e3459..8572d0b26 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -61,7 +61,7 @@ "author": "JOURNEYAPPS", "license": "Apache-2.0", "peerDependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@powersync/common": "workspace:^1.43.1" }, "dependencies": { @@ -72,7 +72,7 @@ "commander": "^12.1.0" }, "devDependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@types/uuid": "^9.0.6", "crypto-browserify": "^3.12.0", "p-defer": "^4.0.1", diff --git a/packages/web/src/db/PowerSyncDatabase.ts b/packages/web/src/db/PowerSyncDatabase.ts index 0e60de018..fe88ac71d 100644 --- a/packages/web/src/db/PowerSyncDatabase.ts +++ b/packages/web/src/db/PowerSyncDatabase.ts @@ -1,32 +1,31 @@ import { - type BucketStorageAdapter, - type PowerSyncBackendConnector, - type PowerSyncCloseOptions, - type RequiredAdditionalConnectionOptions, AbstractPowerSyncDatabase, DBAdapter, - DEFAULT_POWERSYNC_CLOSE_OPTIONS, - isDBAdapter, - isSQLOpenFactory, PowerSyncDatabaseOptions, PowerSyncDatabaseOptionsWithDBAdapter, PowerSyncDatabaseOptionsWithOpenFactory, PowerSyncDatabaseOptionsWithSettings, SqliteBucketStorage, - StreamingSyncImplementation + 
StreamingSyncImplementation, + isDBAdapter, + isSQLOpenFactory, + type BucketStorageAdapter, + type PowerSyncBackendConnector, + type PowerSyncCloseOptions, + type RequiredAdditionalConnectionOptions } from '@powersync/common'; import { Mutex } from 'async-mutex'; import { getNavigatorLocks } from '../shared/navigator'; +import { WebDBAdapter } from './adapters/WebDBAdapter'; import { WASQLiteOpenFactory } from './adapters/wa-sqlite/WASQLiteOpenFactory'; import { DEFAULT_WEB_SQL_FLAGS, ResolvedWebSQLOpenOptions, - resolveWebSQLFlags, - WebSQLFlags + WebSQLFlags, + resolveWebSQLFlags } from './adapters/web-sql-flags'; -import { WebDBAdapter } from './adapters/WebDBAdapter'; -import { SharedWebStreamingSyncImplementation } from './sync/SharedWebStreamingSyncImplementation'; import { SSRStreamingSyncImplementation } from './sync/SSRWebStreamingSyncImplementation'; +import { SharedWebStreamingSyncImplementation } from './sync/SharedWebStreamingSyncImplementation'; import { WebRemote } from './sync/WebRemote'; import { WebStreamingSyncImplementation, @@ -160,14 +159,13 @@ export class PowerSyncDatabase extends AbstractPowerSyncDatabase { * By default the sync stream client is only disconnected if * multiple tabs are not enabled. */ - close(options: PowerSyncCloseOptions = DEFAULT_POWERSYNC_CLOSE_OPTIONS): Promise { + close(options?: PowerSyncCloseOptions): Promise { if (this.unloadListener) { window.removeEventListener('unload', this.unloadListener); } - return super.close({ // Don't disconnect by default if multiple tabs are enabled - disconnect: options.disconnect ?? !this.resolvedFlags.enableMultiTabs + disconnect: options?.disconnect ?? 
!this.resolvedFlags.enableMultiTabs }); } diff --git a/packages/web/src/db/adapters/AsyncDatabaseConnection.ts b/packages/web/src/db/adapters/AsyncDatabaseConnection.ts index 1b983802c..0ae2dc481 100644 --- a/packages/web/src/db/adapters/AsyncDatabaseConnection.ts +++ b/packages/web/src/db/adapters/AsyncDatabaseConnection.ts @@ -17,6 +17,18 @@ export type ProxiedQueryResult = Omit & { */ export type OnTableChangeCallback = (event: BatchedUpdateNotification) => void; +/** + * Thrown when an underlying database connection is closed. + * This is particularly relevant when worker connections are marked as closed while + * operations are still in progress. + */ +export class ConnectionClosedError extends Error { + constructor(message: string) { + super(message); + this.name = 'ConnectionClosedError'; + } +} + /** * @internal * An async Database connection which provides basic async SQL methods. diff --git a/packages/web/src/db/adapters/LockedAsyncDatabaseAdapter.ts b/packages/web/src/db/adapters/LockedAsyncDatabaseAdapter.ts index 2b1f2221a..2d3700de6 100644 --- a/packages/web/src/db/adapters/LockedAsyncDatabaseAdapter.ts +++ b/packages/web/src/db/adapters/LockedAsyncDatabaseAdapter.ts @@ -10,8 +10,8 @@ import { createLogger, type ILogger } from '@powersync/common'; -import { getNavigatorLocks } from '../..//shared/navigator'; -import { AsyncDatabaseConnection } from './AsyncDatabaseConnection'; +import { getNavigatorLocks } from '../../shared/navigator'; +import { AsyncDatabaseConnection, ConnectionClosedError } from './AsyncDatabaseConnection'; import { SharedConnectionWorker, WebDBAdapter } from './WebDBAdapter'; import { WorkerWrappedAsyncDatabaseConnection } from './WorkerWrappedAsyncDatabaseConnection'; import { WASQLiteVFS } from './wa-sqlite/WASQLiteConnection'; @@ -26,10 +26,16 @@ export interface LockedAsyncDatabaseAdapterOptions { openConnection: () => Promise; debugMode?: boolean; logger?: ILogger; + defaultLockTimeoutMs?: number; + 
reOpenOnConnectionClosed?: boolean; } export type LockedAsyncDatabaseAdapterListener = DBAdapterListener & { initialized?: () => void; + /** + * Fired when the database is re-opened after being closed. + */ + databaseReOpened?: () => void; }; /** @@ -51,6 +57,7 @@ export class LockedAsyncDatabaseAdapter private _config: ResolvedWebSQLOpenOptions | null = null; protected pendingAbortControllers: Set; protected requiresHolds: boolean | null; + protected databaseOpenPromise: Promise | null = null; closing: boolean; closed: boolean; @@ -105,16 +112,72 @@ export class LockedAsyncDatabaseAdapter return this.initPromise; } + protected async openInternalDB() { + /** + * Execute opening of the db in a lock in order not to interfere with other operations. + */ + return this._acquireLock(async () => { + // Dispose any previous table change listener. + this._disposeTableChangeListener?.(); + this._disposeTableChangeListener = null; + this._db?.close().catch((ex) => this.logger.warn(`Error closing database before opening new instance`, ex)); + const isReOpen = !!this._db; + this._db = null; + + this._db = await this.options.openConnection(); + await this._db.init(); + this._config = await this._db.getConfig(); + await this.registerOnChangeListener(this._db); + if (isReOpen) { + this.iterateListeners((cb) => cb.databaseReOpened?.()); + } + /** + * This is only required for the long-lived shared IndexedDB connections. + */ + this.requiresHolds = (this._config as ResolvedWASQLiteOpenFactoryOptions).vfs == WASQLiteVFS.IDBBatchAtomicVFS; + }); + } + + protected _reOpen() { + this.databaseOpenPromise = this.openInternalDB().finally(() => { + this.databaseOpenPromise = null; + }); + return this.databaseOpenPromise; + } + + /** + * Re-opens the underlying database. + * Returns a pending operation if one is already in progress. 
+ */ + async reOpenInternalDB(): Promise { + if (!this.options.reOpenOnConnectionClosed) { + throw new Error(`Cannot re-open underlying database, reOpenOnConnectionClosed is not enabled`); + } + if (this.databaseOpenPromise) { + return this.databaseOpenPromise; + } + return this._reOpen(); + } + protected async _init() { - this._db = await this.options.openConnection(); - await this._db.init(); - this._config = await this._db.getConfig(); - await this.registerOnChangeListener(this._db); - this.iterateListeners((cb) => cb.initialized?.()); /** - * This is only required for the long-lived shared IndexedDB connections. + * For OPFS, we can see this open call sometimes fail due to NoModificationAllowedError. + * We should be able to recover from this by re-opening the database. */ - this.requiresHolds = (this._config as ResolvedWASQLiteOpenFactoryOptions).vfs == WASQLiteVFS.IDBBatchAtomicVFS; + const maxAttempts = 3; + for (let count = 0; count < maxAttempts; count++) { + try { + await this.openInternalDB(); + break; + } catch (ex) { + if (count == maxAttempts - 1) { + throw ex; + } + this.logger.warn(`Attempt ${count + 1} of ${maxAttempts} to open database failed, retrying in 1 second...`, ex); + await new Promise((resolve) => setTimeout(resolve, 1000)); + } + } + this.iterateListeners((cb) => cb.initialized?.()); } getConfiguration(): ResolvedWebSQLOpenOptions { @@ -196,7 +259,7 @@ export class LockedAsyncDatabaseAdapter return this.acquireLock( async () => fn(this.generateDBHelpers({ execute: this._execute, executeRaw: this._executeRaw })), { - timeoutMs: options?.timeoutMs + timeoutMs: options?.timeoutMs ?? this.options.defaultLockTimeoutMs } ); } @@ -206,23 +269,20 @@ export class LockedAsyncDatabaseAdapter return this.acquireLock( async () => fn(this.generateDBHelpers({ execute: this._execute, executeRaw: this._executeRaw })), { - timeoutMs: options?.timeoutMs + timeoutMs: options?.timeoutMs ?? 
this.options.defaultLockTimeoutMs } ); } - protected async acquireLock(callback: () => Promise, options?: { timeoutMs?: number }): Promise { - await this.waitForInitialized(); - + protected async _acquireLock(callback: () => Promise, options?: { timeoutMs?: number }): Promise { if (this.closing) { throw new Error(`Cannot acquire lock, the database is closing`); } - const abortController = new AbortController(); this.pendingAbortControllers.add(abortController); const { timeoutMs } = options ?? {}; - const timoutId = timeoutMs + const timeoutId = timeoutMs ? setTimeout(() => { abortController.abort(`Timeout after ${timeoutMs}ms`); this.pendingAbortControllers.delete(abortController); @@ -234,19 +294,52 @@ export class LockedAsyncDatabaseAdapter { signal: abortController.signal }, async () => { this.pendingAbortControllers.delete(abortController); - if (timoutId) { - clearTimeout(timoutId); + if (timeoutId) { + clearTimeout(timeoutId); } - const holdId = this.requiresHolds ? await this.baseDB.markHold() : null; - try { - return await callback(); - } finally { - if (holdId) { - await this.baseDB.releaseHold(holdId); + return await callback(); + } + ); + } + + protected async acquireLock(callback: () => Promise, options?: { timeoutMs?: number }): Promise { + await this.waitForInitialized(); + + // The database is being opened in the background. Wait for it here. + if (this.databaseOpenPromise) { + await this.databaseOpenPromise; + } + + return this._acquireLock(async () => { + let holdId: string | null = null; + try { + /** + * We can't await this since it uses the same lock as we're in now. + * If there is a pending open, this call will throw. + * If there is no pending open, but there is also no database - the open + * might have failed. We need to re-open the database. + */ + if (this.databaseOpenPromise || !this._db) { + throw new ConnectionClosedError('Connection is busy re-opening'); + } + + holdId = this.requiresHolds ? 
await this.baseDB.markHold() : null; + return await callback(); + } catch (ex) { + if (ex instanceof ConnectionClosedError) { + if (this.options.reOpenOnConnectionClosed && !this.databaseOpenPromise && !this.closing) { + // Immediately re-open the database. We need to miss as little table updates as possible. + // Note, don't await this since it uses the same lock as we're in now. + this.reOpenInternalDB(); } } + throw ex; + } finally { + if (holdId) { + await this.baseDB.releaseHold(holdId); + } } - ); + }, options); } async readTransaction(fn: (tx: Transaction) => Promise, options?: DBLockOptions | undefined): Promise { diff --git a/packages/web/src/db/adapters/WorkerWrappedAsyncDatabaseConnection.ts b/packages/web/src/db/adapters/WorkerWrappedAsyncDatabaseConnection.ts index e03bf8aa8..57557edb8 100644 --- a/packages/web/src/db/adapters/WorkerWrappedAsyncDatabaseConnection.ts +++ b/packages/web/src/db/adapters/WorkerWrappedAsyncDatabaseConnection.ts @@ -1,6 +1,8 @@ +import { BaseObserver } from '@powersync/common'; import * as Comlink from 'comlink'; import { AsyncDatabaseConnection, + ConnectionClosedError, OnTableChangeCallback, OpenAsyncDatabaseConnection, ProxiedQueryResult @@ -23,17 +25,23 @@ export type WrappedWorkerConnectionOptions void; }; +export type WorkerWrappedAsyncDatabaseConnectionListener = { + closing: () => void; +}; /** * Wraps a provided instance of {@link AsyncDatabaseConnection}, providing necessary proxy * functions for worker listeners. 
*/ export class WorkerWrappedAsyncDatabaseConnection + extends BaseObserver implements AsyncDatabaseConnection { protected lockAbortController = new AbortController(); protected notifyRemoteClosed: AbortController | undefined; constructor(protected options: WrappedWorkerConnectionOptions) { + super(); + if (options.remoteCanCloseUnexpectedly) { this.notifyRemoteClosed = new AbortController(); } @@ -72,18 +80,21 @@ export class WorkerWrappedAsyncDatabaseConnection this.baseConnection.isAutoCommit()); } - private withRemote(workerPromise: () => Promise): Promise { + private withRemote(workerPromise: () => Promise, fireActionOnAbort = false): Promise { const controller = this.notifyRemoteClosed; if (controller) { return new Promise((resolve, reject) => { if (controller.signal.aborted) { - reject(new Error('Called operation on closed remote')); - // Don't run the operation if we're going to reject - return; + reject(new ConnectionClosedError('Called operation on closed remote')); + if (!fireActionOnAbort) { + // Don't run the operation if we're going to reject + // We might want to fire-and-forget the operation in some cases (like a close operation) + return; + } } function handleAbort() { - reject(new Error('Remote peer closed with request in flight')); + reject(new ConnectionClosedError('Remote peer closed with request in flight')); } function completePromise(action: () => void) { @@ -164,10 +175,12 @@ export class WorkerWrappedAsyncDatabaseConnection this.baseConnection.close()); + // fire and forget the close operation + await this.withRemote(() => this.baseConnection.close(), true); } finally { this.options.remote[Comlink.releaseProxy](); this.options.onClose?.(); + this.iterateListeners((l) => l.closing?.()); } } diff --git a/packages/web/src/db/adapters/wa-sqlite/WASQLiteConnection.ts b/packages/web/src/db/adapters/wa-sqlite/WASQLiteConnection.ts index 8860a7233..c94a4d3f0 100644 --- a/packages/web/src/db/adapters/wa-sqlite/WASQLiteConnection.ts +++ 
b/packages/web/src/db/adapters/wa-sqlite/WASQLiteConnection.ts @@ -3,7 +3,6 @@ import { BaseObserver, BatchedUpdateNotification } from '@powersync/common'; import { Mutex } from 'async-mutex'; import { AsyncDatabaseConnection, OnTableChangeCallback, ProxiedQueryResult } from '../AsyncDatabaseConnection'; import { ResolvedWASQLiteOpenFactoryOptions } from './WASQLiteOpenFactory'; - /** * List of currently tested virtual filesystems */ @@ -126,9 +125,10 @@ export const DEFAULT_MODULE_FACTORIES = { } // @ts-expect-error The types for this static method are missing upstream const { OPFSCoopSyncVFS } = await import('@journeyapps/wa-sqlite/src/examples/OPFSCoopSyncVFS.js'); + const vfs = await OPFSCoopSyncVFS.create(options.dbFileName, module); return { module, - vfs: await OPFSCoopSyncVFS.create(options.dbFileName, module) + vfs }; } }; @@ -387,7 +387,15 @@ export class WASqliteConnection async close() { this.broadcastChannel?.close(); - await this.sqliteAPI.close(this.dbP); + await this.acquireExecuteLock(async () => { + /** + * Running the close operation inside the same execute mutex prevents errors like: + * ``` + * unable to close due to unfinalized statements or unfinished backups + * ``` + */ + await this.sqliteAPI.close(this.dbP); + }); } async registerOnTableChange(callback: OnTableChangeCallback) { diff --git a/packages/web/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts b/packages/web/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts index 487d121aa..fa4528907 100644 --- a/packages/web/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts +++ b/packages/web/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts @@ -1,17 +1,17 @@ -import { type ILogLevel, DBAdapter } from '@powersync/common'; +import { DBAdapter, type ILogLevel } from '@powersync/common'; import * as Comlink from 'comlink'; import { openWorkerDatabasePort, resolveWorkerDatabasePortFactory } from '../../../worker/db/open-worker-database'; import { AbstractWebSQLOpenFactory } from 
'../AbstractWebSQLOpenFactory'; import { AsyncDatabaseConnection, OpenAsyncDatabaseConnection } from '../AsyncDatabaseConnection'; import { LockedAsyncDatabaseAdapter } from '../LockedAsyncDatabaseAdapter'; +import { WorkerWrappedAsyncDatabaseConnection } from '../WorkerWrappedAsyncDatabaseConnection'; import { DEFAULT_CACHE_SIZE_KB, ResolvedWebSQLOpenOptions, TemporaryStorageOption, WebSQLOpenFactoryOptions } from '../web-sql-flags'; -import { WorkerWrappedAsyncDatabaseConnection } from '../WorkerWrappedAsyncDatabaseConnection'; -import { WASqliteConnection, WASQLiteVFS } from './WASQLiteConnection'; +import { WASQLiteVFS, WASqliteConnection } from './WASQLiteConnection'; export interface WASQLiteOpenFactoryOptions extends WebSQLOpenFactoryOptions { vfs?: WASQLiteVFS; diff --git a/packages/web/src/db/sync/SharedWebStreamingSyncImplementation.ts b/packages/web/src/db/sync/SharedWebStreamingSyncImplementation.ts index 66d3b51ed..fe2bb3ad2 100644 --- a/packages/web/src/db/sync/SharedWebStreamingSyncImplementation.ts +++ b/packages/web/src/db/sync/SharedWebStreamingSyncImplementation.ts @@ -2,20 +2,19 @@ import { PowerSyncConnectionOptions, PowerSyncCredentials, SubscribedStream, - SyncStatus, SyncStatusOptions } from '@powersync/common'; import * as Comlink from 'comlink'; +import { getNavigatorLocks } from '../../shared/navigator'; import { AbstractSharedSyncClientProvider } from '../../worker/sync/AbstractSharedSyncClientProvider'; import { ManualSharedSyncPayload, SharedSyncClientEvent } from '../../worker/sync/SharedSyncImplementation'; -import { DEFAULT_CACHE_SIZE_KB, resolveWebSQLFlags, TemporaryStorageOption } from '../adapters/web-sql-flags'; +import { WorkerClient } from '../../worker/sync/WorkerClient'; import { WebDBAdapter } from '../adapters/WebDBAdapter'; +import { DEFAULT_CACHE_SIZE_KB, TemporaryStorageOption, resolveWebSQLFlags } from '../adapters/web-sql-flags'; import { WebStreamingSyncImplementation, WebStreamingSyncImplementationOptions } from 
'./WebStreamingSyncImplementation'; -import { WorkerClient } from '../../worker/sync/WorkerClient'; -import { getNavigatorLocks } from '../../shared/navigator'; /** * The shared worker will trigger methods on this side of the message port @@ -146,7 +145,25 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem ).port; } + /** + * Pass along any sync status updates to this listener + */ + this.clientProvider = new SharedSyncClientProvider( + this.webOptions, + (status) => { + this.updateSyncStatus(status); + }, + options.db + ); + this.syncManager = Comlink.wrap(this.messagePort); + /** + * The sync worker will call this client provider when it needs + * to fetch credentials or upload data. + * This performs bi-directional method calling. + */ + Comlink.expose(this.clientProvider, this.messagePort); + this.syncManager.setLogLevel(this.logger.getLevel()); this.triggerCrudUpload = this.syncManager.triggerCrudUpload; @@ -157,10 +174,49 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem * DB worker, but a port to the DB worker can be transferred to the * sync worker. */ + + this.isInitialized = this._init(); + } + + protected async _init() { + /** + * The general flow of initialization is: + * - The client requests a unique navigator lock. + * - Once the lock is acquired, we register the lock with the shared worker. + * - The shared worker can then request the same lock. The client has been closed if the shared worker can acquire the lock. + * - Once the shared worker knows the client's lock, we can guarantee that the shared worker will detect if the client has been closed. + * - This makes the client safe for the shared worker to use. + * - The client is only added to the SharedSyncImplementation once the lock has been registered. + * This ensures we don't ever keep track of dead clients (tabs that closed before the lock was registered). + * - The client side lock is held until the client is disposed. 
+ * - We resolve the top-level promise after the lock has been registered with the shared worker. + * - The client sends the params to the shared worker after locks have been registered. + */ + await new Promise((resolve) => { + // Request a random lock until this client is disposed. The name of the lock is sent to the shared worker, which + // will also attempt to acquire it. Since the lock is returned when the tab is closed, this allows the share worker + // to free resources associated with this tab. + // We take hold of this lock as soon-as-possible in order to cater for potentially closed tabs. + getNavigatorLocks().request(`tab-close-signal-${crypto.randomUUID()}`, async (lock) => { + if (this.abortOnClose.signal.aborted) { + return; + } + // Awaiting here ensures the worker is waiting for the lock + await this.syncManager.addLockBasedCloseSignal(lock!.name); + + // The lock has been registered, we can continue with the initialization + resolve(); + + await new Promise((r) => { + this.abortOnClose.signal.onabort = () => r(); + }); + }); + }); + const { crudUploadThrottleMs, identifier, retryDelayMs } = this.options; const flags = { ...this.webOptions.flags, workers: undefined }; - this.isInitialized = this.syncManager.setParams( + await this.syncManager.setParams( { dbParams: this.dbAdapter.getConfiguration(), streamOptions: { @@ -170,39 +226,8 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem flags: flags } }, - options.subscriptions + this.options.subscriptions ); - - /** - * Pass along any sync status updates to this listener - */ - this.clientProvider = new SharedSyncClientProvider( - this.webOptions, - (status) => { - this.iterateListeners((l) => this.updateSyncStatus(status)); - }, - options.db - ); - - /** - * The sync worker will call this client provider when it needs - * to fetch credentials or upload data. - * This performs bi-directional method calling. 
- */ - Comlink.expose(this.clientProvider, this.messagePort); - - // Request a random lock until this client is disposed. The name of the lock is sent to the shared worker, which - // will also attempt to acquire it. Since the lock is returned when the tab is closed, this allows the share worker - // to free resources associated with this tab. - getNavigatorLocks().request(`tab-close-signal-${crypto.randomUUID()}`, async (lock) => { - if (!this.abortOnClose.signal.aborted) { - this.syncManager.addLockBasedCloseSignal(lock!.name); - - await new Promise((r) => { - this.abortOnClose.signal.onabort = () => r(); - }); - } - }); } /** @@ -231,8 +256,6 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem async dispose(): Promise { await this.waitForReady(); - await super.dispose(); - await new Promise((resolve) => { // Listen for the close acknowledgment from the worker this.messagePort.addEventListener('message', (event) => { @@ -249,6 +272,9 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem }; this.messagePort.postMessage(closeMessagePayload); }); + + await super.dispose(); + this.abortOnClose.abort(); // Release the proxy @@ -263,12 +289,4 @@ export class SharedWebStreamingSyncImplementation extends WebStreamingSyncImplem updateSubscriptions(subscriptions: SubscribedStream[]): void { this.syncManager.updateSubscriptions(subscriptions); } - - /** - * Used in tests to force a connection states - */ - private async _testUpdateStatus(status: SyncStatus) { - await this.isInitialized; - return this.syncManager._testUpdateAllStatuses(status.toJSON()); - } } diff --git a/packages/web/src/worker/db/WASQLiteDB.worker.ts b/packages/web/src/worker/db/WASQLiteDB.worker.ts index 4f40fdad4..4fd1bd747 100644 --- a/packages/web/src/worker/db/WASQLiteDB.worker.ts +++ b/packages/web/src/worker/db/WASQLiteDB.worker.ts @@ -17,7 +17,6 @@ const logger = createLogger('db-worker'); const DBMap = new Map(); const OPEN_DB_LOCK = 
'open-wasqlite-db'; - let nextClientId = 1; const openDBShared = async (options: WorkerDBOpenerOptions): Promise => { diff --git a/packages/web/src/worker/sync/SharedSyncImplementation.ts b/packages/web/src/worker/sync/SharedSyncImplementation.ts index 5c4f5b683..4a188596d 100644 --- a/packages/web/src/worker/sync/SharedSyncImplementation.ts +++ b/packages/web/src/worker/sync/SharedSyncImplementation.ts @@ -2,14 +2,14 @@ import { AbortOperation, BaseObserver, ConnectionManager, - createLogger, DBAdapter, PowerSyncBackendConnector, SqliteBucketStorage, SubscribedStream, SyncStatus, - type ILogger, + createLogger, type ILogLevel, + type ILogger, type PowerSyncConnectionOptions, type StreamingSyncImplementation, type StreamingSyncImplementationListener, @@ -25,8 +25,8 @@ import { import { OpenAsyncDatabaseConnection } from '../../db/adapters/AsyncDatabaseConnection'; import { LockedAsyncDatabaseAdapter } from '../../db/adapters/LockedAsyncDatabaseAdapter'; -import { ResolvedWebSQLOpenOptions } from '../../db/adapters/web-sql-flags'; import { WorkerWrappedAsyncDatabaseConnection } from '../../db/adapters/WorkerWrappedAsyncDatabaseConnection'; +import { ResolvedWebSQLOpenOptions } from '../../db/adapters/web-sql-flags'; import { AbstractSharedSyncClientProvider } from './AbstractSharedSyncClientProvider'; import { BroadcastLogger } from './BroadcastLogger'; @@ -76,6 +76,7 @@ export type WrappedSyncPort = { db?: DBAdapter; currentSubscriptions: SubscribedStream[]; closeListeners: (() => void | Promise)[]; + isClosing: boolean; }; /** @@ -106,7 +107,6 @@ export class SharedSyncImplementation extends BaseObserver { - return this.portMutex.runExclusive(async () => { - await this.waitForReady(); - if (!this.dbAdapter) { - await this.openInternalDB(); - } - - const sync = this.generateStreamingImplementation(); - const onDispose = sync.registerListener({ - statusChanged: (status) => { - this.updateAllStatuses(status.toJSON()); - } - }); + await this.waitForReady(); - return { 
- sync, - onDispose - }; + const sync = this.generateStreamingImplementation(); + const onDispose = sync.registerListener({ + statusChanged: (status) => { + this.updateAllStatuses(status.toJSON()); + } }); + + return { + sync, + onDispose + }; }, logger: this.logger }); @@ -171,6 +169,32 @@ export class SharedSyncImplementation extends BaseObserver { + // Find the last port which is not closing + return await this.portMutex.runExclusive(() => { + for (let i = this.ports.length - 1; i >= 0; i--) { + if (!this.ports[i].isClosing) { + return this.ports[i]; + } + } + return; + }); + } + + /** + * In some very rare cases a specific tab might not respond to requests. + * This returns a random port which is not closing. + */ + protected async getRandomWrappedPort(): Promise { + return await this.portMutex.runExclusive(() => { + const nonClosingPorts = this.ports.filter((p) => !p.isClosing); + return nonClosingPorts[Math.floor(Math.random() * nonClosingPorts.length)]; + }); + } + async waitForStatus(status: SyncStatusOptions): Promise { return this.withSyncImplementation(async (sync) => { return sync.waitForStatus(status); @@ -217,33 +241,51 @@ export class SharedSyncImplementation extends BaseObserver { this.collectActiveSubscriptions(); - if (this.syncParams) { - // Cannot modify already existing sync implementation params - // But we can ask for a DB adapter, if required, at this point. 
- - if (!this.dbAdapter) { - await this.openInternalDB(); - } - return; - } + }); - // First time setting params - this.syncParams = params; - if (params.streamOptions?.flags?.broadcastLogs) { - this.logger = this.broadCastLogger; - } + if (this.syncParams) { + // Cannot modify already existing sync implementation params + return; + } - self.onerror = (event) => { - // Share any uncaught events on the broadcast logger - this.logger.error('Uncaught exception in PowerSync shared sync worker', event); - }; + // First time setting params + this.syncParams = params; + if (params.streamOptions?.flags?.broadcastLogs) { + this.logger = this.broadCastLogger; + } - if (!this.dbAdapter) { - await this.openInternalDB(); + const lockedAdapter = new LockedAsyncDatabaseAdapter({ + name: params.dbParams.dbFilename, + openConnection: async () => { + // Gets a connection from the clients when a new connection is requested. + const db = await this.openInternalDB(); + db.registerListener({ + closing: () => { + lockedAdapter.reOpenInternalDB(); + } + }); + return db; + }, + logger: this.logger, + reOpenOnConnectionClosed: true + }); + this.distributedDB = lockedAdapter; + await lockedAdapter.init(); + + lockedAdapter.registerListener({ + databaseReOpened: () => { + // We may have missed some table updates while the database was closed. + // We can poke the crud in case we missed any updates. 
+ this.connectionManager.syncStreamImplementation?.triggerCrudUpload(); } - - this.iterateListeners((l) => l.initialized?.()); }); + + self.onerror = (event) => { + // Share any uncaught events on the broadcast logger + this.logger.error('Uncaught exception in PowerSync shared sync worker', event); + }; + + this.iterateListeners((l) => l.initialized?.()); } async dispose() { @@ -276,7 +318,8 @@ export class SharedSyncImplementation extends BaseObserver(port), currentSubscriptions: [], - closeListeners: [] + closeListeners: [], + isClosing: false } satisfies WrappedSyncPort; this.ports.push(portProvider); @@ -295,14 +338,17 @@ export class SharedSyncImplementation extends BaseObserver { + return await this.portMutex.runExclusive(async () => { const index = this.ports.findIndex((p) => p == port); if (index < 0) { this.logger.warn(`Could not remove port ${port} since it is not present in active ports.`); - return {}; + return () => {}; } const trackedPort = this.ports[index]; @@ -321,42 +367,15 @@ export class SharedSyncImplementation extends BaseObserver 0; - return { - shouldReconnect, - trackedPort - }; - }); - - if (!trackedPort) { - // We could not find the port to remove - return () => {}; - } - - for (const closeListener of trackedPort.closeListeners) { - await closeListener(); - } - - if (this.dbAdapter && this.dbAdapter == trackedPort.db) { - // Unconditionally close the connection because the database it's writing to has just been closed. - // The connection has been closed previously, this might throw. We should be able to ignore it. - await this.connectionManager - .disconnect() - .catch((ex) => this.logger.warn('Error while disconnecting. Will attempt to reconnect.', ex)); - - // Clearing the adapter will result in a new one being opened in connect - this.dbAdapter = null; - - if (shouldReconnect) { - await this.connectionManager.connect(CONNECTOR_PLACEHOLDER, this.lastConnectOptions ?? 
{}); + // Close the worker wrapped database connection, we can't accurately rely on this connection + for (const closeListener of trackedPort.closeListeners) { + await closeListener(); } - } - // Re-index subscriptions, the subscriptions of the removed port would no longer be considered. - this.collectActiveSubscriptions(); + this.collectActiveSubscriptions(); - // Release proxy - return () => trackedPort.clientProvider[Comlink.releaseProxy](); + return () => trackedPort.clientProvider[Comlink.releaseProxy](); + }); } triggerCrudUpload() { @@ -401,11 +420,14 @@ export class SharedSyncImplementation extends BaseObserver { - const lastPort = this.ports[this.ports.length - 1]; + const lastPort = await this.getLastWrappedPort(); + if (!lastPort) { + throw new Error('No client port found to invalidate credentials'); + } try { this.logger.log('calling the last port client provider to invalidate credentials'); lastPort.clientProvider.invalidateCredentials(); @@ -414,7 +436,10 @@ export class SharedSyncImplementation extends BaseObserver { - const lastPort = this.ports[this.ports.length - 1]; + const lastPort = await this.getLastWrappedPort(); + if (!lastPort) { + throw new Error('No client port found to fetch credentials'); + } return new Promise(async (resolve, reject) => { const abortController = new AbortController(); this.fetchCredentialsController = { @@ -437,7 +462,10 @@ export class SharedSyncImplementation extends BaseObserver { - const lastPort = this.ports[this.ports.length - 1]; + const lastPort = await this.getLastWrappedPort(); + if (!lastPort) { + throw new Error('No client port found to upload crud'); + } return new Promise(async (resolve, reject) => { const abortController = new AbortController(); @@ -464,39 +492,91 @@ export class SharedSyncImplementation extends BaseObserver { + abortController.abort(); + }, 10_000); + + /** + * Handle cases where the client might close while opening a connection. 
+ */ + const abortController = new AbortController(); + const closeListener = () => { + abortController.abort(); + }; + + const removeCloseListener = () => { + const index = client.closeListeners.indexOf(closeListener); + if (index >= 0) { + client.closeListeners.splice(index, 1); + } + }; + + client.closeListeners.push(closeListener); + + const workerPort = await withAbort({ + action: () => client.clientProvider.getDBWorkerPort(), + signal: abortController.signal, + cleanupOnAbort: (port) => { + port.close(); + } + }).catch((ex) => { + removeCloseListener(); + throw ex; + }); + const remote = Comlink.wrap(workerPort); const identifier = this.syncParams!.dbParams.dbFilename; - const db = await remote(this.syncParams!.dbParams); - const locked = new LockedAsyncDatabaseAdapter({ - name: identifier, - openConnection: async () => { - const wrapped = new WorkerWrappedAsyncDatabaseConnection({ - remote, - baseConnection: db, - identifier, - // It's possible for this worker to outlive the client hosting the database for us. We need to be prepared for - // that and ensure pending requests are aborted when the tab is closed. - remoteCanCloseUnexpectedly: true - }); - lastClient.closeListeners.push(async () => { - this.logger.info('Aborting open connection because associated tab closed.'); - await wrapped.close().catch((ex) => this.logger.warn('error closing database connection', ex)); - wrapped.markRemoteClosed(); - }); - return wrapped; - }, - logger: this.logger + /** + * The open could fail if the tab is closed while we're busy opening the database. + * This operation is typically executed inside an exclusive portMutex lock. + * We typically execute the closeListeners using the portMutex in a different context. + * We can't rely on the closeListeners to abort the operation if the tab is closed. 
+ */ + const db = await withAbort({ + action: () => remote(this.syncParams!.dbParams), + signal: abortController.signal, + cleanupOnAbort: (db) => { + db.close(); + } + }).finally(() => { + // We can remove the close listener here since we no longer need it past this point. + removeCloseListener(); + }); + + clearTimeout(timeout); + + const wrapped = new WorkerWrappedAsyncDatabaseConnection({ + remote, + baseConnection: db, + identifier, + // It's possible for this worker to outlive the client hosting the database for us. We need to be prepared for + // that and ensure pending requests are aborted when the tab is closed. + remoteCanCloseUnexpectedly: true }); - await locked.init(); - this.dbAdapter = lastClient.db = locked; + client.closeListeners.push(async () => { + this.logger.info('Aborting open connection because associated tab closed.'); + /** + * Don't await this close operation. It might never resolve if the tab is closed. + * We mark the remote as closed first, this will reject any pending requests. + * We then call close. The close operation is configured to fire-and-forget, the main promise will reject immediately. + */ + wrapped.markRemoteClosed(); + wrapped.close().catch((ex) => this.logger.warn('error closing database connection', ex)); + }); + + return wrapped; } /** @@ -507,17 +587,43 @@ export class SharedSyncImplementation extends BaseObserver p.clientProvider.statusChanged(status)); } +} - /** - * A function only used for unit tests which updates the internal - * sync stream client and all tab client's sync status - */ - async _testUpdateAllStatuses(status: SyncStatusOptions) { - if (!this.connectionManager.syncStreamImplementation) { - throw new Error('Cannot update status without a sync stream implementation'); +/** + * Runs the action with an abort controller. 
+ */ +function withAbort(options: { + action: () => Promise; + signal: AbortSignal; + cleanupOnAbort?: (result: T) => void; +}): Promise { + const { action, signal, cleanupOnAbort } = options; + return new Promise((resolve, reject) => { + if (signal.aborted) { + reject(new AbortOperation('Operation aborted by abort controller')); + return; } - // Only assigning, don't call listeners for this test - this.connectionManager.syncStreamImplementation!.syncStatus = new SyncStatus(status); - this.updateAllStatuses(status); - } + + function handleAbort() { + signal.removeEventListener('abort', handleAbort); + reject(new AbortOperation('Operation aborted by abort controller')); + } + + signal.addEventListener('abort', handleAbort, { once: true }); + + function completePromise(action: () => void) { + signal.removeEventListener('abort', handleAbort); + action(); + } + + action() + .then((data) => { + // We already rejected due to the abort, allow for cleanup + if (signal.aborted) { + return completePromise(() => cleanupOnAbort?.(data)); + } + completePromise(() => resolve(data)); + }) + .catch((e) => completePromise(() => reject(e))); + }); } diff --git a/packages/web/src/worker/sync/SharedSyncImplementation.worker.ts b/packages/web/src/worker/sync/SharedSyncImplementation.worker.ts index 157d7c1cb..8693575b6 100644 --- a/packages/web/src/worker/sync/SharedSyncImplementation.worker.ts +++ b/packages/web/src/worker/sync/SharedSyncImplementation.worker.ts @@ -10,5 +10,5 @@ const sharedSyncImplementation = new SharedSyncImplementation(); _self.onconnect = async function (event: MessageEvent) { const port = event.ports[0]; - await new WorkerClient(sharedSyncImplementation, port).initialize(); + new WorkerClient(sharedSyncImplementation, port); }; diff --git a/packages/web/src/worker/sync/WorkerClient.ts b/packages/web/src/worker/sync/WorkerClient.ts index 1dfffe9b2..519d42119 100644 --- a/packages/web/src/worker/sync/WorkerClient.ts +++ 
b/packages/web/src/worker/sync/WorkerClient.ts @@ -1,4 +1,6 @@ +import { ILogLevel, PowerSyncConnectionOptions, SubscribedStream } from '@powersync/common'; import * as Comlink from 'comlink'; +import { getNavigatorLocks } from '../../shared/navigator'; import { ManualSharedSyncPayload, SharedSyncClientEvent, @@ -6,8 +8,6 @@ import { SharedSyncInitOptions, WrappedSyncPort } from './SharedSyncImplementation'; -import { ILogLevel, PowerSyncConnectionOptions, SubscribedStream, SyncStatusOptions } from '@powersync/common'; -import { getNavigatorLocks } from '../../shared/navigator'; /** * A client to the shared sync worker. @@ -17,13 +17,13 @@ import { getNavigatorLocks } from '../../shared/navigator'; */ export class WorkerClient { private resolvedPort: WrappedSyncPort | null = null; + protected resolvedPortPromise: Promise | null = null; constructor( private readonly sync: SharedSyncImplementation, private readonly port: MessagePort - ) {} - - async initialize() { + ) { + Comlink.expose(this, this.port); /** * Adds an extra listener which can remove this port * from the list of monitored ports. @@ -34,9 +34,6 @@ export class WorkerClient { await this.removePort(); } }); - - this.resolvedPort = await this.sync.addPort(this.port); - Comlink.expose(this, this.port); } private async removePort() { @@ -59,7 +56,10 @@ export class WorkerClient { * When the client tab is closed, its lock will be returned. So when the shared worker attempts to acquire the lock, * it can consider the connection to be closed. */ - addLockBasedCloseSignal(name: string) { + async addLockBasedCloseSignal(name: string) { + // Only add the port once the lock has been obtained on the client. 
+ this.resolvedPort = await this.sync.addPort(this.port); + // Don't await this lock request getNavigatorLocks().request(name, async () => { await this.removePort(); }); @@ -99,8 +99,4 @@ export class WorkerClient { disconnect() { return this.sync.disconnect(); } - - async _testUpdateAllStatuses(status: SyncStatusOptions) { - return this.sync._testUpdateAllStatuses(status); - } } diff --git a/packages/web/tests/error_serialization.test.ts b/packages/web/tests/error_serialization.test.ts new file mode 100644 index 000000000..df192378c --- /dev/null +++ b/packages/web/tests/error_serialization.test.ts @@ -0,0 +1,43 @@ +import { SyncStreamConnectionMethod } from '@powersync/common'; +import { describe, expect } from 'vitest'; +import { sharedMockSyncServiceTest } from './utils/mockSyncServiceTest'; + +/** + * Test to verify that Error instances are properly serialized when passed through MessagePorts. + * When errors occur in the shared worker and are reported via statusChanged, they should + * be properly serialized and deserialized to appear in the sync status. 
+ */ +describe('Error Serialization through MessagePorts', { sequential: true }, () => { + sharedMockSyncServiceTest( + 'should serialize and deserialize Error in sync status when connection fails', + { timeout: 10_000 }, + async ({ context: { database, mockService } }) => { + await mockService.setAutomaticResponse({ + status: 401, + headers: { 'Content-Type': 'application/json' }, + bodyLines: ['Unauthorized'] + }); + + // Start connection attempt + await database.connect( + { + fetchCredentials: async () => { + return { + endpoint: 'http://localhost:3000', + token: 'test-token' + }; + }, + uploadData: async () => {} + }, + { + connectionMethod: SyncStreamConnectionMethod.HTTP + } + ); + + expect(database.currentStatus.dataFlowStatus?.downloadError).toBeDefined(); + expect(database.currentStatus.dataFlowStatus?.downloadError?.name).toBe('Error'); + expect(database.currentStatus.dataFlowStatus?.downloadError?.message).toBe('HTTP : "Unauthorized"\n'); + expect(database.currentStatus.dataFlowStatus?.downloadError?.stack).toBeDefined(); + } + ); +}); diff --git a/packages/web/tests/mockSyncServiceExample.test.ts b/packages/web/tests/mockSyncServiceExample.test.ts new file mode 100644 index 000000000..e7165e944 --- /dev/null +++ b/packages/web/tests/mockSyncServiceExample.test.ts @@ -0,0 +1,88 @@ +/** + * Example test demonstrating how to use the mock sync service for shared worker environments. + * + * This example shows how to: + * 1. Use the sharedMockSyncServiceTest utility to set up the test environment + * 2. Use the mock service to get pending requests and create responses + * 3. Send data via sync stream and query it in the database + * + * Note: This is an example file - rename to .test.ts to use it in actual tests. 
+ */ + +import { StreamingSyncCheckpoint } from '@powersync/common'; +import { describe, expect, vi } from 'vitest'; +import { sharedMockSyncServiceTest } from './utils/mockSyncServiceTest'; + +describe('Mock Sync Service Example', { timeout: 100000 }, () => { + sharedMockSyncServiceTest( + 'should allow mocking sync responses in shared worker', + { timeout: 100000 }, + async ({ context: { database, connect, mockService } }) => { + // Call connect to start the sync worker and get the sync service + const { syncRequestId } = await connect(); + + // Push a checkpoint with buckets (following node test pattern) + const checkpoint: StreamingSyncCheckpoint = { + checkpoint: { + last_op_id: '1', + buckets: [ + { + bucket: 'a', + count: 1, + checksum: 0, + priority: 3 + } + ], + write_checkpoint: undefined + } + }; + + await mockService.pushBodyLine(syncRequestId, checkpoint); + + // The connect call should resolve by now + await mockService.pushBodyLine(syncRequestId, { + data: { + bucket: 'a', + data: [ + { + checksum: 0, + op_id: '1', + op: 'PUT', + object_id: '1', + object_type: 'lists', + data: '{"name": "from server"}' + } + ] + } + }); + + // Push checkpoint_complete to finish the sync + await mockService.pushBodyLine(syncRequestId, { + checkpoint_complete: { + last_op_id: '1' + } + }); + + // Complete the response + await mockService.completeResponse(syncRequestId); + + // Wait for sync to complete and verify the data was saved + await vi.waitFor(async () => { + const rows = await database.getAll('SELECT * FROM lists WHERE id = ?', ['1']); + expect(rows).toHaveLength(1); + expect(rows[0]).toMatchObject({ + id: '1', + name: 'from server' + }); + }); + + // Verify the data by querying the database + const allRows = await database.getAll('SELECT * FROM lists'); + expect(allRows).toHaveLength(1); + expect(allRows[0]).toMatchObject({ + id: '1', + name: 'from server' + }); + } + ); +}); diff --git a/packages/web/tests/mocks/MockWebRemote.ts 
b/packages/web/tests/mocks/MockWebRemote.ts new file mode 100644 index 000000000..5613a3f98 --- /dev/null +++ b/packages/web/tests/mocks/MockWebRemote.ts @@ -0,0 +1,128 @@ +import { + AbstractRemote, + AbstractRemoteOptions, + BSONImplementation, + DataStream, + DEFAULT_REMOTE_LOGGER, + FetchImplementation, + FetchImplementationProvider, + ILogger, + RemoteConnector, + SocketSyncStreamOptions +} from '@powersync/common'; +import { serialize, type BSON } from 'bson'; +import { MockSyncService, setupMockServiceMessageHandler } from '../utils/MockSyncServiceWorker'; + +/** + * Mock fetch provider that intercepts all requests and routes them to the mock sync service. + * Used for testing in shared worker environments with enableMultipleTabs: true. + * + * When running in a shared worker context, this will: + * 1. Intercept all requests and register them as pending requests + * 2. Wait for a client to create a response before returning + * 3. Set up message handler for the mock service when onconnect is called + */ +class MockSyncServiceFetchProvider extends FetchImplementationProvider { + getFetch(): FetchImplementation { + return async (input: RequestInfo | URL, init?: RequestInit): Promise => { + const request = new Request(input, init); + + const mockService = MockSyncService.GLOBAL_INSTANCE; + + // Read the request body (if any) + let body: any = null; + try { + if (request.body) { + const clonedRequest = request.clone(); + body = await clonedRequest.json().catch(() => { + // If JSON parsing fails, try text + return clonedRequest.text().catch(() => null); + }); + } + } catch (e) { + // Body might not be readable, that's okay + } + + // Extract headers from the request + const headers: Record = {}; + request.headers.forEach((value, key) => { + headers[key] = value; + }); + + // Register as a pending request and wait for client to create response + return await mockService.registerPendingRequest(request.url, request.method, headers, body, request.signal); + }; + } +} 
+ +/** + * Check if we're running in a shared worker context + */ +function isSharedWorkerContext(): boolean { + const isSharedWorker = + typeof SharedWorkerGlobalScope !== 'undefined' && + typeof self !== 'undefined' && + (self as any).constructor?.name === 'SharedWorkerGlobalScope'; + return isSharedWorker; +} + +if (isSharedWorkerContext()) { + const _self: SharedWorkerGlobalScope = self as any; + console.log('MockWebRemote: setting up connect listener'); + + /** + * This listener should be called in tandem with the shared sync worker's listener. + */ + _self.addEventListener('connect', async function (event: MessageEvent) { + console.log('MockWebRemote: connect listener called'); + const port = event.ports[0]; + + // Set up message handler for the mock service on this port + // Tests can create a separate SharedWorker connection to access this + setupMockServiceMessageHandler(port); + }); +} + +export class WebRemote extends AbstractRemote { + private _bson: BSONImplementation | undefined; + + constructor( + protected connector: RemoteConnector, + protected logger: ILogger = DEFAULT_REMOTE_LOGGER, + options?: Partial + ) { + // Use mock service fetch provider if we're in a shared worker context + const fetchProvider = new MockSyncServiceFetchProvider(); + + super(connector, logger, { + ...(options ?? {}), + fetchImplementation: options?.fetchImplementation ?? fetchProvider + }); + } + + getUserAgent(): string { + return 'powersync-web-mock'; + } + + async getBSON(): Promise { + if (this._bson) { + return this._bson; + } + const { BSON } = await import('bson'); + this._bson = BSON; + return this._bson; + } + + /** + * Override socketStreamRaw to use HTTP method (postStreamRaw) instead. + * This allows us to use the same mocks for both socket and HTTP streaming. 
+ */ + async socketStreamRaw( + options: SocketSyncStreamOptions, + map: (buffer: Uint8Array) => T, + bson?: typeof BSON + ): Promise> { + // postStreamRaw decodes to strings, so convert back to Uint8Array for the map function + return await this.postStreamRaw(options, (line: string) => map(serialize(JSON.parse(line)))); + } +} diff --git a/packages/web/tests/multiple_instances.test.ts b/packages/web/tests/multiple_instances.test.ts index 76fadaf26..a8a239fc4 100644 --- a/packages/web/tests/multiple_instances.test.ts +++ b/packages/web/tests/multiple_instances.test.ts @@ -1,24 +1,11 @@ -import { - AbstractPowerSyncDatabase, - createBaseLogger, - createLogger, - DEFAULT_CRUD_UPLOAD_THROTTLE_MS, - SqliteBucketStorage, - SyncStatus -} from '@powersync/common'; -import { - OpenAsyncDatabaseConnection, - SharedWebStreamingSyncImplementation, - SharedWebStreamingSyncImplementationOptions, - WASqliteConnection, - WebRemote -} from '@powersync/web'; +import { AbstractPowerSyncDatabase, createBaseLogger, createLogger } from '@powersync/common'; +import { OpenAsyncDatabaseConnection, WASqliteConnection } from '@powersync/web'; import * as Comlink from 'comlink'; import { beforeAll, describe, expect, it, onTestFinished, vi } from 'vitest'; import { LockedAsyncDatabaseAdapter } from '../src/db/adapters/LockedAsyncDatabaseAdapter'; import { WebDBAdapter } from '../src/db/adapters/WebDBAdapter'; import { WorkerWrappedAsyncDatabaseConnection } from '../src/db/adapters/WorkerWrappedAsyncDatabaseConnection'; -import { TestConnector } from './utils/MockStreamOpenFactory'; +import { createTestConnector, sharedMockSyncServiceTest } from './utils/mockSyncServiceTest'; import { generateTestDb, testSchema } from './utils/testDb'; const DB_FILENAME = 'test-multiple-instances.db'; @@ -50,45 +37,57 @@ describe('Multiple Instances', { sequential: true }, () => { expect(assets.length).equals(1); }); - it('should broadcast logs from shared sync worker', { timeout: 20000 }, async () => { - 
const logger = createLogger('test-logger'); - const spiedErrorLogger = vi.spyOn(logger, 'error'); - const spiedDebugLogger = vi.spyOn(logger, 'debug'); - - const powersync = generateTestDb({ - logger, - database: { - dbFilename: 'broadcast-logger-test.sqlite' - }, - schema: testSchema - }); + sharedMockSyncServiceTest( + 'should broadcast logs from shared sync worker', + { timeout: 10_000 }, + async ({ context: { openDatabase, mockService } }) => { + const logger = createLogger('test-logger'); + const spiedErrorLogger = vi.spyOn(logger, 'error'); + const spiedDebugLogger = vi.spyOn(logger, 'debug'); + + // Open an additional database which we can spy on the logs. + const powersync = openDatabase({ + logger + }); - powersync.connect({ - fetchCredentials: async () => { - return { - endpoint: 'http://localhost/does-not-exist', - token: 'none' - }; - }, - uploadData: async (db) => {} - }); + powersync.connect({ + fetchCredentials: async () => { + return { + endpoint: 'http://localhost/does-not-exist', + token: 'none' + }; + }, + uploadData: async (db) => {} + }); - // Should log that a connection attempt has been made - const message = 'Streaming sync iteration started'; - await vi.waitFor( - () => - expect( - spiedDebugLogger.mock.calls - .flat(1) - .find((argument) => typeof argument == 'string' && argument.includes(message)) - ).exist, - { timeout: 2000 } - ); - - // The connection should fail with an error - await vi.waitFor(() => expect(spiedErrorLogger.mock.calls.length).gt(0), { timeout: 2000 }); - // This test seems to take quite long while waiting for this disconnect call - }); + await vi.waitFor( + async () => { + const requests = await mockService.getPendingRequests(); + expect(requests.length).toBeGreaterThan(0); + const pendingRequestId = requests[0].id; + // Generate an error + await mockService.createResponse(pendingRequestId, 401, { 'Content-Type': 'application/json' }); + await mockService.completeResponse(pendingRequestId); + }, + { timeout: 3_000 } + 
); + + // Should log that a connection attempt has been made + const message = 'Streaming sync iteration started'; + await vi.waitFor( + () => + expect( + spiedDebugLogger.mock.calls + .flat(1) + .find((argument) => typeof argument == 'string' && argument.includes(message)) + ).exist, + { timeout: 2000 } + ); + + // The connection should fail with an error + await vi.waitFor(() => expect(spiedErrorLogger.mock.calls.length).gt(0), { timeout: 2000 }); + } + ); it('should maintain DB connections if instances call close', async () => { /** @@ -104,7 +103,7 @@ describe('Multiple Instances', { sequential: true }, () => { await createAsset(powersync2); }); - it('should handled interrupted transactions', { timeout: Infinity }, async () => { + it('should handled interrupted transactions', async () => { //Create a shared PowerSync database. We'll just use this for internally managing connections. const powersync = openDatabase(); await powersync.init(); @@ -223,170 +222,82 @@ describe('Multiple Instances', { sequential: true }, () => { await watchedPromise; }); - it('should share sync updates', async () => { - // Generate the first streaming sync implementation - const connector1 = new TestConnector(); - const db = openDatabase(); - await db.init(); - - // They need to use the same identifier to use the same shared worker. 
- const identifier = 'streaming-sync-shared'; - const syncOptions1: SharedWebStreamingSyncImplementationOptions = { - adapter: new SqliteBucketStorage(db.database), - remote: new WebRemote(connector1), - uploadCrud: async () => { - await connector1.uploadData(db); - }, - identifier, - crudUploadThrottleMs: DEFAULT_CRUD_UPLOAD_THROTTLE_MS, - retryDelayMs: 90_000, // Large delay to allow for testing - db: db.database as WebDBAdapter, - subscriptions: [] - }; - - const stream1 = new SharedWebStreamingSyncImplementation(syncOptions1); - await stream1.connect(); - // Generate the second streaming sync implementation - const connector2 = new TestConnector(); - const syncOptions2: SharedWebStreamingSyncImplementationOptions = { - adapter: new SqliteBucketStorage(db.database), - remote: new WebRemote(connector1), - uploadCrud: async () => { - await connector2.uploadData(db); - }, - identifier, - crudUploadThrottleMs: DEFAULT_CRUD_UPLOAD_THROTTLE_MS, - retryDelayMs: 90_000, // Large delay to allow for testing - db: db.database as WebDBAdapter, - subscriptions: [] - }; - - const stream2 = new SharedWebStreamingSyncImplementation(syncOptions2); - - const stream2UpdatedPromise = new Promise((resolve, reject) => { - const l = stream2.registerListener({ - statusChanged: (status) => { - if (status.connected) { - resolve(); - l(); - } - } + sharedMockSyncServiceTest( + 'should share sync updates', + { timeout: 10_000 }, + async ({ context: { database, connect, openDatabase } }) => { + const secondDatabase = openDatabase(); + + expect(database.currentStatus.connected).false; + expect(secondDatabase.currentStatus.connected).false; + // connect the second database in order for it to have access to the sync service. + secondDatabase.connect(createTestConnector()); + // Timing of this can be tricky due to the need for responding to a pending request. 
+ await vi.waitFor(() => expect(secondDatabase.currentStatus.connecting).true, { timeout: 5_000 }); + // connect the first database - this will actually connect to the sync service. + await connect(); + + expect(database.currentStatus.connected).true; + + await vi.waitFor(() => expect(secondDatabase.currentStatus.connected).true, { timeout: 5_000 }); + } + ); + + sharedMockSyncServiceTest( + 'should trigger uploads from last connected clients', + async ({ context: { database, openDatabase, connect, connector, mockService } }) => { + const secondDatabase = openDatabase(); + + expect(database.currentStatus.connected).false; + expect(secondDatabase.currentStatus.connected).false; + + // Don't actually upload data + connector.uploadData.mockImplementation(async (db) => { + console.log('uploading from first client'); }); - }); - - // hack to set the status to a new one for tests - (stream1 as any)['_testUpdateStatus'](new SyncStatus({ connected: true })); - - await stream2UpdatedPromise; - expect(stream2.isConnected).true; - - await stream1.dispose(); - await stream2.dispose(); - }); - - it('should trigger uploads from last connected clients', async () => { - // Generate the first streaming sync implementation - const connector1 = new TestConnector(); - const spy1 = vi.spyOn(connector1, 'uploadData'); - const db = openDatabase(); - await db.init(); - // They need to use the same identifier to use the same shared worker. - const identifier = db.database.name; + // Create something with CRUD in it. 
+ await database.execute('INSERT into lists (id, name) VALUES (uuid(), ?)', ['steven']); - // Resolves once the first connector has been called to upload data - let triggerUpload1: () => void; - const upload1TriggeredPromise = new Promise((resolve) => { - triggerUpload1 = resolve; - }); - - const sharedSyncOptions = { - adapter: new SqliteBucketStorage(db.database), - remote: new WebRemote(connector1), - db: db.database as WebDBAdapter, - identifier, - // The large delay here allows us to test between connection retries - retryDelayMs: 90_000, - crudUploadThrottleMs: DEFAULT_CRUD_UPLOAD_THROTTLE_MS, - subscriptions: [], - flags: { - broadcastLogs: true - } - }; - - // Create the first streaming client - const stream1 = new SharedWebStreamingSyncImplementation({ - ...sharedSyncOptions, - uploadCrud: async () => { - triggerUpload1(); - connector1.uploadData(db); - } - }); + // connect from the first database + await connect(); - // Generate the second streaming sync implementation - const connector2 = new TestConnector(); - // The second connector will be called first to upload, we don't want it to actually upload - // This will cause the sync uploads to be delayed as the CRUD queue did not change - const spy2 = vi.spyOn(connector2, 'uploadData').mockImplementation(async () => {}); + await vi.waitFor(() => expect(database.currentStatus.connected).true); - let triggerUpload2: () => void; - const upload2TriggeredPromise = new Promise((resolve) => { - triggerUpload2 = resolve; - }); + // It should initially try and upload from the first client + await vi.waitFor(() => expect(connector.uploadData).toHaveBeenCalledOnce(), { timeout: 2000 }); - const stream2 = new SharedWebStreamingSyncImplementation({ - ...sharedSyncOptions, - uploadCrud: async () => { - triggerUpload2(); - connector2.uploadData(db); - } - }); - - // Waits for the stream to be marked as connected - const stream2UpdatedPromise = new Promise((resolve, reject) => { - const l = stream2.registerListener({ - 
statusChanged: (status) => { - if (status.connected) { - resolve(); - l(); - } - } + const secondConnector = createTestConnector(); + // Don't actually upload data + secondConnector.uploadData.mockImplementation(async (db) => { + console.log('uploading from second client'); }); - }); - - // hack to set the status to connected for tests - await stream1.connect(); - // Hack, set the status to connected in order to trigger the upload - await (stream1 as any)['_testUpdateStatus'](new SyncStatus({ connected: true })); - - // The status in the second stream client should be updated - await stream2UpdatedPromise; - - expect(stream2.isConnected).true; - - // Create something with CRUD in it. - await db.execute('INSERT into customers (id, name, email) VALUES (uuid(), ?, ?)', [ - 'steven', - 'steven@journeyapps.com' - ]); - stream1.triggerCrudUpload(); - // The second connector should be called to upload - await upload2TriggeredPromise; - - // It should call the latest connected client - expect(spy2).toHaveBeenCalledOnce(); + // Connect the second database and wait for a pending request to appear + const secondConnectPromise = secondDatabase.connect(secondConnector); + let _pendingRequestId: string; + await vi.waitFor(async () => { + const requests = await mockService.getPendingRequests(); + expect(requests.length).toBeGreaterThan(0); + _pendingRequestId = requests[0].id; + }); + const pendingRequestId = _pendingRequestId!; + await mockService.createResponse(pendingRequestId, 200, { 'Content-Type': 'application/json' }); + await mockService.pushBodyLine(pendingRequestId, { + token_expires_in: 10000000 + }); + await secondConnectPromise; - // Close the second client, leaving only the first one - await stream2.dispose(); + // It should now upload from the second client + await vi.waitFor(() => expect(secondConnector.uploadData).toHaveBeenCalledOnce()); + await new Promise((resolve) => setTimeout(resolve, 5000)); + // Now disconnect and close the second client + await 
secondDatabase.close(); - // Hack, set the status to connected in order to trigger the upload - await (stream1 as any)['_testUpdateStatus'](new SyncStatus({ connected: true })); - stream1.triggerCrudUpload(); - // It should now upload from the first client - await upload1TriggeredPromise; + expect(database.currentStatus.connected).true; - expect(spy1).toHaveBeenCalledOnce(); - await stream1.dispose(); - }); + // It should now upload from the first client + await vi.waitFor(() => expect(connector.uploadData.mock.calls.length).greaterThanOrEqual(2), { timeout: 3000 }); + } + ); }); diff --git a/packages/web/tests/multiple_tabs_iframe.test.ts b/packages/web/tests/multiple_tabs_iframe.test.ts new file mode 100644 index 000000000..038425932 --- /dev/null +++ b/packages/web/tests/multiple_tabs_iframe.test.ts @@ -0,0 +1,407 @@ +import { WASQLiteVFS } from '@powersync/web'; +import { v4 as uuid } from 'uuid'; +import { describe, expect, it, onTestFinished } from 'vitest'; + +/** + * Creates an iframe with a PowerSync client that connects using the same database. + * The iframe uses dynamic import to load PowerSync modules. + * + * Note: This approach works in Vitest browser mode where modules are available + * via the Vite dev server. The iframe needs to access modules from the same origin. 
+ */ +interface IframeClient { + iframe: HTMLIFrameElement; + cleanup: () => Promise; + executeQuery: (query: string, parameters?: unknown[]) => Promise; + getCredentialsFetchCount: () => Promise; +} + +interface IframeClientResult { + iframe: HTMLIFrameElement; + cleanup: () => Promise; + ready: Promise; +} + +// Run tests for both IndexedDB and OPFS +createMultipleTabsTest(); // IndexedDB (default) +createMultipleTabsTest(WASQLiteVFS.OPFSCoopSyncVFS); + +function createIframeWithPowerSyncClient( + dbFilename: string, + identifier: string, + vfs?: WASQLiteVFS, + waitForConnection?: boolean, + configureMockResponses?: boolean +): IframeClientResult { + const iframe = document.createElement('iframe'); + // Make iframe visible for debugging + iframe.style.display = 'block'; + iframe.style.width = '300px'; + iframe.style.height = '150px'; + iframe.style.border = '2px solid #007bff'; + iframe.style.margin = '10px'; + iframe.style.borderRadius = '4px'; + iframe.title = `PowerSync Client: ${identifier}`; + document.body.appendChild(iframe); + + // Get the base URL for module imports + // In Vitest browser mode, we need to construct a path relative to where the test file is served + // Use import.meta.url to get the current test file's location + const testFileUrl = new URL(import.meta.url); + const testFileDir = testFileUrl.pathname.substring(0, testFileUrl.pathname.lastIndexOf('/')); + // Construct the absolute path to the initializer module relative to the test file + const modulePath = `${testFileUrl.origin}${testFileDir}/utils/iframeInitializer.ts`; + + // Create HTML content with module script that imports and executes the setup function + // Vite will serve the module file, allowing proper module resolution + const htmlContent = ` + + + + PowerSync Client ${identifier} + + + +
+
ID:${identifier}
+
DB:${dbFilename}
+
VFS:${vfs || 'IndexedDB (default)'}
+
+ + +`; + + const blob = new Blob([htmlContent], { type: 'text/html' }); + const url = URL.createObjectURL(blob); + iframe.src = url; + + let requestIdCounter = 0; + const pendingRequests = new Map< + string, + { + resolve: (value: any) => void; + reject: (error: Error) => void; + } + >(); + + let messageHandler: ((event: MessageEvent) => void) | null = null; + let isCleanedUp = false; + + // Create cleanup function that can be called immediately + const cleanup = async (): Promise => { + if (isCleanedUp) { + return; + } + isCleanedUp = true; + + // Remove message handler if it was added + if (messageHandler) { + window.removeEventListener('message', messageHandler); + messageHandler = null; + } + + // Simulate abrupt tab closure - just remove the iframe without calling + // disconnect/close on the PowerSync client. This tests dead tab detection. + URL.revokeObjectURL(url); + if (iframe.parentNode) { + iframe.remove(); + } + }; + + // Create promise that resolves when powersync-ready is received + const ready = new Promise((resolve, reject) => { + messageHandler = async (event: MessageEvent) => { + if (isCleanedUp) { + return; + } + + const data = event.data; + + if (data?.type === 'powersync-ready' && data.identifier === identifier) { + // Don't remove the message handler - we need it to receive query results! 
+ resolve({ + iframe, + cleanup, + executeQuery: (query: string, parameters?: unknown[]): Promise => { + return new Promise((resolveQuery, rejectQuery) => { + if (isCleanedUp) { + rejectQuery(new Error('Iframe has been cleaned up')); + return; + } + + const requestId = `query-${identifier}-${++requestIdCounter}`; + pendingRequests.set(requestId, { + resolve: resolveQuery, + reject: rejectQuery + }); + + const iframeWindow = iframe.contentWindow; + if (!iframeWindow) { + pendingRequests.delete(requestId); + rejectQuery(new Error('Iframe window not available')); + return; + } + + iframeWindow.postMessage( + { + type: 'execute-query', + requestId, + query, + parameters + }, + '*' + ); + + // Cleanup after timeout to prevent memory leaks + setTimeout(() => { + if (pendingRequests.has(requestId)) { + pendingRequests.delete(requestId); + rejectQuery(new Error('Query timeout')); + } + }, 30000); + }); + }, + getCredentialsFetchCount: (): Promise => { + return new Promise((resolveCount, rejectCount) => { + if (isCleanedUp) { + rejectCount(new Error('Iframe has been cleaned up')); + return; + } + + const requestId = `credentials-count-${identifier}-${++requestIdCounter}`; + pendingRequests.set(requestId, { + resolve: resolveCount, + reject: rejectCount + }); + + const iframeWindow = iframe.contentWindow; + if (!iframeWindow) { + pendingRequests.delete(requestId); + rejectCount(new Error('Iframe window not available')); + return; + } + + iframeWindow.postMessage( + { + type: 'get-credentials-count', + requestId + }, + '*' + ); + + // Cleanup after timeout to prevent memory leaks + setTimeout(() => { + if (pendingRequests.has(requestId)) { + pendingRequests.delete(requestId); + rejectCount(new Error('Credentials count request timeout')); + } + }, 10000); + }); + } + }); + } else if (data?.type === 'powersync-error' && data.identifier === identifier) { + if (messageHandler) { + window.removeEventListener('message', messageHandler); + messageHandler = null; + } + 
URL.revokeObjectURL(url); + if (iframe.parentNode) { + iframe.remove(); + } + reject(new Error(`PowerSync error in iframe: ${data.error}`)); + } else if (data?.type === 'query-result' && data.identifier === identifier) { + const pending = pendingRequests.get(data.requestId); + if (pending) { + pendingRequests.delete(data.requestId); + if (data.success) { + pending.resolve(data.result); + } else { + pending.reject(new Error(data.error || 'Query failed')); + } + } + } else if (data?.type === 'credentials-count-result' && data.identifier === identifier) { + const pending = pendingRequests.get(data.requestId); + if (pending) { + pendingRequests.delete(data.requestId); + if (data.success) { + pending.resolve(data.count); + } else { + pending.reject(new Error(data.error || 'Credentials count request failed')); + } + } + } + }; + window.addEventListener('message', messageHandler); + }); + + return { + iframe, + cleanup, + ready + }; +} + +/** + * Test suite for simulating multiple browser tabs with PowerSync clients. + * + * Purpose: + * These tests simulate the behavior of closing and reopening multiple browser tabs + * that share a PowerSync database connection via a SharedWorker. This is critical + * for testing PowerSync's dead tab detection and resource cleanup mechanisms. + * + * Iframe vs Real Tab Behavior: + * Closing an iframe by removing it from the DOM is similar to closing a real browser tab + * for PowerSync's purposes because: + * 1. Navigator Locks API: PowerSync uses Navigator Locks to detect tab closure. When an + * iframe is removed, its execution context is destroyed and any held locks are automatically + * released, just like when a real tab closes. This is the primary mechanism PowerSync uses + * for dead tab detection (see SharedWebStreamingSyncImplementation.ts). + * 2. MessagePort Closure: When an iframe is removed, any MessagePorts used for communication + * with the SharedWorker are closed, triggering cleanup in the worker. + * 3. 
Window Unload: The iframe's window context is destroyed, which would trigger unload + * event listeners if registered (PowerSyncDatabase registers an 'unload' listener when + * enableMultiTabs is true). + * + * Test Scenarios: + * - Opening 100 tabs simultaneously + * - Waiting 1 second for all tabs to initialize + * - Simultaneously closing all tabs except the middle (50th) tab + * - Verifying that the remaining tab is still functional and the shared database + * connection is properly maintained after closing 99 tabs + * + * This test suite runs for both IndexedDB and OPFS VFS backends to ensure dead tab + * detection works correctly across different storage mechanisms. + */ +function createMultipleTabsTest(vfs?: WASQLiteVFS) { + const vfsName = vfs || 'IndexedDB'; + describe(`Multiple Tabs with Iframes (${vfsName})`, { sequential: true, timeout: 30_000 }, () => { + const dbFilename = `test-multi-tab-${uuid()}.db`; + + // Number of tabs to create + const NUM_TABS = 100; + // Index of the middle tab to keep (0-indexed, so 49 is the 50th tab) + const MIDDLE_TAB_INDEX = 49; + + it('should handle opening and closing many tabs quickly', async () => { + // Step 0: Create an iframe to set up PowerSync and configure mock responses (401) + const setupIdentifier = `setup-${uuid()}`; + const setupIframe = createIframeWithPowerSyncClient(dbFilename, setupIdentifier, vfs, false, true); + onTestFinished(async () => { + try { + await setupIframe.cleanup(); + } catch (e) { + // Ignore cleanup errors + } + }); + // Wait for the setup iframe to be ready (this ensures PowerSync is initialized and mock responses are configured) + await setupIframe.ready; + // Step 1: Open 100 tabs (don't wait for them to be ready) + const tabResults: IframeClientResult[] = [setupIframe]; + + for (let i = 0; i < NUM_TABS; i++) { + const identifier = `tab-${i}`; + const result = createIframeWithPowerSyncClient(dbFilename, identifier, vfs); + tabResults.push(result); + + // Register cleanup for each 
tab + onTestFinished(async () => { + try { + await result.cleanup(); + } catch (e) { + // Ignore cleanup errors - tab might already be closed + } + }); + } + + // Total iframes: 1 setup + NUM_TABS tabs + expect(tabResults.length).toBe(NUM_TABS + 1); + + // Verify all iframes are created (they're created immediately) + for (const result of tabResults) { + expect(result.iframe.isConnected).toBe(true); + } + + // Step 2: Wait 1 second + await new Promise((resolve) => setTimeout(resolve, 1000)); + + // Step 3: Close all tabs except the setup iframe (index 0) and the middle (50th) tab + // The middle tab is at index 1 + MIDDLE_TAB_INDEX (since index 0 is the setup iframe) + const middleTabArrayIndex = 1 + MIDDLE_TAB_INDEX; + const tabsToClose: IframeClientResult[] = []; + for (let i = 0; i < tabResults.length; i++) { + // Skip the setup iframe (index 0) and the middle tab + if (i !== 0 && i !== middleTabArrayIndex) { + tabsToClose.push(tabResults[i]); + } + } + + // Close all tabs except the setup iframe and middle one simultaneously (without waiting for ready) + const closePromises = tabsToClose.map((result) => result.cleanup()); + await Promise.all(closePromises); + + // Verify closed tabs are removed + for (let i = 0; i < tabResults.length; i++) { + if (i !== 0 && i !== middleTabArrayIndex) { + expect(tabResults[i].iframe.isConnected).toBe(false); + expect(document.body.contains(tabResults[i].iframe)).toBe(false); + } + } + + // Verify the setup iframe and middle tab are still present + expect(tabResults[0].iframe.isConnected).toBe(true); + expect(document.body.contains(tabResults[0].iframe)).toBe(true); + expect(tabResults[middleTabArrayIndex].iframe.isConnected).toBe(true); + expect(document.body.contains(tabResults[middleTabArrayIndex].iframe)).toBe(true); + + // Step 4: Wait for the middle tab to be ready, then execute a test query to verify its DB is still functional + const middleTabClient = await tabResults[middleTabArrayIndex].ready; + const queryResult = 
await middleTabClient.executeQuery('SELECT 1 as value'); + + // Verify the query result + expect(queryResult).toBeDefined(); + expect(Array.isArray(queryResult)).toBe(true); + expect(queryResult.length).toBe(1); + expect((queryResult[0] as { value: number }).value).toBe(1); + + // Step 5: Create another tab, wait for it to be ready, and verify its credentialsFetchCount is 1 + const newTabIdentifier = `new-tab-${Date.now()}`; + const newTabResult = createIframeWithPowerSyncClient(dbFilename, newTabIdentifier, vfs, true); + onTestFinished(async () => { + try { + await newTabResult.cleanup(); + } catch (e) { + // Ignore cleanup errors + } + }); + const newTabClient = await newTabResult.ready; + + // Verify the new tab's credentials fetch count is 1 + // This means the shared worker is using the db and attempting to connect to the PowerSync server. + const credentialsFetchCount = await newTabClient.getCredentialsFetchCount(); + expect(credentialsFetchCount).toBe(1); + }); + }); +} diff --git a/packages/web/tests/utils/MockSyncService.ts b/packages/web/tests/utils/MockSyncService.ts new file mode 100644 index 000000000..d6755b18e --- /dev/null +++ b/packages/web/tests/utils/MockSyncService.ts @@ -0,0 +1,3 @@ +// Re-export types and worker-side implementation +export * from './MockSyncServiceTypes'; +export * from './MockSyncServiceWorker'; diff --git a/packages/web/tests/utils/MockSyncServiceClient.ts b/packages/web/tests/utils/MockSyncServiceClient.ts new file mode 100644 index 000000000..c099e5b77 --- /dev/null +++ b/packages/web/tests/utils/MockSyncServiceClient.ts @@ -0,0 +1,228 @@ +import { StreamingSyncLine } from '@powersync/common'; +import type { + AutomaticResponseConfig, + MockSyncServiceMessage, + MockSyncServiceResponse, + PendingRequest +} from './MockSyncServiceTypes'; + +/** + * Interface for mocking sync service responses in shared worker environments. + * Similar to MockSyncService in the node SDK package. 
+ */ +export interface MockSyncService { + /** + * Get all pending requests (requests waiting for a response to be created) + */ + getPendingRequests(): Promise; + + /** + * Create a response for a pending request with the specified status and headers. + * This resolves the pending request and allows pushing body data. + */ + createResponse(pendingRequestId: string, status: number, headers: Record): Promise; + + /** + * Push body data to an active response. + * Accepts either text (string) or binary data (ArrayBuffer or Uint8Array). + * Strings are encoded to Uint8Array before sending. + * The response must have been created first using createResponse. + */ + pushBodyData(pendingRequestId: string, data: string | ArrayBuffer | Uint8Array): Promise; + + /** + * Push a streaming sync line as NDJSON to an active response. + * This is a convenience method that encodes the line as JSON with a newline. + * The response must have been created first using createResponse. + */ + pushBodyLine(pendingRequestId: string, line: StreamingSyncLine): Promise; + + /** + * Complete an active response (close the stream). + * The response must have been created first using createResponse. + */ + completeResponse(pendingRequestId: string): Promise; + + /** + * Set the automatic response configuration. + * When set, this will be used to automatically reply to all pending requests. + */ + setAutomaticResponse(config: AutomaticResponseConfig | null): Promise; + + /** + * Automatically reply to all pending requests using the automatic response configuration. + * Returns the number of requests that were replied to. + */ + replyToAllPendingRequests(): Promise; +} + +/** + * Connect to the shared worker and get access to the mock sync service. + * This function creates a separate SharedWorker connection to the same shared sync worker + * just to access the mock service, without interfering with the normal sync implementation. 
+ * + * @param identifier - The database identifier (used to construct the worker name) + * @param workerUrl - Optional custom worker URL. If not provided, uses the default shared sync worker. + * @returns The mock sync service interface, or null if not available + */ +export async function getMockSyncServiceFromWorker( + identifier: string, + workerUrl?: string | URL +): Promise { + // Create a separate SharedWorker connection to the same shared sync worker + // This connection is only used to access the mock service + // Note the URL and identifier should match in order for the correct worker to be used + const worker = workerUrl + ? new SharedWorker(typeof workerUrl === 'string' ? workerUrl : workerUrl.href, { + name: `shared-sync-${identifier}` + }) + : new SharedWorker(new URL('../../lib/src/worker/sync/SharedSyncImplementation.worker.js', import.meta.url), { + /* @vite-ignore */ + name: `shared-sync-${identifier}`, + type: 'module' + }); + + const port = worker.port; + port.start(); + + // Generic helper to send a message and wait for a response + const sendMessage = ( + message: MockSyncServiceMessage, + expectedType: T['type'], + timeout = 5000, + transfer?: Transferable[] + ): Promise => { + return new Promise((resolve, reject) => { + const requestId = 'requestId' in message ? 
message.requestId : undefined; + + const handler = (event: MessageEvent) => { + const response = event.data; + + if (response.type === expectedType && response.requestId === requestId) { + port.removeEventListener('message', handler); + if ('success' in response && !response.success) { + reject(new Error('Operation failed')); + } else { + resolve(response as T); + } + } else if (response.type === 'error' && response.requestId === requestId) { + port.removeEventListener('message', handler); + reject(new Error(response.error)); + } + }; + + port.addEventListener('message', handler); + if (transfer && transfer.length > 0) { + port.postMessage(message, transfer); + } else { + port.postMessage(message); + } + + // Timeout + setTimeout(() => { + port.removeEventListener('message', handler); + reject(new Error(`Timeout waiting for ${expectedType} response`)); + }, timeout); + }); + }; + + // Define pushBodyData first so it can be used by pushBodyLine + const pushBodyData = async (pendingRequestId: string, data: string | ArrayBuffer | Uint8Array): Promise => { + const requestId = crypto.randomUUID(); + + // Handle transferable objects for ArrayBuffer + const transfer: Transferable[] = []; + if (data instanceof ArrayBuffer) { + transfer.push(data); + } else if (data instanceof Uint8Array && data.buffer instanceof ArrayBuffer) { + transfer.push(data.buffer); + } + // Strings are passed as-is, no transfer needed + + await sendMessage( + { + type: 'pushBodyData', + requestId, + pendingRequestId, + data + } satisfies MockSyncServiceMessage, + 'pushBodyData', + 5000, + transfer.length > 0 ? 
transfer : undefined + ); + }; + + return { + async getPendingRequests(): Promise { + const requestId = crypto.randomUUID(); + const response = await sendMessage<{ type: 'getPendingRequests'; requestId: string; requests: PendingRequest[] }>( + { type: 'getPendingRequests', requestId } satisfies MockSyncServiceMessage, + 'getPendingRequests' + ); + return response.requests; + }, + + async createResponse(pendingRequestId: string, status: number, headers: Record): Promise { + const requestId = crypto.randomUUID(); + await sendMessage( + { + type: 'createResponse', + requestId, + pendingRequestId, + status, + headers + } satisfies MockSyncServiceMessage, + 'createResponse' + ); + }, + + pushBodyData, + + async pushBodyLine(pendingRequestId: string, line: any): Promise { + // Encode as NDJSON: JSON.stringify + newline + const lineStr = `${JSON.stringify(line)}\n`; + await pushBodyData(pendingRequestId, lineStr); + }, + + async completeResponse(pendingRequestId: string): Promise { + const requestId = crypto.randomUUID(); + await sendMessage( + { + type: 'completeResponse', + requestId, + pendingRequestId + } satisfies MockSyncServiceMessage, + 'completeResponse' + ); + }, + + async setAutomaticResponse(config: AutomaticResponseConfig | null): Promise { + const requestId = crypto.randomUUID(); + await sendMessage( + { + type: 'setAutomaticResponse', + requestId, + config + } satisfies MockSyncServiceMessage, + 'setAutomaticResponse' + ); + }, + + async replyToAllPendingRequests(): Promise { + const requestId = crypto.randomUUID(); + const response = await sendMessage<{ + type: 'replyToAllPendingRequests'; + requestId: string; + success: boolean; + count: number; + }>( + { + type: 'replyToAllPendingRequests', + requestId + } satisfies MockSyncServiceMessage, + 'replyToAllPendingRequests' + ); + return response.count; + } + }; +} diff --git a/packages/web/tests/utils/MockSyncServiceTypes.ts b/packages/web/tests/utils/MockSyncServiceTypes.ts new file mode 100644 index 
000000000..8f0d61d79 --- /dev/null +++ b/packages/web/tests/utils/MockSyncServiceTypes.ts @@ -0,0 +1,71 @@ +/** + * Representation of a pending request + */ +export interface PendingRequest { + id: string; + url: string; + method: string; + headers: Record; + body: any; +} + +/** + * Automatic response configuration + */ +export interface AutomaticResponseConfig { + status: number; + headers: Record; + bodyLines?: any[]; +} + +/** + * Message types for communication via MessagePort + */ +export type MockSyncServiceMessage = + | { type: 'getPendingRequests'; requestId: string } + | { + type: 'createResponse'; + requestId: string; + pendingRequestId: string; + status: number; + headers: Record; + } + | { type: 'pushBodyData'; requestId: string; pendingRequestId: string; data: string | ArrayBuffer | Uint8Array } + | { type: 'completeResponse'; requestId: string; pendingRequestId: string } + | { type: 'setAutomaticResponse'; requestId: string; config: AutomaticResponseConfig | null } + | { type: 'replyToAllPendingRequests'; requestId: string }; + +export type MockSyncServiceResponse = + | { type: 'getPendingRequests'; requestId: string; requests: PendingRequest[] } + | { type: 'createResponse'; requestId: string; success: boolean } + | { type: 'pushBodyData'; requestId: string; success: boolean } + | { type: 'completeResponse'; requestId: string; success: boolean } + | { type: 'setAutomaticResponse'; requestId: string; success: boolean } + | { type: 'replyToAllPendingRequests'; requestId: string; success: boolean; count: number } + | { type: 'error'; requestId?: string; error: string }; + +/** + * Internal representation of a pending request with response promise + */ +export interface PendingRequestInternal { + id: string; + url: string; + method: string; + headers: Record; + body: any; + responsePromise: { + resolve: (response: Response) => void; + reject: (error: Error) => void; + }; + streamController?: ReadableStreamDefaultController; +} + +/** + * Internal 
representation of an active response + */ +export interface ActiveResponse { + id: string; + status: number; + headers: Record; + stream: ReadableStreamDefaultController; +} diff --git a/packages/web/tests/utils/MockSyncServiceWorker.ts b/packages/web/tests/utils/MockSyncServiceWorker.ts new file mode 100644 index 000000000..9387d3064 --- /dev/null +++ b/packages/web/tests/utils/MockSyncServiceWorker.ts @@ -0,0 +1,406 @@ +import type { MockSyncServiceMessage, MockSyncServiceResponse } from './MockSyncServiceTypes'; +import { + ActiveResponse, + AutomaticResponseConfig, + PendingRequest, + PendingRequestInternal +} from './MockSyncServiceTypes'; + +/** + * Mock sync service implementation for shared worker environments. + * This allows tests to mock sync responses when using enableMultipleTabs: true. + * Requests are kept pending until a client explicitly creates a response. + */ +export class MockSyncService { + private pendingRequests: Map = new Map(); + private activeResponses: Map = new Map(); + private nextId = 0; + private automaticResponse: AutomaticResponseConfig | null = null; + + /** + * A Static instance of the mock sync service. + * This can be used directly for non-worker environments. + * A proxy is required for worker environments. + */ + static readonly GLOBAL_INSTANCE = new MockSyncService(); + + /** + * Register a new pending request (called by WebRemote when a sync stream is requested). + * Returns a promise that resolves when a client creates a response for this request. 
+ */ + registerPendingRequest( + url: string, + method: string, + headers: Record, + body: any, + signal?: AbortSignal + ): Promise { + const id = `pending-${++this.nextId}`; + + let resolveResponse: (response: Response) => void; + let rejectResponse: (error: Error) => void; + + const responsePromise = new Promise((resolve, reject) => { + resolveResponse = resolve; + rejectResponse = reject; + }); + + const pendingRequest: PendingRequestInternal = { + id, + url, + method, + headers, + body, + responsePromise: { + resolve: resolveResponse!, + reject: rejectResponse! + } + }; + + this.pendingRequests.set(id, pendingRequest); + + signal?.addEventListener('abort', () => { + this.pendingRequests.delete(id); + rejectResponse(new Error('Request aborted')); + + // if already in active responses, remove it + if (this.activeResponses.has(id)) { + const response = this.activeResponses.get(id); + if (response) { + response.stream.close(); + } + this.activeResponses.delete(id); + } + }); + + // If automatic response is configured, apply it immediately + if (this.automaticResponse) { + // Use setTimeout to ensure the response is created asynchronously + // This prevents issues if the response creation happens synchronously + setTimeout(() => { + try { + // Create response with automatic config + this.createResponse(id, this.automaticResponse!.status, this.automaticResponse!.headers); + + // Push body lines if provided + if (this.automaticResponse!.bodyLines) { + for (const line of this.automaticResponse!.bodyLines) { + const lineStr = `${JSON.stringify(line)}\n`; + const encoder = new TextEncoder(); + this.pushBodyData(id, encoder.encode(lineStr)); + } + } + + // Complete the response + this.completeResponse(id); + } catch (e) { + // If automatic response fails, reject the promise + rejectResponse!(e instanceof Error ? 
e : new Error(String(e))); + } + }, 0); + } + + // Return the promise - it will resolve when createResponse is called (or immediately if auto-response is set) + return responsePromise; + } + + /** + * Get all pending requests + */ + getPendingRequestsSync(): PendingRequest[] { + return Array.from(this.pendingRequests.values()).map((pr) => ({ + id: pr.id, + url: pr.url, + method: pr.method, + headers: pr.headers, + body: pr.body + })); + } + + /** + * Create a response for a pending request. + * This resolves the response promise and allows pushing body lines. + */ + createResponse(pendingRequestId: string, status: number, headers: Record): void { + const pendingRequest = this.pendingRequests.get(pendingRequestId); + if (!pendingRequest) { + throw new Error(`Pending request ${pendingRequestId} not found`); + } + + // Create a readable stream that the mock service can control + // Response.body is always ReadableStream, so we use Uint8Array + const stream = new ReadableStream({ + start: (controller) => { + // Store the active response once the controller is available + // The start callback is called synchronously, so this is safe + const activeResponse: ActiveResponse = { + id: pendingRequestId, + status, + headers, + stream: controller + }; + this.activeResponses.set(pendingRequestId, activeResponse); + }, + cancel: () => { + // Remove response when stream is cancelled + this.activeResponses.delete(pendingRequestId); + this.pendingRequests.delete(pendingRequestId); + } + }); + + // Create the Response object + const response = new Response(stream, { + status, + headers + }); + + // Resolve the pending request's promise + pendingRequest.responsePromise.resolve(response); + + // Remove from pending (it's now active) + this.pendingRequests.delete(pendingRequestId); + } + + /** + * Push body data to an active response. + * Accepts either text (string) or binary data (ArrayBuffer or Uint8Array). 
+ * All data is encoded to Uint8Array before enqueueing (required by ReadableStream). + */ + pushBodyData(pendingRequestId: string, data: string | ArrayBuffer | Uint8Array): void { + const activeResponse = this.activeResponses.get(pendingRequestId); + if (!activeResponse) { + throw new Error(`Active response ${pendingRequestId} not found`); + } + + try { + let encoded: Uint8Array; + + if (typeof data === 'string') { + // Encode string to Uint8Array (required by ReadableStream) + const encoder = new TextEncoder(); + encoded = encoder.encode(data); + } else if (data instanceof ArrayBuffer) { + // Convert ArrayBuffer to Uint8Array + encoded = new Uint8Array(data); + } else { + // Already Uint8Array, use directly + encoded = data; + } + + activeResponse.stream.enqueue(encoded); + } catch (e) { + // Stream might be closed, remove it + this.activeResponses.delete(pendingRequestId); + throw new Error(`Failed to push data to response ${pendingRequestId}: ${e}`); + } + } + + /** + * Complete an active response (close the stream) + */ + completeResponse(pendingRequestId: string): void { + const activeResponse = this.activeResponses.get(pendingRequestId); + if (!activeResponse) { + throw new Error(`Active response ${pendingRequestId} not found`); + } + + try { + activeResponse.stream.close(); + } catch (e) { + // Stream might already be closed + } finally { + this.activeResponses.delete(pendingRequestId); + } + } + + /** + * Set the automatic response configuration. + * When set, this will be used to automatically reply to all pending requests. + */ + setAutomaticResponse(config: AutomaticResponseConfig | null): void { + this.automaticResponse = config; + } + + /** + * Automatically reply to all pending requests using the automatic response configuration. + * Returns the number of requests that were replied to. + */ + replyToAllPendingRequests(): number { + if (!this.automaticResponse) { + throw new Error('Automatic response not set. 
Call setAutomaticResponse first.'); + } + + const pendingRequestIds = Array.from(this.pendingRequests.keys()); + let count = 0; + + for (const requestId of pendingRequestIds) { + try { + // Create response with automatic config + this.createResponse(requestId, this.automaticResponse.status, this.automaticResponse.headers); + + // Push body lines if provided + if (this.automaticResponse.bodyLines) { + for (const line of this.automaticResponse.bodyLines) { + const lineStr = `${JSON.stringify(line)}\n`; + const encoder = new TextEncoder(); + this.pushBodyData(requestId, encoder.encode(lineStr)); + } + } + + // Complete the response + this.completeResponse(requestId); + count++; + } catch (e) { + // Skip requests that fail (might already be handled) + continue; + } + } + + return count; + } +} + +/** + * Set up message handler for the mock service on a MessagePort + */ +export function setupMockServiceMessageHandler(port: MessagePort) { + port.addEventListener('message', (event: MessageEvent) => { + const message = event.data; + + if (!message || typeof message !== 'object' || !('type' in message)) { + return; + } + + const service = MockSyncService.GLOBAL_INSTANCE; + + try { + switch (message.type) { + case 'getPendingRequests': { + try { + const requests = service.getPendingRequestsSync(); + port.postMessage({ + type: 'getPendingRequests', + requestId: message.requestId, + requests + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? 
error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + case 'createResponse': { + try { + service.createResponse(message.pendingRequestId, message.status, message.headers); + port.postMessage({ + type: 'createResponse', + requestId: message.requestId, + success: true + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + case 'pushBodyData': { + try { + service.pushBodyData(message.pendingRequestId, message.data); + port.postMessage({ + type: 'pushBodyData', + requestId: message.requestId, + success: true + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + case 'completeResponse': { + try { + service.completeResponse(message.pendingRequestId); + port.postMessage({ + type: 'completeResponse', + requestId: message.requestId, + success: true + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + case 'setAutomaticResponse': { + try { + service.setAutomaticResponse(message.config); + port.postMessage({ + type: 'setAutomaticResponse', + requestId: message.requestId, + success: true + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? 
error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + case 'replyToAllPendingRequests': { + try { + const count = service.replyToAllPendingRequests(); + port.postMessage({ + type: 'replyToAllPendingRequests', + requestId: message.requestId, + success: true, + count + } satisfies MockSyncServiceResponse); + } catch (error) { + port.postMessage({ + type: 'error', + requestId: message.requestId, + error: error instanceof Error ? error.message : String(error) + } satisfies MockSyncServiceResponse); + } + break; + } + default: { + const requestId = + 'requestId' in message && typeof message === 'object' && message !== null + ? (message as { requestId?: string }).requestId + : undefined; + port.postMessage({ + type: 'error', + requestId, + error: `Unknown message type: ${(message as any).type}` + } satisfies MockSyncServiceResponse); + break; + } + } + } catch (error) { + // Fallback for any unexpected errors + const requestId = 'requestId' in message ? message.requestId : undefined; + port.postMessage({ + type: 'error', + requestId, + error: error instanceof Error ? error.message : String(error) + } satisfies MockSyncServiceResponse); + } + }); + + port.start(); +} diff --git a/packages/web/tests/utils/iframeInitializer.ts b/packages/web/tests/utils/iframeInitializer.ts new file mode 100644 index 000000000..4c1c8da11 --- /dev/null +++ b/packages/web/tests/utils/iframeInitializer.ts @@ -0,0 +1,173 @@ +import { LogLevel, Schema, SyncStreamConnectionMethod, TableV2, column, createBaseLogger } from '@powersync/common'; +import { PowerSyncDatabase, WASQLiteOpenFactory, WASQLiteVFS } from '@powersync/web'; +import { getMockSyncServiceFromWorker } from './MockSyncServiceClient'; + +/** + * Initializes a PowerSync client in the current iframe context and notifies the parent. + * This function is designed to be called from within an iframe's script tag. 
+ * + * @param vfs - VFS option as a string (e.g., 'OPFSCoopSyncVFS' or 'IDBBatchAtomicVFS') + */ +export async function setupPowerSyncInIframe( + dbFilename: string, + identifier: string, + vfs?: string, + waitForConnection?: boolean, + configureMockResponses?: boolean +): Promise { + try { + // Track the number of times fetchCredentials has been called + let credentialsFetchCount = 0; + + const connector = { + async fetchCredentials() { + credentialsFetchCount++; + return { endpoint: 'http://localhost/test', token: 'test-token' }; + }, + async uploadData() {} + }; + + // Create a simple schema for testing + const schema = new Schema({ + customers: new TableV2({ + name: column.text, + email: column.text + }) + }); + + // Configure database with optional VFS + // The vfs string value is the enum value itself (string enums) + const databaseOptions = vfs + ? new WASQLiteOpenFactory({ + dbFilename, + vfs: vfs as WASQLiteVFS + }) + : { dbFilename }; + + // Configure verbose logging + const logger = createBaseLogger(); + logger.setLevel(LogLevel.DEBUG); + logger.useDefaults(); + + const db = new PowerSyncDatabase({ + database: databaseOptions, + schema: schema, + retryDelayMs: 100, + flags: { enableMultiTabs: true, useWebWorker: true }, + logger + }); + + // Connect to PowerSync (don't await this since we want to create multiple tabs) + const connectionPromise = db.connect(connector, { connectionMethod: SyncStreamConnectionMethod.HTTP }); + + if (waitForConnection) { + await connectionPromise; + } + + if (configureMockResponses) { + // Wait for connecting:true before setting up mock responses + const maxAttempts = 100; + const delayMs = 50; + for (let i = 0; i < maxAttempts; i++) { + if (db.currentStatus.connecting) { + break; + } + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + + const mockSyncService = await getMockSyncServiceFromWorker(dbFilename); + if (mockSyncService) { + await mockSyncService.setAutomaticResponse({ + // We want to confirm 
credentials are fetched due to invalidation. + status: 401, + headers: { 'Content-Type': 'application/json' } + }); + await mockSyncService.replyToAllPendingRequests(); + } + } + + // Store reference for cleanup + (window as any).powersyncClient = db; + + // Set up message handlers for test operations + window.addEventListener('message', async (event: MessageEvent) => { + // Only handle messages from parent window + // Note: event.source might not match exactly with blob URLs, so we'll be less strict + if (event.source && event.source !== window.parent && event.source !== window) { + return; + } + + const { type, requestId, query, parameters } = event.data || {}; + + if (type === 'execute-query' && requestId) { + try { + const result = await db.getAll(query, parameters || []); + window.parent.postMessage( + { + type: 'query-result', + requestId, + identifier, + success: true, + result + }, + '*' + ); + } catch (error) { + window.parent.postMessage( + { + type: 'query-result', + requestId, + identifier, + success: false, + error: (error as Error).message + }, + '*' + ); + } + } else if (type === 'get-credentials-count' && requestId) { + try { + window.parent.postMessage( + { + type: 'credentials-count-result', + requestId, + identifier, + success: true, + count: credentialsFetchCount + }, + '*' + ); + } catch (error) { + window.parent.postMessage( + { + type: 'credentials-count-result', + requestId, + identifier, + success: false, + error: (error as Error).message + }, + '*' + ); + } + } + }); + + // Notify parent that client is ready + window.parent.postMessage( + { + type: 'powersync-ready', + identifier: identifier + }, + '*' + ); + } catch (error) { + console.error('PowerSync initialization error:', error); + window.parent.postMessage( + { + type: 'powersync-error', + identifier: identifier, + error: (error as Error).message + }, + '*' + ); + } +} diff --git a/packages/web/tests/utils/mockSyncServiceTest.ts b/packages/web/tests/utils/mockSyncServiceTest.ts new 
file mode 100644 index 000000000..2cac92e53 --- /dev/null +++ b/packages/web/tests/utils/mockSyncServiceTest.ts @@ -0,0 +1,160 @@ +import { + LogLevel, + PowerSyncBackendConnector, + PowerSyncCredentials, + Schema, + SyncStreamConnectionMethod, + Table, + column, + createBaseLogger +} from '@powersync/common'; +import { PowerSyncDatabase, WebPowerSyncDatabaseOptions } from '@powersync/web'; +import { MockedFunction, expect, onTestFinished, test, vi } from 'vitest'; +import { MockSyncService, getMockSyncServiceFromWorker } from './MockSyncServiceClient'; + +// Define schema similar to node tests +const lists = new Table({ + name: column.text +}); + +export const AppSchema = new Schema({ + lists +}); + +export type MockedTestConnector = { + [Key in keyof PowerSyncBackendConnector]: MockedFunction; +}; +/** + * Creates a test connector with vi.fn implementations for testing. + */ +export function createTestConnector(): MockedTestConnector { + return { + fetchCredentials: vi.fn().mockResolvedValue({ + endpoint: 'http://localhost:3000', + token: 'test-token' + } as PowerSyncCredentials), + uploadData: vi.fn().mockResolvedValue(undefined) + }; +} + +/** + * Result of calling the connect function + */ +export interface ConnectResult { + syncRequestId: string; +} + +/** + * Vitest test extension for mocking sync service in shared worker environments. + * Similar to mockSyncServiceTest in the node SDK package. 
+ * + * This extension: + * - Sets up a PowerSync database with the lists schema + * - Exposes a connect function that calls powersync.connect(), waits for connecting: true, + * creates the sync service, and returns both + * - Exposes the database and test connector + */ +export const sharedMockSyncServiceTest = test.extend<{ + context: { + /** An automatically opened database */ + connector: MockedTestConnector; + connect: (customConnector?: PowerSyncBackendConnector) => Promise; + database: PowerSyncDatabase; + databaseName: string; + openDatabase: (customConfig?: Partial) => PowerSyncDatabase; + mockService: MockSyncService; + }; +}>({ + context: async ({}, use) => { + const dbFilename = `test-${crypto.randomUUID()}.db`; + const logger = createBaseLogger(); + logger.setLevel(LogLevel.DEBUG); + logger.useDefaults(); + + const openDatabase = (customConfig: Partial = {}) => { + const db = new PowerSyncDatabase({ + database: { + dbFilename, + ...(customConfig.database ?? {}) + }, + flags: { + enableMultiTabs: true, + ...(customConfig.flags ?? {}) + }, + retryDelayMs: 1000, + crudUploadThrottleMs: 1000, + schema: AppSchema, + logger, + ...customConfig + }); + onTestFinished(async () => { + if (!db.closed) { + await db.disconnect(); + await db.close(); + } + }); + return db; + }; + + const database = openDatabase(); + + // Get the identifier from the database.name property + const identifier = database.database.name; + + // Connect to the shared worker to get the mock service + const mockService = await getMockSyncServiceFromWorker(identifier); + if (!mockService) { + throw new Error('Mock service not available'); + } + + const connector = createTestConnector(); + + const connectFn = async (customConnector?: PowerSyncBackendConnector): Promise => { + const connectorToUse = customConnector ?? 
connector; + + // Call powersync.connect() to start the sync worker + const connectionPromise = database.connect(connectorToUse, { + connectionMethod: SyncStreamConnectionMethod.HTTP + }); + + // Wait for the database to report connecting: true before using the sync service + await vi.waitFor( + () => { + expect(database.connecting).toBe(true); + }, + { timeout: 1000 } + ); + + let _syncRequestId: string; + await vi.waitFor(async () => { + const requests = await mockService.getPendingRequests(); + expect(requests.length).toBeGreaterThan(0); + _syncRequestId = requests[0].id; + }); + + const syncRequestId = _syncRequestId!; + + await mockService.createResponse(syncRequestId, 200, { 'Content-Type': 'application/json' }); + + // Send a Keepalive just as the first message + await mockService.pushBodyLine(syncRequestId, { + token_expires_in: 10_000_000 + }); + + await connectionPromise; + + return { + syncRequestId + }; + }; + + await use({ + connector, + connect: connectFn, + database, + databaseName: dbFilename, + openDatabase, + mockService + }); + } +}); diff --git a/packages/web/vitest.config.ts b/packages/web/vitest.config.ts index 0e586da8d..31b3acc55 100644 --- a/packages/web/vitest.config.ts +++ b/packages/web/vitest.config.ts @@ -20,7 +20,9 @@ const config: UserConfigExport = { */ '@powersync/web': path.resolve(__dirname, './lib/src'), // https://jira.mongodb.org/browse/NODE-5773 - bson: require.resolve('bson') + bson: require.resolve('bson'), + // Mock WebRemote to throw 401 errors for all HTTP requests in tests + '../../db/sync/WebRemote': path.resolve(__dirname, './tests/mocks/MockWebRemote.ts') } }, worker: { @@ -31,7 +33,7 @@ const config: UserConfigExport = { // Don't optimise these packages as they contain web workers and WASM files. 
// https://github.com/vitejs/vite/issues/11672#issuecomment-1415820673 exclude: ['@journeyapps/wa-sqlite', '@powersync/web'], - include: ['bson', 'comlink', 'async-mutex'] + include: [] }, plugins: [wasm(), topLevelAwait()], test: { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 474e2bf2c..cd067ab78 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -93,8 +93,8 @@ importers: specifier: ^19.2.4 version: 19.2.14(@angular/core@19.2.14(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@powersync/web': specifier: workspace:* version: link:../../packages/web @@ -281,8 +281,8 @@ importers: specifier: latest version: 7.0.3(@capacitor/core@7.4.4) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@powersync/capacitor': specifier: workspace:* version: link:../../packages/capacitor @@ -348,8 +348,8 @@ importers: specifier: ^11.13.0 version: 11.14.0(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/icons-material': specifier: ^5.15.16 version: 5.17.1(@mui/material@5.17.1(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.14.0(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) @@ -542,8 +542,8 @@ importers: specifier: ^5.0.13 version: 5.2.5 '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@lexical/react': specifier: ^0.15.0 version: 0.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(yjs@13.6.27) @@ -715,8 +715,8 @@ importers: demos/react-multi-client: dependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + 
version: 1.4.1 '@powersync/react': specifier: workspace:* version: link:../../packages/react @@ -1145,8 +1145,8 @@ importers: specifier: ^2.5.0 version: 2.5.0(react-native@0.76.9(@babel/core@7.26.10)(@babel/preset-env@7.28.5(@babel/core@7.26.10))(@react-native-community/cli@15.1.3(typescript@5.9.2))(@types/react@18.3.23)(encoding@0.1.13)(react@18.3.1))(react@18.3.1) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@powersync/attachments': specifier: workspace:* version: link:../../packages/attachments @@ -1299,8 +1299,8 @@ importers: specifier: 11.11.5 version: 11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/icons-material': specifier: ^5.15.12 version: 5.17.1(@mui/material@5.17.1(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) @@ -1384,8 +1384,8 @@ importers: specifier: 11.11.5 version: 11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/icons-material': specifier: ^5.15.12 version: 5.17.1(@mui/material@5.17.1(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) @@ -1469,8 +1469,8 @@ importers: specifier: 11.11.5 version: 11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) 
'@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/icons-material': specifier: ^5.15.12 version: 5.17.1(@mui/material@5.17.1(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) @@ -1554,8 +1554,8 @@ importers: specifier: 11.11.5 version: 11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/icons-material': specifier: ^5.15.12 version: 5.17.1(@mui/material@5.17.1(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.23)(react@18.3.1) @@ -1718,8 +1718,8 @@ importers: demos/yjs-react-supabase-text-collab: dependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/material': specifier: ^5.15.12 version: 5.17.1(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.14.0(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -2033,8 +2033,8 @@ importers: version: link:../common devDependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@powersync/web': specifier: workspace:* version: link:../web @@ -2064,8 +2064,8 @@ importers: version: 0.28.2 devDependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 
'@powersync/web': specifier: workspace:* version: link:../web @@ -2327,8 +2327,8 @@ importers: version: 12.1.0 devDependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@types/uuid': specifier: ^9.0.6 version: 9.0.8 @@ -2375,8 +2375,8 @@ importers: tools/diagnostics-app: dependencies: '@journeyapps/wa-sqlite': - specifier: ^1.4.0 - version: 1.4.0 + specifier: ^1.4.1 + version: 1.4.1 '@mui/material': specifier: ^5.15.12 version: 5.17.1(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@emotion/styled@11.14.0(@emotion/react@11.14.0(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react@18.3.1))(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -6028,8 +6028,8 @@ packages: react: '*' react-native: '*' - '@journeyapps/wa-sqlite@1.4.0': - resolution: {integrity: sha512-2eLwUhJHTI9aKAoep20ZS/OZnDQ8kKtIRcLCj54/t8B14cBPAl+NqwnRP6YEUxbrfXOxXdtvIDCX9jY6iFEcOA==} + '@journeyapps/wa-sqlite@1.4.1': + resolution: {integrity: sha512-xAWys6opteBpWaKmHG1pZvBmQViEKFK/46YVEkYlWxa4F9VAG0gIjCpfIdcQvXdqZf7X3ByADGmNBcR/cJ1DqQ==} '@jridgewell/gen-mapping@0.3.13': resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} @@ -10894,8 +10894,8 @@ packages: base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - baseline-browser-mapping@2.9.2: - resolution: {integrity: sha512-PxSsosKQjI38iXkmb3d0Y32efqyA0uW4s41u4IVBsLlWLhCiYNpH/AfNOVWRqCQBlD8TFJTz6OUWNd4DFJCnmw==} + baseline-browser-mapping@2.9.4: + resolution: {integrity: sha512-ZCQ9GEWl73BVm8bu5Fts8nt7MHdbt5vY9bP6WGnUh+r3l8M7CgfyTlwsgCbMC66BNxPr6Xoce3j66Ms5YUQTNA==} hasBin: true basic-auth@2.0.1: @@ -12583,8 +12583,8 @@ packages: electron-to-chromium@1.5.161: resolution: {integrity: sha512-hwtetwfKNZo/UlwHIVBlKZVdy7o8bIZxxKs0Mv/ROPiQQQmDgdm5a+KvKtBsxM8ZjFzTaCeLoodZ8jiBE3o9rA==} - 
electron-to-chromium@1.5.264: - resolution: {integrity: sha512-1tEf0nLgltC3iy9wtlYDlQDc5Rg9lEKVjEmIHJ21rI9OcqkvD45K1oyNIRA4rR1z3LgJ7KeGzEBojVcV6m4qjA==} + electron-to-chromium@1.5.266: + resolution: {integrity: sha512-kgWEglXvkEfMH7rxP5OSZZwnaDWT7J9EoZCujhnpLbfi0bbNtRkgdX2E3gt0Uer11c61qCYktB3hwkAS325sJg==} electron-winstaller@5.4.0: resolution: {integrity: sha512-bO3y10YikuUwUuDUQRM4KfwNkKhnpVO7IPdbsrejwN9/AABJzzTQ4GeHwyzNSrVO+tEH3/Np255a3sVZpZDjvg==} @@ -27135,7 +27135,7 @@ snapshots: react: 18.3.1 react-native: 0.76.9(@babel/core@7.26.10)(@babel/preset-env@7.28.5(@babel/core@7.26.10))(@react-native-community/cli@15.1.3(typescript@5.9.2))(@types/react@18.3.23)(encoding@0.1.13)(react@18.3.1) - '@journeyapps/wa-sqlite@1.4.0': {} + '@journeyapps/wa-sqlite@1.4.1': {} '@jridgewell/gen-mapping@0.3.13': dependencies: @@ -30056,7 +30056,9 @@ snapshots: transitivePeerDependencies: - '@babel/core' - '@babel/preset-env' + - bufferutil - supports-color + - utf-8-validate '@react-native/metro-config@0.78.0(@babel/core@7.26.10)(@babel/preset-env@7.27.2(@babel/core@7.26.10))': dependencies: @@ -30067,7 +30069,9 @@ snapshots: transitivePeerDependencies: - '@babel/core' - '@babel/preset-env' + - bufferutil - supports-color + - utf-8-validate '@react-native/normalize-color@2.1.0': {} @@ -34464,7 +34468,7 @@ snapshots: base64-js@1.5.1: {} - baseline-browser-mapping@2.9.2: {} + baseline-browser-mapping@2.9.4: {} basic-auth@2.0.1: dependencies: @@ -34705,9 +34709,9 @@ snapshots: browserslist@4.28.1: dependencies: - baseline-browser-mapping: 2.9.2 + baseline-browser-mapping: 2.9.4 caniuse-lite: 1.0.30001759 - electron-to-chromium: 1.5.264 + electron-to-chromium: 1.5.266 node-releases: 2.0.27 update-browserslist-db: 1.2.2(browserslist@4.28.1) @@ -36506,7 +36510,7 @@ snapshots: electron-to-chromium@1.5.161: {} - electron-to-chromium@1.5.264: {} + electron-to-chromium@1.5.266: {} electron-winstaller@5.4.0: dependencies: diff --git a/tools/diagnostics-app/package.json 
b/tools/diagnostics-app/package.json index d0d896f18..10ade369b 100644 --- a/tools/diagnostics-app/package.json +++ b/tools/diagnostics-app/package.json @@ -9,7 +9,7 @@ "start": "pnpm build && pnpm preview" }, "dependencies": { - "@journeyapps/wa-sqlite": "^1.4.0", + "@journeyapps/wa-sqlite": "^1.4.1", "@mui/material": "^5.15.12", "@mui/x-data-grid": "^6.19.6", "@powersync/react": "workspace:*",