Designing an Offline-First Frontend Architecture
Building Applications That Work Without Network Connectivity
Offline-first is not about gracefully degrading when the network fails—it's about designing applications where offline is the default state and network connectivity is an enhancement. This architectural shift requires rethinking data flow, storage strategies, conflict resolution, and UI patterns from the ground up.
This article presents production patterns for building offline-capable applications that maintain data integrity, resolve conflicts predictably, and provide seamless user experiences regardless of connectivity state.
Offline-First Architecture Overview
┌─────────────────────────────────────────────────────────────────────────────┐
│ Offline-First Architecture Layers │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ ┌─────────────────────────────────────────────────────────────────────┐ │
│ │ User Interface │ │
│ │ ┌──────────────┐ ┌───────────────┐ ┌─────────────────────────┐ │ │
│ │ │ Optimistic │ │ Sync Status │ │ Conflict Resolution │ │ │
│ │ │ Updates │ │ Indicator │ │ UI │ │ │
│ │ └──────────────┘ └───────────────┘ └─────────────────────────┘ │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌─────────────────────────────────────────────────────────────────────┐ │
│ │ Application State Layer │ │
│ │ ┌──────────────┐ ┌───────────────┐ ┌─────────────────────────┐ │ │
│ │ │ Local State │ │ Sync Queue │ │ Conflict Detection │ │ │
│ │ │ (Reactive) │ │ (Operations) │ │ & Resolution │ │ │
│ │ └──────────────┘ └───────────────┘ └─────────────────────────┘ │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌─────────────────────────────────────────────────────────────────────┐ │
│ │ Local Database Layer │ │
│ │ ┌──────────────┐ ┌───────────────┐ ┌─────────────────────────┐ │ │
│ │ │ IndexedDB │ │ SQLite │ │ Change Tracking │ │ │
│ │ │ (Documents) │ │ (OPFS) │ │ (Versions/Vectors) │ │ │
│ │ └──────────────┘ └───────────────┘ └─────────────────────────┘ │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌─────────────────────────────────────────────────────────────────────┐ │
│ │ Sync Engine Layer │ │
│ │ ┌──────────────┐ ┌───────────────┐ ┌─────────────────────────┐ │ │
│ │ │ Background │ │ Delta Sync │ │ Retry & Backoff │ │ │
│ │ │ Sync │ │ Protocol │ │ Logic │ │ │
│ │ └──────────────┘ └───────────────┘ └─────────────────────────┘ │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌─────────────────────────────────────────────────────────────────────┐ │
│ │ Server │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
Local Database Selection
Database Comparison for Offline Storage
┌─────────────────────────────────────────────────────────────────────────────┐
│ Local Database Comparison │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ IndexedDB SQLite (OPFS) localStorage │
│ ───────── ───────────── ──────────── │
│ │
│ Strengths: Strengths: Strengths: │
│ • Native browser API • Full SQL support • Simple API │
│ • Good for documents • Complex queries • Synchronous │
│ • Transaction support • Joins, aggregations • Wide support │
│ • Async, non-blocking • ACID compliance │
│ • Familiar to devs Weaknesses: │
│ Weaknesses: • 5-10MB limit │
│ • Verbose API Weaknesses: • Blocking │
│ • No SQL • Requires WASM • No indexing │
│ • Limited querying • OPFS browser support • String only │
│ • Cursor-based iteration • Larger bundle size │
│ │
│ Best for: Best for: Best for: │
│ • Document stores • Relational data • Small config │
│ • Key-value with indexes • Complex queries • Session state │
│ • Most offline apps • Existing SQL schemas • Simple caches │
│ │
│ Size limit: Size limit: Size limit: │
│ ~50% of available disk ~50% of available disk 5-10MB │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
IndexedDB Abstraction Layer
// src/offline/database.ts
// Shape of one object store: primary-key configuration plus the secondary
// indexes to create during the upgrade transaction.
interface TableSchema {
  name: string; // object store name
  keyPath: string; // property used as the primary key
  autoIncrement?: boolean; // let IndexedDB generate numeric keys when true
  indexes: Array<{
    name: string; // index name referenced by query()
    keyPath: string | string[]; // indexed property (array = compound index)
    unique?: boolean; // enforce uniqueness on the indexed value
  }>;
}
// Full database schema. Bump `version` whenever `tables` changes so the
// onupgradeneeded handler runs and applies the new stores/indexes.
interface DatabaseConfig {
  name: string;
  version: number;
  tables: TableSchema[];
}
/**
 * Thin promise-based wrapper over IndexedDB.
 *
 * Call open() before any other method; all other methods throw a
 * descriptive error if the connection has not been established.
 */
class OfflineDatabase {
  private db: IDBDatabase | null = null;
  private config: DatabaseConfig;

  constructor(config: DatabaseConfig) {
    this.config = config;
  }

  /**
   * Opens (or creates/upgrades) the database described by the config.
   * Resolves once the connection is ready.
   */
  async open(): Promise<void> {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(this.config.name, this.config.version);
      request.onerror = () => reject(request.error);
      request.onsuccess = () => {
        this.db = request.result;
        resolve();
      };
      // Runs only when the requested version is newer than the stored one;
      // creates any missing object stores and indexes from the schema.
      request.onupgradeneeded = (event) => {
        const db = (event.target as IDBOpenDBRequest).result;
        const transaction = (event.target as IDBOpenDBRequest).transaction!;
        for (const table of this.config.tables) {
          let store: IDBObjectStore;
          if (!db.objectStoreNames.contains(table.name)) {
            store = db.createObjectStore(table.name, {
              keyPath: table.keyPath,
              autoIncrement: table.autoIncrement,
            });
          } else {
            // Store survives from a previous version; reuse it so newly
            // declared indexes can still be added below.
            store = transaction.objectStore(table.name);
          }
          for (const index of table.indexes) {
            if (!store.indexNames.contains(index.name)) {
              store.createIndex(index.name, index.keyPath, {
                unique: index.unique,
              });
            }
          }
        }
      };
    });
  }

  /** Returns the open connection, or throws a clear error instead of the
   *  opaque TypeError a bare `this.db!` would eventually produce. */
  private requireDb(): IDBDatabase {
    if (!this.db) {
      throw new Error('OfflineDatabase is not open; call open() first');
    }
    return this.db;
  }

  /** Fetches one record by primary key, or undefined when absent. */
  async get<T>(table: string, key: IDBValidKey): Promise<T | undefined> {
    return this.transaction(table, 'readonly', (store) => {
      return store.get(key);
    });
  }

  /** Fetches all records (optionally bounded by a key range and count). */
  async getAll<T>(
    table: string,
    query?: IDBKeyRange,
    count?: number
  ): Promise<T[]> {
    return this.transaction(table, 'readonly', (store) => {
      return store.getAll(query, count);
    });
  }

  /** Inserts or replaces a record; resolves with its primary key. */
  async put<T>(table: string, value: T): Promise<IDBValidKey> {
    return this.transaction(table, 'readwrite', (store) => {
      return store.put(value);
    });
  }

  /** Inserts or replaces several records inside one transaction. */
  async putMany<T>(table: string, values: T[]): Promise<void> {
    return this.transaction(table, 'readwrite', (store) => {
      for (const value of values) {
        store.put(value);
      }
      return undefined;
    });
  }

  /** Deletes a record by primary key (no-op if the key is absent). */
  async delete(table: string, key: IDBValidKey): Promise<void> {
    return this.transaction(table, 'readwrite', (store) => {
      store.delete(key);
      return undefined;
    });
  }

  /** Removes every record in the store. */
  async clear(table: string): Promise<void> {
    return this.transaction(table, 'readwrite', (store) => {
      store.clear();
      return undefined;
    });
  }

  /**
   * Cursor-based index query with offset/limit pagination.
   * Skipped rows are iterated (IndexedDB has no native offset), so large
   * offsets cost O(offset) cursor steps.
   */
  async query<T>(
    table: string,
    indexName: string,
    range: IDBKeyRange,
    options: { limit?: number; offset?: number; direction?: IDBCursorDirection } = {}
  ): Promise<T[]> {
    const { limit, offset = 0, direction = 'next' } = options;
    return new Promise((resolve, reject) => {
      const tx = this.requireDb().transaction(table, 'readonly');
      const store = tx.objectStore(table);
      const index = store.index(indexName);
      const request = index.openCursor(range, direction);
      const results: T[] = [];
      let skipped = 0;
      request.onsuccess = () => {
        const cursor = request.result;
        if (!cursor) {
          resolve(results);
          return;
        }
        if (skipped < offset) {
          skipped++;
          cursor.continue();
          return;
        }
        if (limit && results.length >= limit) {
          resolve(results);
          return;
        }
        results.push(cursor.value);
        cursor.continue();
      };
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Runs a single-store operation in its own transaction and resolves with
   * the request's result once the transaction commits.
   */
  private transaction<T>(
    table: string,
    mode: IDBTransactionMode,
    operation: (store: IDBObjectStore) => IDBRequest | undefined
  ): Promise<T> {
    return new Promise((resolve, reject) => {
      const tx = this.requireDb().transaction(table, mode);
      const store = tx.objectStore(table);
      const request = operation(store);
      tx.oncomplete = () => resolve(request?.result);
      tx.onerror = () => reject(tx.error);
      tx.onabort = () => reject(tx.error);
    });
  }

  /**
   * Runs an async operation spanning multiple stores in one transaction.
   *
   * NOTE: IndexedDB auto-commits as soon as no requests are pending, so the
   * operation must not await non-IDB work between requests. Completion
   * handlers are registered *before* the operation runs — the previous
   * version attached `oncomplete` after the operation's promise settled,
   * which could miss an already-committed transaction and hang forever.
   */
  async runInTransaction<T>(
    tables: string[],
    mode: IDBTransactionMode,
    operation: (stores: Record<string, IDBObjectStore>) => Promise<T>
  ): Promise<T> {
    return new Promise((resolve, reject) => {
      const tx = this.requireDb().transaction(tables, mode);
      const stores: Record<string, IDBObjectStore> = {};
      for (const table of tables) {
        stores[table] = tx.objectStore(table);
      }
      let result!: T;
      tx.oncomplete = () => resolve(result);
      tx.onerror = () => reject(tx.error);
      tx.onabort = () => reject(tx.error);
      operation(stores)
        .then((value) => {
          result = value;
        })
        .catch((error) => {
          // Abort rolls the transaction back; reject with the app error
          // (the onabort rejection that follows is a harmless no-op).
          try {
            tx.abort();
          } catch {
            // Transaction already finished; nothing to abort.
          }
          reject(error);
        });
    });
  }

  /** Closes the connection; subsequent calls must open() again. */
  close() {
    this.db?.close();
    this.db = null;
  }
}
// Database schema definition
// Version 1 schema — bump `version` when adding stores or indexes so the
// upgrade handler re-runs.
const offlineDbConfig: DatabaseConfig = {
  name: 'app-offline',
  version: 1,
  tables: [
    {
      // Application documents; syncStatus is indexed so pending changes
      // can be located without scanning the whole store.
      name: 'documents',
      keyPath: 'id',
      indexes: [
        { name: 'by-type', keyPath: 'type' },
        { name: 'by-updated', keyPath: 'updatedAt' },
        { name: 'by-sync-status', keyPath: 'syncStatus' },
      ],
    },
    {
      // Outbound operation log; IndexedDB assigns the numeric id.
      name: 'sync-queue',
      keyPath: 'id',
      autoIncrement: true,
      indexes: [
        { name: 'by-timestamp', keyPath: 'timestamp' },
        { name: 'by-status', keyPath: 'status' },
      ],
    },
    {
      // Small key-value store (e.g. the sync checkpoint lives here).
      name: 'metadata',
      keyPath: 'key',
      indexes: [],
    },
  ],
};
export { OfflineDatabase, offlineDbConfig, DatabaseConfig };
Sync Queue and Operation Log
Tracking Offline Changes
// src/offline/sync-queue.ts
import { OfflineDatabase } from './database';
// Kind of local mutation awaiting upload.
type OperationType = 'create' | 'update' | 'delete';
// Lifecycle of a queued operation.
type SyncStatus = 'pending' | 'syncing' | 'synced' | 'failed';
// One queued mutation. `id` is assigned by IndexedDB (autoIncrement store).
interface SyncOperation {
  id?: number;
  operationType: OperationType;
  entityType: string; // logical collection the entity belongs to
  entityId: string;
  data: unknown; // payload forwarded to the server sync function
  timestamp: number; // creation time; pushed into the future for backoff
  status: SyncStatus;
  retryCount: number;
  lastError?: string; // message from the most recent failure, if any
  vectorClock?: Record<string, number>; // causal version, when tracked
}
// Tuning knobs for the queue.
interface SyncQueueConfig {
  maxRetries: number; // failures before an op is parked as 'failed'
  retryBackoff: number[]; // per-attempt delay in ms (last entry repeats)
  batchSize: number; // max operations per sync request
}
/**
 * Durable outbox for offline mutations.
 *
 * Operations are persisted in the 'sync-queue' store, uploaded in batches
 * via the injected sync function, retried with configurable backoff, and
 * surfaced to the UI via status callbacks and 'sync-conflict' events.
 */
class SyncQueue {
  // Guards against overlapping processQueue() runs.
  private isProcessing = false;
  private onSyncCallbacks: Array<(status: { pending: number; failed: number }) => void> = [];

  constructor(
    private db: OfflineDatabase,
    private config: SyncQueueConfig,
    private syncFn: (operations: SyncOperation[]) => Promise<SyncResult[]>
  ) {}

  /**
   * Records a local mutation for later synchronization and, when the
   * browser reports connectivity, starts a sync pass immediately.
   */
  async enqueue(operation: Omit<SyncOperation, 'id' | 'timestamp' | 'status' | 'retryCount'>): Promise<void> {
    const op: SyncOperation = {
      ...operation,
      timestamp: Date.now(),
      status: 'pending',
      retryCount: 0,
    };
    await this.db.put('sync-queue', op);
    void this.notifyListeners();
    if (navigator.onLine) {
      void this.processQueue();
    }
  }

  /**
   * Drains the queue in batches.
   *
   * Operations whose backoff timestamp lies in the future are skipped until
   * they become due — the previous version ignored the delay written by
   * handleFailure() and retried failed operations immediately, defeating
   * the exponential backoff entirely. Re-entrant calls and offline states
   * are no-ops.
   */
  async processQueue(): Promise<void> {
    if (this.isProcessing || !navigator.onLine) {
      return;
    }
    this.isProcessing = true;
    try {
      while (true) {
        const pending = await this.db.query<SyncOperation>(
          'sync-queue',
          'by-status',
          IDBKeyRange.only('pending'),
          { limit: this.config.batchSize }
        );
        // Honor retry backoff: handleFailure() pushes `timestamp` into the
        // future, so only operations that are due may be sent now.
        const now = Date.now();
        const due = pending.filter((op) => op.timestamp <= now);
        if (due.length === 0) break;
        // Mark the batch as in-flight so a concurrent enqueue cannot
        // double-send it.
        for (const op of due) {
          op.status = 'syncing';
          await this.db.put('sync-queue', op);
        }
        try {
          const results = await this.syncFn(due);
          for (let i = 0; i < due.length; i++) {
            const op = due[i];
            const result = results[i];
            if (result.success) {
              // Confirmed by the server: drop it from the outbox.
              await this.db.delete('sync-queue', op.id!);
            } else if (result.conflict) {
              await this.handleConflict(op, result);
            } else {
              await this.handleFailure(op, result.error);
            }
          }
        } catch (error) {
          // Transport-level failure: return the batch to 'pending' so it is
          // retried on the next connectivity event.
          for (const op of due) {
            op.status = 'pending';
            await this.db.put('sync-queue', op);
          }
          break;
        }
      }
    } finally {
      this.isProcessing = false;
      void this.notifyListeners();
    }
  }

  /** Applies retry accounting; parks the op as 'failed' after maxRetries. */
  private async handleFailure(operation: SyncOperation, error?: string): Promise<void> {
    operation.retryCount++;
    operation.lastError = error;
    if (operation.retryCount >= this.config.maxRetries) {
      operation.status = 'failed';
    } else {
      operation.status = 'pending';
      // Push the timestamp forward; processQueue() only sends operations
      // whose timestamp is due, which implements the backoff delay.
      const delay = this.config.retryBackoff[
        Math.min(operation.retryCount - 1, this.config.retryBackoff.length - 1)
      ];
      operation.timestamp = Date.now() + delay;
    }
    await this.db.put('sync-queue', operation);
  }

  /**
   * Hands a server-detected conflict to the application via a DOM event.
   * The operation stays in the store (status 'syncing') until the handler
   * calls resolve() or discard() — NOTE(review): confirm the application
   * always answers, otherwise the op is parked indefinitely.
   */
  private async handleConflict(
    operation: SyncOperation,
    result: SyncResult
  ): Promise<void> {
    const event = new CustomEvent('sync-conflict', {
      detail: {
        operation,
        serverData: result.serverData,
        // Re-queue the operation with user-merged data.
        resolve: async (resolvedData: unknown) => {
          operation.data = resolvedData;
          operation.status = 'pending';
          operation.retryCount = 0;
          await this.db.put('sync-queue', operation);
          void this.processQueue();
        },
        // Drop the local change and adopt the server's copy.
        discard: async () => {
          await this.db.delete('sync-queue', operation.id!);
          if (result.serverData) {
            await this.db.put('documents', result.serverData);
          }
        },
      },
    });
    window.dispatchEvent(event);
  }

  /** Number of operations still waiting to be uploaded. */
  async getPendingCount(): Promise<number> {
    const pending = await this.db.query<SyncOperation>(
      'sync-queue',
      'by-status',
      IDBKeyRange.only('pending')
    );
    return pending.length;
  }

  /** Operations that exhausted their retries and need user attention. */
  async getFailedOperations(): Promise<SyncOperation[]> {
    return this.db.query<SyncOperation>(
      'sync-queue',
      'by-status',
      IDBKeyRange.only('failed')
    );
  }

  /** Resets a failed operation and immediately attempts another sync pass. */
  async retryFailed(operationId: number): Promise<void> {
    const op = await this.db.get<SyncOperation>('sync-queue', operationId);
    if (op && op.status === 'failed') {
      op.status = 'pending';
      op.retryCount = 0;
      op.timestamp = Date.now();
      await this.db.put('sync-queue', op);
      void this.processQueue();
    }
  }

  /** Permanently drops a failed operation from the outbox. */
  async discardFailed(operationId: number): Promise<void> {
    await this.db.delete('sync-queue', operationId);
    void this.notifyListeners();
  }

  /** Subscribes to pending/failed counts; returns an unsubscribe function. */
  onStatusChange(callback: (status: { pending: number; failed: number }) => void): () => void {
    this.onSyncCallbacks.push(callback);
    return () => {
      const index = this.onSyncCallbacks.indexOf(callback);
      if (index >= 0) {
        this.onSyncCallbacks.splice(index, 1);
      }
    };
  }

  private async notifyListeners(): Promise<void> {
    const pending = await this.getPendingCount();
    const failed = await this.getFailedOperations();
    for (const callback of this.onSyncCallbacks) {
      callback({ pending, failed: failed.length });
    }
  }
}
// Per-operation outcome returned by the server sync function.
interface SyncResult {
  success: boolean;
  conflict?: boolean; // true when the server holds a concurrent version
  serverData?: unknown; // server's copy, supplied alongside a conflict
  error?: string; // failure reason when neither success nor conflict
}
export { SyncQueue, SyncOperation, SyncResult, SyncStatus };
Conflict Resolution Strategies
Vector Clocks and Conflict Detection
┌─────────────────────────────────────────────────────────────────────────────┐
│ Conflict Resolution Strategies │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ Last-Write-Wins (LWW) Server-Wins Client-Wins │
│ ───────────────────── ─────────── ──────────── │
│ │
│ ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ │
│ │ A=1 │ │ A=2 │ │ A=1 │ │ A=2 │ │ A=1 │ │ A=2 │ │
│ │ t=5 │ │ t=7 │ │ │ │ │ │ │ │ │ │
│ └──┬──┘ └──┬──┘ └──┬──┘ └──┬──┘ └──┬──┘ └──┬──┘ │
│ │ │ │ │ │ │ │
│ └─────┬─────┘ └─────┬─────┘ └─────┬─────┘ │
│ │ │ │ │
│ ▼ ▼ ▼ │
│ ┌───────┐ ┌───────┐ ┌───────┐ │
│ │ A=2 │ │ A=2 │ │ A=1 │ │
│ │ (t=7) │ │(server│ │(client│ │
│ └───────┘ │ wins) │ │ wins) │ │
│ └───────┘ └───────┘ │
│ │
│ │
│ Merge Manual Resolution │
│ ───── ───────────────── │
│ │
│ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ │
│ │ name:A │ │ name:A │ │ name:A │ │ name:B │ │
│ │ age:25 │ │ city:NY │ │ age:25 │ │ age:30 │ │
│ └────┬────┘ └────┬────┘ └────┬────┘ └────┬────┘ │
│ │ │ │ │ │
│ └───────┬───────┘ └───────┬───────┘ │
│ │ │ │
│ ▼ ▼ │
│ ┌─────────────┐ ┌─────────────┐ │
│ │ name:A │ │ User picks │ │
│ │ age:25 │ │ resolution │ │
│ │ city:NY │ └─────────────┘ │
│ └─────────────┘ │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
// src/offline/conflict-resolution.ts
// Map of node/client id -> logical event counter. Absent entries count as 0.
type VectorClock = Record<string, number>;
// A document plus the metadata needed for causal conflict detection.
interface VersionedDocument<T> {
  id: string;
  data: T;
  vectorClock: VectorClock;
  lastModifiedBy: string; // client id of the last writer
  lastModifiedAt: number; // epoch millis; used for LWW tie-breaking
}
// Compare vector clocks
/**
 * Determines the causal relationship between two vector clocks.
 * Entries missing from either clock are treated as zero.
 */
function compareVectorClocks(
  a: VectorClock,
  b: VectorClock
): 'equal' | 'a_dominates' | 'b_dominates' | 'concurrent' {
  let aAhead = false;
  let bAhead = false;
  for (const node of new Set([...Object.keys(a), ...Object.keys(b)])) {
    const left = a[node] ?? 0;
    const right = b[node] ?? 0;
    if (left > right) aAhead = true;
    else if (right > left) bAhead = true;
  }
  // Both ahead somewhere => concurrent; neither ahead => identical.
  if (aAhead === bAhead) return aAhead ? 'concurrent' : 'equal';
  return aAhead ? 'a_dominates' : 'b_dominates';
}
// Increment vector clock
/** Returns a copy of `clock` with `nodeId`'s counter advanced by one. */
function incrementClock(clock: VectorClock, nodeId: string): VectorClock {
  const next: VectorClock = { ...clock };
  next[nodeId] = (next[nodeId] ?? 0) + 1;
  return next;
}
// Merge vector clocks (take max of each component)
/** Component-wise maximum of two vector clocks (missing entries are 0). */
function mergeClock(a: VectorClock, b: VectorClock): VectorClock {
  const result: VectorClock = {};
  for (const node of new Set([...Object.keys(a), ...Object.keys(b)])) {
    result[node] = Math.max(a[node] ?? 0, b[node] ?? 0);
  }
  return result;
}
// Conflict resolution strategies
// A strategy receives the two concurrent versions and returns the winner
// (or a merged document), or 'manual' to defer to user resolution.
type ConflictStrategy<T> = (
  local: VersionedDocument<T>,
  remote: VersionedDocument<T>
) => VersionedDocument<T> | 'manual';

// The built-in strategies below are generic rather than ConflictStrategy<any>:
// `any` silently disabled type checking at every call site, while a generic
// function remains assignable wherever a ConflictStrategy<T> is expected.

/** Keeps whichever version was written most recently (ties go to remote). */
const lastWriteWins = <T>(
  local: VersionedDocument<T>,
  remote: VersionedDocument<T>
): VersionedDocument<T> =>
  local.lastModifiedAt > remote.lastModifiedAt ? local : remote;

/** Always prefers the server's version. */
const serverWins = <T>(
  local: VersionedDocument<T>,
  remote: VersionedDocument<T>
): VersionedDocument<T> => remote;

/** Always prefers the local (client) version. */
const clientWins = <T>(
  local: VersionedDocument<T>,
  remote: VersionedDocument<T>
): VersionedDocument<T> => local;
// Field-level merge for objects: resolves each property independently so
// non-overlapping concurrent edits (one side sets `age`, the other `city`)
// can both survive instead of one whole document clobbering the other.
function fieldLevelMerge<T extends Record<string, unknown>>(
  local: VersionedDocument<T>,
  remote: VersionedDocument<T>,
  fieldClocks: Record<string, VectorClock>
): VersionedDocument<T> {
  const merged: T = {} as T;
  // Union of keys from both sides so fields added on either side survive.
  const allFields = new Set([
    ...Object.keys(local.data),
    ...Object.keys(remote.data),
  ]);
  for (const field of allFields) {
    // NOTE(review): the local side uses a per-field clock keyed
    // "<docId>:<field>" while the remote side uses the whole-document
    // clock — an asymmetric comparison. Confirm this is intentional; a
    // per-field remote clock would normally be expected here.
    const localClock = fieldClocks[`${local.id}:${field}`] || {};
    const remoteClock = remote.vectorClock;
    const comparison = compareVectorClocks(localClock, remoteClock);
    if (comparison === 'a_dominates' || comparison === 'equal') {
      merged[field as keyof T] = local.data[field as keyof T];
    } else if (comparison === 'b_dominates') {
      merged[field as keyof T] = remote.data[field as keyof T];
    } else {
      // Concurrent: use LWW for this field
      merged[field as keyof T] = local.lastModifiedAt > remote.lastModifiedAt
        ? local.data[field as keyof T]
        : remote.data[field as keyof T];
    }
  }
  // Result carries the merged clock and the most recent writer's identity.
  return {
    id: local.id,
    data: merged,
    vectorClock: mergeClock(local.vectorClock, remote.vectorClock),
    lastModifiedBy: local.lastModifiedAt > remote.lastModifiedAt
      ? local.lastModifiedBy
      : remote.lastModifiedBy,
    lastModifiedAt: Math.max(local.lastModifiedAt, remote.lastModifiedAt),
  };
}
// Conflict resolver class
/**
 * Routes conflicting document versions through per-entity-type strategies,
 * falling back to a default. Strategies that return 'manual' park the pair
 * in a queue until the application answers via resolveManually().
 */
class ConflictResolver<T> {
  private strategies: Map<string, ConflictStrategy<T>> = new Map();
  private manualResolutionQueue: Array<{
    local: VersionedDocument<T>;
    remote: VersionedDocument<T>;
    resolve: (doc: VersionedDocument<T>) => void;
  }> = [];

  constructor(private defaultStrategy: ConflictStrategy<T> = lastWriteWins) {}

  /** Overrides the default strategy for one entity type. */
  registerStrategy(entityType: string, strategy: ConflictStrategy<T>) {
    this.strategies.set(entityType, strategy);
  }

  /**
   * Picks a winner for two document versions. Returns synchronously when
   * one side causally dominates or a strategy decides; returns a promise
   * that settles after user input when the strategy says 'manual'.
   */
  resolve(
    entityType: string,
    local: VersionedDocument<T>,
    remote: VersionedDocument<T>
  ): VersionedDocument<T> | Promise<VersionedDocument<T>> {
    const ordering = compareVectorClocks(local.vectorClock, remote.vectorClock);
    // One side strictly dominates (or the clocks are identical): no conflict.
    if (ordering !== 'concurrent') {
      return ordering === 'b_dominates' ? remote : local;
    }
    // Truly concurrent edits: delegate to the configured strategy.
    const strategy = this.strategies.get(entityType) ?? this.defaultStrategy;
    const outcome = strategy(local, remote);
    if (outcome !== 'manual') {
      return outcome;
    }
    return new Promise((resolve) => {
      this.manualResolutionQueue.push({ local, remote, resolve });
      this.emitManualResolutionNeeded(local, remote);
    });
  }

  private emitManualResolutionNeeded(
    local: VersionedDocument<T>,
    remote: VersionedDocument<T>
  ) {
    const event = new CustomEvent('conflict-needs-resolution', {
      detail: { local, remote },
    });
    window.dispatchEvent(event);
  }

  /** Completes a parked manual conflict, identified by its document id. */
  resolveManually(
    documentId: string,
    resolvedDocument: VersionedDocument<T>
  ) {
    const position = this.manualResolutionQueue.findIndex(
      (entry) => entry.local.id === documentId
    );
    if (position < 0) return;
    const [entry] = this.manualResolutionQueue.splice(position, 1);
    entry.resolve(resolvedDocument);
  }

  /** Snapshot of conflicts still awaiting manual resolution. */
  getPendingConflicts(): Array<{
    local: VersionedDocument<T>;
    remote: VersionedDocument<T>;
  }> {
    return this.manualResolutionQueue.map((entry) => ({
      local: entry.local,
      remote: entry.remote,
    }));
  }
}
// Public API of the conflict-resolution module.
export {
  VectorClock,
  VersionedDocument,
  ConflictResolver,
  compareVectorClocks,
  incrementClock,
  mergeClock,
  fieldLevelMerge,
  lastWriteWins,
  serverWins,
  clientWins,
};
Sync Protocol Design
Delta Synchronization
// src/offline/sync-protocol.ts
import { VectorClock, VersionedDocument, mergeClock } from './conflict-resolution';
// Where the client last synced to; the server uses it to compute deltas.
interface SyncCheckpoint {
  lastSyncTimestamp: number; // server time of the last successful exchange
  serverSequence: number; // server's monotonic exchange counter
  vectorClock: VectorClock; // merged clock as of the checkpoint
}
// Client -> server: local changes plus the checkpoint they are based on.
interface DeltaSyncRequest {
  clientId: string;
  checkpoint: SyncCheckpoint;
  changes: VersionedDocument<unknown>[];
}
// Server -> client: server-side changes, detected conflicts, and the new
// checkpoint. `fullSyncRequired` tells the client its checkpoint is too
// stale for a delta exchange.
interface DeltaSyncResponse {
  serverChanges: VersionedDocument<unknown>[];
  conflicts: Array<{
    clientVersion: VersionedDocument<unknown>;
    serverVersion: VersionedDocument<unknown>;
  }>;
  newCheckpoint: SyncCheckpoint;
  fullSyncRequired: boolean;
}
/**
 * Client half of the delta-sync protocol. Tracks a checkpoint in the local
 * 'metadata' store and exchanges change sets with the server over HTTP.
 */
class SyncProtocol {
  private clientId: string;
  // Last server-acknowledged sync position; null until initialize() runs.
  private checkpoint: SyncCheckpoint | null = null;

  constructor(
    clientId: string,
    private apiEndpoint: string,
    private db: import('./database').OfflineDatabase
  ) {
    this.clientId = clientId;
  }

  /** Loads the persisted checkpoint from local metadata, or starts fresh. */
  async initialize(): Promise<void> {
    const stored = await this.db.get<{ key: string; value: SyncCheckpoint }>(
      'metadata',
      'sync-checkpoint'
    );
    if (stored) {
      this.checkpoint = stored.value;
    } else {
      this.checkpoint = {
        lastSyncTimestamp: 0,
        serverSequence: 0,
        vectorClock: {},
      };
    }
  }

  /**
   * Performs one delta-sync round trip: pushes local changes, pulls server
   * changes, and advances the persisted checkpoint.
   *
   * @throws Error when initialize() has not been called (previously a bare
   *   non-null assertion silently sent `undefined` as the checkpoint) or
   *   when the HTTP request fails.
   */
  async sync(localChanges: VersionedDocument<unknown>[]): Promise<{
    applied: VersionedDocument<unknown>[];
    conflicts: Array<{
      local: VersionedDocument<unknown>;
      remote: VersionedDocument<unknown>;
    }>;
  }> {
    if (!this.checkpoint) {
      throw new Error('SyncProtocol.sync() called before initialize()');
    }
    const request: DeltaSyncRequest = {
      clientId: this.clientId,
      checkpoint: this.checkpoint,
      changes: localChanges,
    };
    const response = await this.sendSyncRequest(request);
    if (response.fullSyncRequired) {
      // Server decided our checkpoint is too stale for a delta exchange.
      return this.performFullSync();
    }
    await this.saveCheckpoint(response.newCheckpoint);
    return {
      applied: response.serverChanges,
      conflicts: response.conflicts.map((c) => ({
        local: c.clientVersion,
        remote: c.serverVersion,
      })),
    };
  }

  /** Caches and persists a server-issued checkpoint in one place. */
  private async saveCheckpoint(checkpoint: SyncCheckpoint): Promise<void> {
    this.checkpoint = checkpoint;
    await this.db.put('metadata', {
      key: 'sync-checkpoint',
      value: this.checkpoint,
    });
  }

  private async sendSyncRequest(request: DeltaSyncRequest): Promise<DeltaSyncResponse> {
    const response = await fetch(`${this.apiEndpoint}/sync`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(request),
    });
    if (!response.ok) {
      throw new Error(`Sync failed: ${response.status}`);
    }
    return response.json();
  }

  /** Downloads complete server state; used when deltas cannot be trusted. */
  private async performFullSync(): Promise<{
    applied: VersionedDocument<unknown>[];
    conflicts: Array<{
      local: VersionedDocument<unknown>;
      remote: VersionedDocument<unknown>;
    }>;
  }> {
    const response = await fetch(`${this.apiEndpoint}/full-sync`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ clientId: this.clientId }),
    });
    if (!response.ok) {
      throw new Error(`Full sync failed: ${response.status}`);
    }
    const data: {
      documents: VersionedDocument<unknown>[];
      checkpoint: SyncCheckpoint;
    } = await response.json();
    await this.saveCheckpoint(data.checkpoint);
    // Full sync returns all documents — conflicts are resolved server-side.
    return {
      applied: data.documents,
      conflicts: [],
    };
  }

  /** Documents modified strictly after the checkpoint timestamp. */
  async getLocalChanges(): Promise<VersionedDocument<unknown>[]> {
    const lastSync = this.checkpoint?.lastSyncTimestamp ?? 0;
    // lowerBound(..., true) is exclusive — NOTE(review): a document stamped
    // exactly at lastSync is skipped; confirm updatedAt values strictly
    // increase across a sync boundary.
    return this.db.query<VersionedDocument<unknown>>(
      'documents',
      'by-updated',
      IDBKeyRange.lowerBound(lastSync, true)
    );
  }
}
// Server-side sync handler (conceptual)
class SyncServer {
private documents: Map<string, VersionedDocument<unknown>> = new Map();
private serverSequence = 0;
handleSyncRequest(request: DeltaSyncRequest): DeltaSyncResponse {
const conflicts: DeltaSyncResponse['conflicts'] = [];
const appliedChanges: VersionedDocument<unknown>[] = [];
// Process client changes
for (const clientDoc of request.changes) {
const serverDoc = this.documents.get(clientDoc.id);
if (!serverDoc) {
// New document
this.documents.set(clientDoc.id, clientDoc);
appliedChanges.push(clientDoc);
continue;
}
// Check for conflicts
const comparison = this.compareVersions(clientDoc, serverDoc);
if (comparison === 'client_newer') {
this.documents.set(clientDoc.id, clientDoc);
appliedChanges.push(clientDoc);
} else if (comparison === 'conflict') {
conflicts.push({
clientVersion: clientDoc,
serverVersion: serverDoc,
});
}
// If server_newer, client will receive update below
}
// Get server changes since client's last sync
const serverChanges = this.getChangesSince(request.checkpoint);
this.serverSequence++;
return {
serverChanges,
conflicts,
newCheckpoint: {
lastSyncTimestamp: Date.now(),
serverSequence: this.serverSequence,
vectorClock: this.getServerVectorClock(),
},
fullSyncRequired: this.shouldRequireFullSync(request.checkpoint),
};
}
private compareVersions(
client: VersionedDocument<unknown>,
server: VersionedDocument<unknown>
): 'client_newer' | 'server_newer' | 'conflict' {
// Compare using vector clocks
let clientNewer = false;
let serverNewer = false;
const allNodes = new Set([
...Object.keys(client.vectorClock),
...Object.keys(server.vectorClock),
]);
for (const node of allNodes) {
const cVal = client.vectorClock[node] || 0;
const sVal = server.vectorClock[node] || 0;
if (cVal > sVal) clientNewer = true;
if (sVal > cVal) serverNewer = true;
}
if (clientNewer && !serverNewer) return 'client_newer';
if (serverNewer && !clientNewer) return 'server_newer';
return 'conflict';
}
private getChangesSince(checkpoint: SyncCheckpoint): VersionedDocument<unknown>[] {
// Return documents modified since checkpoint
return Array.from(this.documents.values()).filter(
doc => doc.lastModifiedAt > checkpoint.lastSyncTimestamp
);
}
private getServerVectorClock(): VectorClock {
// Merge all document clocks
let merged: VectorClock = {};
for (const doc of this.documents.values()) {
merged = mergeClock(merged, doc.vectorClock);
}
return merged;
}
private shouldRequireFullSync(checkpoint: SyncCheckpoint): boolean {
// Force full sync if checkpoint is too old or sequence gap too large
const sequenceGap = this.serverSequence - checkpoint.serverSequence;
const timeGap = Date.now() - checkpoint.lastSyncTimestamp;
return sequenceGap > 1000 || timeGap > 7 * 24 * 60 * 60 * 1000; // 7 days
}
}
export { SyncProtocol, SyncCheckpoint, DeltaSyncRequest, DeltaSyncResponse };
Optimistic UI Patterns
Immediate Updates with Rollback
// src/offline/optimistic-store.ts
import { OfflineDatabase } from './database';
import { SyncQueue } from './sync-queue';
import { VectorClock, incrementClock } from './conflict-resolution';
// A locally stored, versioned document plus its sync bookkeeping.
interface Document<T> {
  id: string;
  data: T;
  vectorClock: VectorClock; // causal version for conflict detection
  lastModifiedBy: string; // client id of the last writer
  lastModifiedAt: number; // epoch millis of the last write
  syncStatus: 'synced' | 'pending' | 'conflict';
  localVersion?: number; // local edit counter, bumped on each mutation
}
// Rollback record kept while a mutation awaits server confirmation.
interface OptimisticUpdate<T> {
  documentId: string;
  previousState: Document<T> | null; // null when the operation was a create
  newState: Document<T>;
  operationType: 'create' | 'update' | 'delete';
}
// Receives a defensive copy of the document map on every change.
type Subscriber<T> = (documents: Map<string, Document<T>>) => void;
class OptimisticStore<T> {
private documents: Map<string, Document<T>> = new Map();
private pendingUpdates: Map<string, OptimisticUpdate<T>> = new Map();
private subscribers: Set<Subscriber<T>> = new Set();
private clientId: string;
constructor(
private db: OfflineDatabase,
private syncQueue: SyncQueue,
private entityType: string,
clientId: string
) {
this.clientId = clientId;
this.loadFromDatabase();
this.setupSyncListeners();
}
private async loadFromDatabase() {
const docs = await this.db.query<Document<T>>(
'documents',
'by-type',
IDBKeyRange.only(this.entityType)
);
for (const doc of docs) {
this.documents.set(doc.id, doc);
}
this.notify();
}
private setupSyncListeners() {
// Listen for sync completion
window.addEventListener('sync-complete', (e: Event) => {
const detail = (e as CustomEvent).detail;
if (detail.entityType === this.entityType) {
this.handleSyncComplete(detail.documentId, detail.serverDocument);
}
});
// Listen for sync failure
window.addEventListener('sync-failed', (e: Event) => {
const detail = (e as CustomEvent).detail;
if (detail.entityType === this.entityType) {
this.handleSyncFailure(detail.documentId, detail.error);
}
});
}
subscribe(callback: Subscriber<T>): () => void {
this.subscribers.add(callback);
callback(this.documents);
return () => {
this.subscribers.delete(callback);
};
}
private notify() {
for (const subscriber of this.subscribers) {
subscriber(new Map(this.documents));
}
}
async create(id: string, data: T): Promise<Document<T>> {
const now = Date.now();
const vectorClock = incrementClock({}, this.clientId);
const document: Document<T> = {
id,
data,
vectorClock,
lastModifiedBy: this.clientId,
lastModifiedAt: now,
syncStatus: 'pending',
localVersion: 1,
};
// Optimistically update local state
this.documents.set(id, document);
// Track for potential rollback
this.pendingUpdates.set(id, {
documentId: id,
previousState: null,
newState: document,
operationType: 'create',
});
// Persist to local database
await this.db.put('documents', { ...document, type: this.entityType });
// Queue for sync
await this.syncQueue.enqueue({
operationType: 'create',
entityType: this.entityType,
entityId: id,
data: document,
vectorClock,
});
this.notify();
return document;
}
async update(id: string, updates: Partial<T>): Promise<Document<T>> {
const existing = this.documents.get(id);
if (!existing) {
throw new Error(`Document ${id} not found`);
}
const now = Date.now();
const vectorClock = incrementClock(existing.vectorClock, this.clientId);
const updated: Document<T> = {
...existing,
data: { ...existing.data, ...updates },
vectorClock,
lastModifiedBy: this.clientId,
lastModifiedAt: now,
syncStatus: 'pending',
localVersion: (existing.localVersion || 0) + 1,
};
// Track previous state for rollback
if (!this.pendingUpdates.has(id)) {
this.pendingUpdates.set(id, {
documentId: id,
previousState: existing,
newState: updated,
operationType: 'update',
});
} else {
// Update the new state but keep original previous state
const pending = this.pendingUpdates.get(id)!;
pending.newState = updated;
}
// Optimistically update
this.documents.set(id, updated);
// Persist
await this.db.put('documents', { ...updated, type: this.entityType });
// Queue for sync
await this.syncQueue.enqueue({
operationType: 'update',
entityType: this.entityType,
entityId: id,
data: updated,
vectorClock,
});
this.notify();
return updated;
}
async delete(id: string): Promise<void> {
const existing = this.documents.get(id);
if (!existing) return;
// Track for rollback
this.pendingUpdates.set(id, {
documentId: id,
previousState: existing,
newState: { ...existing, syncStatus: 'pending' },
operationType: 'delete',
});
// Optimistically remove
this.documents.delete(id);
// Mark as deleted in database (soft delete for sync)
await this.db.put('documents', {
...existing,
type: this.entityType,
_deleted: true,
syncStatus: 'pending',
});
// Queue for sync
await this.syncQueue.enqueue({
operationType: 'delete',
entityType: this.entityType,
entityId: id,
data: { id },
vectorClock: incrementClock(existing.vectorClock, this.clientId),
});
this.notify();
}
private handleSyncComplete(documentId: string, serverDocument: Document<T>) {
const pending = this.pendingUpdates.get(documentId);
if (pending) {
// Update with server-confirmed state
if (pending.operationType !== 'delete') {
this.documents.set(documentId, {
...serverDocument,
syncStatus: 'synced',
});
}
this.pendingUpdates.delete(documentId);
}
this.notify();
}
// Called when syncing a queued operation fails.
// Permanent failures ('conflict' / 'not_found') roll the optimistic change
// back; any other error string (e.g. a transient network failure) leaves the
// pending state in place so the sync queue can retry later.
// NOTE(review): rollback() is async but not awaited here — confirm callers
// don't rely on the rollback having completed when this returns.
private handleSyncFailure(documentId: string, error: string) {
  const pending = this.pendingUpdates.get(documentId);
  if (pending) {
    // Check if we should rollback
    if (error === 'conflict' || error === 'not_found') {
      this.rollback(documentId);
    }
    // Otherwise keep pending state and retry
  }
}
/**
 * Undo an optimistic operation, restoring both in-memory and persisted state.
 * Updates/deletes restore the tracked snapshot; a create (no snapshot) is
 * erased entirely. Subscribers are notified afterwards.
 */
private async rollback(documentId: string) {
  const pending = this.pendingUpdates.get(documentId);
  if (!pending) return;
  const snapshot = pending.previousState;
  if (snapshot) {
    // Restore the pre-operation snapshot in memory and on disk.
    this.documents.set(documentId, snapshot);
    const record = { ...snapshot, type: this.entityType };
    await this.db.put('documents', record);
  } else {
    // The operation was a create — remove the document everywhere.
    this.documents.delete(documentId);
    await this.db.delete('documents', documentId);
  }
  this.pendingUpdates.delete(documentId);
  this.notify();
}
// Look up a single document by id (undefined if absent or optimistically deleted).
get(id: string): Document<T> | undefined {
  return this.documents.get(id);
}
/** Snapshot array of every live document, in map insertion order. */
getAll(): Document<T>[] {
  return [...this.documents.values()];
}
// Number of optimistic operations not yet confirmed by the server.
getPendingCount(): number {
  return this.pendingUpdates.size;
}
/** True while any optimistic operation still awaits server confirmation. */
hasPendingChanges(): boolean {
  return this.getPendingCount() > 0;
}
}
export { OptimisticStore, Document, OptimisticUpdate };
Service Worker Integration
Background Sync and Offline Caching
// sw.ts (Service Worker)
/// <reference lib="webworker" />
declare const self: ServiceWorkerGlobalScope;
// Versioned cache for the application shell; bump the suffix to invalidate
// all precached assets on deploy (old versions are purged on activate).
const CACHE_NAME = 'app-v1';
// App-shell assets precached during the install phase.
const STATIC_ASSETS = [
  '/',
  '/index.html',
  '/app.js',
  '/styles.css',
  '/offline.html',
];
// Separate cache for API GET responses, stamped with an x-cached-at header.
const API_CACHE_NAME = 'api-cache-v1';
// NOTE(review): declared but never read in this file — presumably intended
// for a cache-expiry check in handleApiGet; confirm before removing.
const API_CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
// Install: precache the application shell, then activate this worker
// immediately instead of waiting for old tabs to close.
self.addEventListener('install', (event) => {
  const precache = async () => {
    const cache = await caches.open(CACHE_NAME);
    await cache.addAll(STATIC_ASSETS);
  };
  event.waitUntil(precache());
  self.skipWaiting();
});
// Activate: drop caches left over from previous versions, then take control
// of all open pages.
// Fix: clients.claim() is awaited inside waitUntil so activation is not
// considered complete until existing clients are controlled — the original
// fired it outside waitUntil, racing in-flight fetch events.
self.addEventListener('activate', (event) => {
  event.waitUntil(
    (async () => {
      const cacheNames = await caches.keys();
      await Promise.all(
        cacheNames
          .filter((name) => name !== CACHE_NAME && name !== API_CACHE_NAME)
          .map((name) => caches.delete(name))
      );
      await self.clients.claim();
    })()
  );
});
// Fetch: route API traffic through the offline-aware API handlers and
// everything else through the cache-first static asset handler.
self.addEventListener('fetch', (event) => {
  const request = event.request;
  const isApi = new URL(request.url).pathname.startsWith('/api/');
  if (isApi) {
    event.respondWith(handleApiRequest(request));
  } else {
    event.respondWith(handleStaticRequest(request));
  }
});
/**
 * Cache-first strategy for static assets: serve from the shell cache when
 * possible, fall back to the network (caching successful GET responses), and
 * finally fall back to the offline page when the network is unreachable.
 *
 * Fix: only GET responses are written to the cache — Cache.put() rejects for
 * other HTTP methods, and the original's un-awaited put() left that
 * rejection unhandled.
 */
async function handleStaticRequest(request: Request): Promise<Response> {
  // Try cache first.
  const cached = await caches.match(request);
  if (cached) {
    return cached;
  }
  // Network fallback.
  try {
    const response = await fetch(request);
    // Cache successful GET responses for next time.
    if (response.ok && request.method === 'GET') {
      const cache = await caches.open(CACHE_NAME);
      await cache.put(request, response.clone());
    }
    return response;
  } catch (error) {
    // Offline fallback page (or a bare 503 if even that was never cached).
    const offlinePage = await caches.match('/offline.html');
    return offlinePage || new Response('Offline', { status: 503 });
  }
}
/**
 * Dispatch API requests by method: GETs use network-first with cache
 * fallback, all mutations go through the offline sync-queue path.
 */
async function handleApiRequest(request: Request): Promise<Response> {
  return request.method === 'GET'
    ? handleApiGet(request)
    : handleApiMutation(request);
}
/**
 * Network-first handler for API GETs.
 *
 * Successful responses are copied into the API cache stamped with an
 * `x-cached-at` timestamp header. When the network is unreachable, a cached
 * copy (if any) is served with `x-from-cache` / `x-cache-age` headers so the
 * UI can flag stale data; otherwise a 503 JSON error is returned.
 */
async function handleApiGet(request: Request): Promise<Response> {
  try {
    const response = await fetch(request);
    if (response.ok) {
      // Stamp a clone with the time it was stored and cache it.
      const cache = await caches.open(API_CACHE_NAME);
      const copy = response.clone();
      const stampedHeaders = new Headers(copy.headers);
      stampedHeaders.set('x-cached-at', Date.now().toString());
      const body = await copy.blob();
      cache.put(request, new Response(body, {
        status: copy.status,
        statusText: copy.statusText,
        headers: stampedHeaders,
      }));
    }
    return response;
  } catch (error) {
    // Network failed — fall back to the cached copy, if one exists.
    const cached = await caches.match(request);
    if (!cached) {
      // Nothing cached either: report offline.
      return new Response(JSON.stringify({ error: 'offline' }), {
        status: 503,
        headers: { 'Content-Type': 'application/json' },
      });
    }
    const storedAt = parseInt(cached.headers.get('x-cached-at') || '0');
    // Annotate the stale copy so the UI can warn the user.
    const staleHeaders = new Headers(cached.headers);
    staleHeaders.set('x-from-cache', 'true');
    staleHeaders.set('x-cache-age', (Date.now() - storedAt).toString());
    return new Response(await cached.blob(), {
      status: cached.status,
      statusText: cached.statusText,
      headers: staleHeaders,
    });
  }
}
/**
 * Handler for API mutations (POST/PUT/PATCH/DELETE): try the network first;
 * when unreachable, queue the request for background sync and return a 202
 * so the app can treat the operation as accepted.
 *
 * Fix: the request is cloned BEFORE fetch() — fetch consumes the body
 * stream, so the original could throw "body already used" when queueForSync
 * re-read the request after a network failure.
 */
async function handleApiMutation(request: Request): Promise<Response> {
  // Keep an unread copy for the offline queue; fetch() drains the body.
  const queueCopy = request.clone();
  try {
    // Try to send immediately.
    return await fetch(request);
  } catch (error) {
    // Offline — queue for background sync and acknowledge with 202.
    await queueForSync(queueCopy);
    return new Response(JSON.stringify({
      queued: true,
      message: 'Request queued for sync',
    }), {
      status: 202,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}
/**
 * Persist a failed mutation so it can be replayed when connectivity returns.
 *
 * The worker has no direct store here: the serialized request is handed to
 * open pages (which own the IndexedDB sync queue), and a background sync is
 * registered as the wake-up signal.
 *
 * Fix: matchAll() passes includeUncontrolled so pages this worker does not
 * yet control (e.g. right after install, before claim) still get the message.
 * NOTE(review): if NO client window is open the message is dropped and the
 * operation is lost — consider writing to IndexedDB directly from the SW.
 */
async function queueForSync(request: Request): Promise<void> {
  const body = await request.text();
  // Forward the serialized request to every open page.
  const clients = await self.clients.matchAll({ includeUncontrolled: true });
  for (const client of clients) {
    client.postMessage({
      type: 'QUEUE_SYNC',
      payload: {
        url: request.url,
        method: request.method,
        headers: Object.fromEntries(request.headers.entries()),
        body,
        timestamp: Date.now(),
      },
    });
  }
  // Ask the browser to wake this worker when connectivity returns.
  if ('sync' in self.registration) {
    await (self.registration as any).sync.register('sync-queue');
  }
}
// Background Sync: the browser fires this once connectivity is restored.
self.addEventListener('sync', (event: any) => {
  if (event.tag !== 'sync-queue') return;
  event.waitUntil(processSyncQueue());
});
/** Tell every open page to drain its IndexedDB sync queue. */
async function processSyncQueue(): Promise<void> {
  const pages = await self.clients.matchAll();
  pages.forEach((page) => {
    page.postMessage({ type: 'PROCESS_SYNC_QUEUE' });
  });
}
// Push: surface server-delivered sync results as a notification.
self.addEventListener('push', (event) => {
  const data = event.data?.json();
  if (data?.type === 'SYNC_RESULT') {
    const options = {
      body: data.message,
      icon: '/icon.png',
      tag: 'sync-notification',
    };
    event.waitUntil(
      self.registration.showNotification('Sync Complete', options)
    );
  }
});
// Periodic Background Sync: opportunistic refresh while the app is closed.
self.addEventListener('periodicsync', (event: any) => {
  if (event.tag !== 'sync-data') return;
  event.waitUntil(processSyncQueue());
});
export {};
React Integration
Offline-First Hooks
// src/offline/hooks.ts
import { useState, useEffect, useCallback, useSyncExternalStore } from 'react';
import { OptimisticStore, Document } from './optimistic-store';
import { SyncQueue } from './sync-queue';
/**
 * React hook tracking the browser's connectivity flag via online/offline
 * window events. Note: navigator.onLine only reflects the OS network
 * interface — it can report true with no actual internet access.
 */
function useOnlineStatus() {
  const [isOnline, setIsOnline] = useState(navigator.onLine);
  useEffect(() => {
    const update = () => setIsOnline(navigator.onLine);
    window.addEventListener('online', update);
    window.addEventListener('offline', update);
    return () => {
      window.removeEventListener('online', update);
      window.removeEventListener('offline', update);
    };
  }, []);
  return isOnline;
}
/**
 * Subscribe to a SyncQueue's status (pending/failed counts, syncing flag)
 * and expose a retry() action that re-drives the queue.
 */
function useSyncStatus(syncQueue: SyncQueue) {
  const [status, setStatus] = useState<{
    pending: number;
    failed: number;
    isSyncing: boolean;
  }>({ pending: 0, failed: 0, isSyncing: false });
  useEffect(() => {
    // onStatusChange returns its own unsubscribe function for cleanup.
    return syncQueue.onStatusChange((syncStatus) => {
      setStatus((prev) => ({ ...prev, ...syncStatus }));
    });
  }, [syncQueue]);
  const retry = useCallback(async () => {
    await syncQueue.processQueue();
  }, [syncQueue]);
  return { ...status, retry };
}
// Optimistic store hook
// React adapter over an OptimisticStore: mirrors the store's document map
// into React state and exposes stable CRUD callbacks.
// NOTE(review): state starts as an empty Map — this assumes store.subscribe()
// emits the current documents immediately on subscription (or that the store
// is empty at mount); confirm against OptimisticStore.subscribe.
// NOTE(review): hasPendingChanges/pendingCount are plain reads at render time;
// they only stay fresh because store mutations also fire subscribe callbacks.
function useOfflineStore<T>(store: OptimisticStore<T>) {
  const [documents, setDocuments] = useState<Map<string, Document<T>>>(new Map());
  useEffect(() => {
    return store.subscribe((docs) => {
      setDocuments(docs);
    });
  }, [store]);
  // Stable callbacks so consumers can safely list them in dependency arrays.
  const create = useCallback(async (id: string, data: T) => {
    return store.create(id, data);
  }, [store]);
  const update = useCallback(async (id: string, updates: Partial<T>) => {
    return store.update(id, updates);
  }, [store]);
  const remove = useCallback(async (id: string) => {
    return store.delete(id);
  }, [store]);
  return {
    documents: Array.from(documents.values()),
    documentsMap: documents,
    create,
    update,
    delete: remove,
    get: (id: string) => documents.get(id),
    hasPendingChanges: store.hasPendingChanges(),
    pendingCount: store.getPendingCount(),
  };
}
/**
 * Collect sync conflicts broadcast via the `conflict-needs-resolution`
 * CustomEvent and expose resolveConflict(), which answers with a
 * `conflict-resolved` event carrying the winning document and removes the
 * conflict from local state.
 */
function useConflictResolution<T>() {
  type Conflict = { local: Document<T>; remote: Document<T> };
  const [conflicts, setConflicts] = useState<Array<Conflict>>([]);
  useEffect(() => {
    const handleConflict = (e: Event) => {
      const detail = (e as CustomEvent).detail;
      const entry: Conflict = { local: detail.local, remote: detail.remote };
      setConflicts((prev) => [...prev, entry]);
    };
    window.addEventListener('conflict-needs-resolution', handleConflict);
    return () => {
      window.removeEventListener('conflict-needs-resolution', handleConflict);
    };
  }, []);
  const resolveConflict = useCallback((
    documentId: string,
    resolution: 'local' | 'remote' | Document<T>
  ) => {
    const conflict = conflicts.find((c) => c.local.id === documentId);
    if (!conflict) return;
    // Map the caller's choice onto a concrete document.
    const resolved: Document<T> =
      resolution === 'local' ? conflict.local :
      resolution === 'remote' ? conflict.remote :
      resolution;
    window.dispatchEvent(new CustomEvent('conflict-resolved', {
      detail: { documentId, resolved },
    }));
    setConflicts((prev) => prev.filter((c) => c.local.id !== documentId));
  }, [conflicts]);
  return { conflicts, resolveConflict };
}
/**
 * Track per-key freshness timestamps. markFresh(key) records "now";
 * isStale(key) is true once the entry is older than maxAge (or never
 * marked); getAge(key) returns its age in ms (Infinity when unmarked).
 */
function useStaleIndicator(maxAge: number = 5 * 60 * 1000) {
  const [staleData, setStaleData] = useState<Map<string, number>>(new Map());
  const markFresh = useCallback((key: string) => {
    setStaleData((prev) => new Map(prev).set(key, Date.now()));
  }, []);
  const isStale = useCallback((key: string) => {
    const stamped = staleData.get(key);
    if (!stamped) return true;
    return Date.now() - stamped > maxAge;
  }, [staleData, maxAge]);
  const getAge = useCallback((key: string) => {
    const stamped = staleData.get(key);
    if (!stamped) return Infinity;
    return Date.now() - stamped;
  }, [staleData]);
  return { markFresh, isStale, getAge };
}
export {
useOnlineStatus,
useSyncStatus,
useOfflineStore,
useConflictResolution,
useStaleIndicator,
};
Offline-Aware Components
// src/components/OfflineIndicator.tsx
import React from 'react';
import { useOnlineStatus, useSyncStatus } from '../offline/hooks';
// Props for OfflineIndicator: the app-wide sync queue whose status is shown.
interface OfflineIndicatorProps {
  syncQueue: import('../offline/sync-queue').SyncQueue;
}
/**
 * Banner summarizing connectivity and sync state: offline notice, count of
 * queued changes, and failed changes with a manual retry button.
 * Renders nothing when online with no pending or failed changes.
 */
export function OfflineIndicator({ syncQueue }: OfflineIndicatorProps) {
  const isOnline = useOnlineStatus();
  const { pending, failed, isSyncing, retry } = useSyncStatus(syncQueue);
  // Fully synced and online: stay out of the user's way.
  if (isOnline && pending === 0 && failed === 0) {
    return null;
  }
  return (
    <div className="offline-indicator">
      {!isOnline && (
        <div className="status offline">
          <span className="icon">📴</span>
          <span>You're offline</span>
        </div>
      )}
      {pending > 0 && (
        <div className="status pending">
          <span className="icon">{isSyncing ? '🔄' : '⏳'}</span>
          <span>{pending} changes waiting to sync</span>
        </div>
      )}
      {failed > 0 && (
        <div className="status failed">
          <span className="icon">⚠️</span>
          <span>{failed} changes failed to sync</span>
          <button onClick={retry}>Retry</button>
        </div>
      )}
    </div>
  );
}
// Conflict resolution dialog
// Props for ConflictDialog:
// - conflict: the local (offline) and remote (server) versions in dispute.
// - onResolve: invoked with the user's choice of which version wins.
// - renderDiff: caller-supplied renderer comparing the two payloads.
interface ConflictDialogProps<T> {
  conflict: {
    local: import('../offline/optimistic-store').Document<T>;
    remote: import('../offline/optimistic-store').Document<T>;
  };
  onResolve: (resolution: 'local' | 'remote') => void;
  renderDiff: (local: T, remote: T) => React.ReactNode;
}
/**
 * Presents both versions of a conflicted document side by side, with
 * modification timestamps, and lets the user pick one. Field-level diff
 * rendering is delegated to renderDiff so this component stays agnostic of
 * the document payload type T.
 */
export function ConflictDialog<T>({
  conflict,
  onResolve,
  renderDiff,
}: ConflictDialogProps<T>) {
  return (
    <div className="conflict-dialog">
      <h3>Sync Conflict Detected</h3>
      <p>
        This item was modified both offline and by another user.
        Please choose which version to keep.
      </p>
      <div className="diff">
        {renderDiff(conflict.local.data, conflict.remote.data)}
      </div>
      <div className="conflict-meta">
        <div>
          <strong>Your changes:</strong>
          <span>Modified {formatTime(conflict.local.lastModifiedAt)}</span>
        </div>
        <div>
          <strong>Server version:</strong>
          <span>Modified {formatTime(conflict.remote.lastModifiedAt)}</span>
        </div>
      </div>
      <div className="actions">
        <button onClick={() => onResolve('local')}>
          Keep My Changes
        </button>
        <button onClick={() => onResolve('remote')}>
          Use Server Version
        </button>
      </div>
    </div>
  );
}
/** Render an epoch-milliseconds timestamp in the user's locale. */
function formatTime(timestamp: number): string {
  const when = new Date(timestamp);
  return when.toLocaleString();
}
// Stale data indicator
// Props for StaleDataIndicator:
// - isStale: when false the component renders nothing.
// - age: data age in milliseconds (displayed rounded down to minutes).
// - onRefresh: invoked when the user requests fresh data.
interface StaleIndicatorProps {
  isStale: boolean;
  age: number;
  onRefresh: () => void;
}
/** Inline warning that the displayed data may be out of date, with a refresh action. */
export function StaleDataIndicator({ isStale, age, onRefresh }: StaleIndicatorProps) {
  if (!isStale) return null;
  // Whole minutes; anything under a minute reads as "just now".
  const ageMinutes = Math.floor(age / 60000);
  return (
    <div className="stale-indicator">
      <span className="icon">⚠️</span>
      <span>
        Data may be outdated ({ageMinutes > 0 ? `${ageMinutes}m ago` : 'just now'})
      </span>
      <button onClick={onRefresh}>Refresh</button>
    </div>
  );
}
Rehydration and Initial Load
Progressive Rehydration Strategy
// src/offline/rehydration.ts
import { useEffect, useState } from 'react';
import { OfflineDatabase } from './database';
import { SyncProtocol } from './sync-protocol';
// Tuning knobs for the progressive rehydration pass.
interface RehydrationConfig {
  priority: string[]; // Entity types to load first, in array order
  chunkSize: number; // Entities per chunk read from the local database
  delayBetweenChunks: number; // ms pause between chunks so the UI can paint
}
// Progress snapshot reported to the UI during rehydration.
interface RehydrationProgress {
  phase: 'local' | 'sync' | 'complete'; // local disk read → server sync → done
  entitiesLoaded: number;
  totalEntities: number; // -1 when the total is not yet known
  currentType: string;
}
// Callback receiving each progress snapshot.
type ProgressCallback = (progress: RehydrationProgress) => void;
/**
 * Orchestrates app start-up data loading: first pages entities out of the
 * local database in priority order (so the UI can render early), then runs a
 * best-effort server sync when the browser reports connectivity.
 */
class Rehydrator {
  constructor(
    private db: OfflineDatabase,
    private syncProtocol: SyncProtocol,
    private config: RehydrationConfig
  ) {}
  /**
   * Run both phases, reporting progress along the way.
   * @param onProgress - receives 'local' snapshots, then (if online) 'sync'
   *   snapshots, then one final 'complete' snapshot.
   */
  async rehydrate(onProgress: ProgressCallback): Promise<void> {
    // Phase 1: Load from local database
    await this.loadLocalData(onProgress);
    // Phase 2: Sync with server (if online)
    if (navigator.onLine) {
      await this.syncWithServer(onProgress);
    }
    onProgress({
      phase: 'complete',
      entitiesLoaded: 0,
      totalEntities: 0,
      currentType: '',
    });
  }
  // Walk entity types in configured priority order, paging through the
  // 'documents' store with a small delay per chunk so the UI thread can paint.
  // NOTE(review): fetched chunks are counted but never returned or stored —
  // presumably this pass only warms caches and consumers query the database
  // themselves; confirm, otherwise loaded data is being discarded.
  // NOTE(review): paging assumes db.query honors { limit, offset }; if offset
  // were ignored this loop would never terminate.
  private async loadLocalData(onProgress: ProgressCallback): Promise<void> {
    let totalLoaded = 0;
    for (const entityType of this.config.priority) {
      onProgress({
        phase: 'local',
        entitiesLoaded: totalLoaded,
        totalEntities: -1, // Unknown until counted
        currentType: entityType,
      });
      // Load in chunks to avoid blocking UI
      let offset = 0;
      while (true) {
        const chunk = await this.db.query(
          'documents',
          'by-type',
          IDBKeyRange.only(entityType),
          { limit: this.config.chunkSize, offset }
        );
        if (chunk.length === 0) break;
        // Allow UI to update
        await new Promise(resolve =>
          setTimeout(resolve, this.config.delayBetweenChunks)
        );
        totalLoaded += chunk.length;
        offset += chunk.length;
        onProgress({
          phase: 'local',
          entitiesLoaded: totalLoaded,
          totalEntities: -1,
          currentType: entityType,
        });
      }
    }
  }
  // Push local pending changes to the server and persist what comes back.
  // Conflicts are broadcast as 'conflict-needs-resolution' CustomEvents for
  // the UI layer to surface. Sync failures are logged and swallowed so the
  // app still starts on local data alone.
  private async syncWithServer(onProgress: ProgressCallback): Promise<void> {
    onProgress({
      phase: 'sync',
      entitiesLoaded: 0,
      totalEntities: -1,
      currentType: 'syncing',
    });
    try {
      const localChanges = await this.syncProtocol.getLocalChanges();
      const { applied, conflicts } = await this.syncProtocol.sync(localChanges);
      // Apply server changes
      for (const doc of applied) {
        await this.db.put('documents', doc);
      }
      // Handle conflicts
      for (const conflict of conflicts) {
        window.dispatchEvent(new CustomEvent('conflict-needs-resolution', {
          detail: conflict,
        }));
      }
      onProgress({
        phase: 'sync',
        entitiesLoaded: applied.length,
        totalEntities: applied.length,
        currentType: 'complete',
      });
    } catch (error) {
      console.error('Sync failed during rehydration:', error);
      // Continue with local data only
    }
  }
}
/**
 * React hook running a one-shot rehydration pass on mount and reporting its
 * progress. Returns { isRehydrating, progress, error }; error is non-null
 * only if rehydration itself rejects (server-sync failures are swallowed by
 * the Rehydrator and the app continues on local data).
 *
 * Fix: a cancellation flag stops state updates after unmount — the original
 * kept calling setState from the in-flight rehydration, a React warning and
 * potential leak.
 * NOTE(review): db/syncProtocol/config must be referentially stable across
 * renders, or the effect re-runs and rehydration restarts.
 */
function useRehydration(
  db: OfflineDatabase,
  syncProtocol: SyncProtocol,
  config: RehydrationConfig
) {
  const [isRehydrating, setIsRehydrating] = useState(true);
  const [progress, setProgress] = useState<RehydrationProgress>({
    phase: 'local',
    entitiesLoaded: 0,
    totalEntities: 0,
    currentType: '',
  });
  const [error, setError] = useState<Error | null>(null);
  useEffect(() => {
    let cancelled = false;
    const rehydrator = new Rehydrator(db, syncProtocol, config);
    rehydrator.rehydrate((p) => {
      if (!cancelled) setProgress(p);
    })
      .then(() => {
        if (!cancelled) setIsRehydrating(false);
      })
      .catch((err) => {
        if (cancelled) return;
        setError(err);
        setIsRehydrating(false);
      });
    return () => {
      cancelled = true;
    };
  }, [db, syncProtocol, config]);
  return { isRehydrating, progress, error };
}
export { Rehydrator, RehydrationConfig, RehydrationProgress, useRehydration };
Key Takeaways
- Offline is the default state: Design data flow assuming no network, not as an afterthought
- Local-first requires local database: IndexedDB for documents, SQLite (OPFS) for relational data with complex queries
- Sync queues are essential: Every mutation goes through a persistent queue that survives app restarts
- Vector clocks enable conflict detection: Track causality across distributed clients to identify concurrent updates
- Choose conflict resolution per entity type: Some data is LWW-safe, some needs manual resolution
- Optimistic updates with rollback: Apply changes immediately, track pending state, rollback on failure
- Service workers enable background sync: Queue operations when offline, sync when connectivity returns
- Delta sync minimizes data transfer: Track last sync point, exchange only changes since then
- Rehydration must be progressive: Load critical data first, show UI early, sync in background
- UI must communicate sync state: Users need to know what's synced, pending, or conflicted
Offline-first is an architectural commitment, not a feature flag. It fundamentally changes how data flows through your application.
What did you think?