Designing Frontend Systems for Low-End Devices and Emerging Markets
Building for the Next Billion Users
The majority of new internet users come from emerging markets with device and network constraints that differ dramatically from the Silicon Valley development environment. A $50 Android phone on a 2G connection in rural India has fundamentally different capabilities than a MacBook Pro on fiber. Building for these users requires intentional architectural decisions around CPU budget, memory constraints, progressive enhancement, and graceful degradation.
This article presents patterns for building frontend applications that perform well across the entire device spectrum.
Understanding the Constraints
┌─────────────────────────────────────────────────────────────────────────────┐
│ Device Tier Comparison │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ High-End Mid-Range Low-End │
│ ──────── ───────── ─────── │
│ │
│ CPU 8-core 3GHz 4-core 2GHz 2-core 1.2GHz │
│ A17/SD 8G3 SD 695 SD 439 │
│ │
│ RAM 8-16 GB 4-6 GB 1-2 GB │
│ │
│ Network 5G/WiFi 6 4G LTE 2G/3G │
│ 100+ Mbps 10-50 Mbps 0.1-1 Mbps │
│ │
│ JS Parse Speed ~100 KB/ms ~40 KB/ms ~10 KB/ms │
│ │
│ Browser Latest Chrome Chrome 80+ UC/Opera Mini │
│ │
│ Storage 256+ GB 32-64 GB 8-16 GB │
│ │
│ ───────────────────────────────────────────────────────────────────────── │
│ │
│ Performance Impact │
│ ────────────────── │
│ │
│ 1 MB JS Bundle: │
│ │
│ High-End: Parse: 10ms │ Execute: 50ms │ Total: ~60ms │
│ Mid-Range: Parse: 25ms │ Execute: 150ms │ Total: ~175ms │
│ Low-End: Parse: 100ms │ Execute: 500ms │ Total: ~600ms │
│ │
│ Network Transfer (1 MB): │
│ │
│ 5G/WiFi: ~80ms │ 4G: ~800ms │ 3G: ~8s │ 2G: ~80s │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
CPU Budget Architecture
JavaScript Execution Budget
// src/performance/cpu-budget.ts
// Per-tier CPU time budgets; all values are milliseconds unless noted.
interface CPUBudget {
  frameTime: number; // Target ms per frame (16.67 for 60fps)
  longTaskThreshold: number; // What constitutes a "long task"
  jsParsePerKB: number; // ms to parse 1KB of JS
  jsExecuteMultiplier: number; // Execution time vs parse time
  interactionBudget: number; // Max ms for interaction response
  hydrationBudget: number; // Total hydration budget
}

// Budgets keyed by device tier ('high' | 'mid' | 'low'). The low tier
// targets 30fps and accepts longer tasks/interactions, since 60fps is
// unrealistic on 2-core ~1.2GHz CPUs.
const budgetsByTier: Record<string, CPUBudget> = {
  high: {
    frameTime: 16.67,
    longTaskThreshold: 50,
    jsParsePerKB: 0.1,
    jsExecuteMultiplier: 5,
    interactionBudget: 100,
    hydrationBudget: 500,
  },
  mid: {
    frameTime: 16.67,
    longTaskThreshold: 50,
    jsParsePerKB: 0.25,
    jsExecuteMultiplier: 8,
    interactionBudget: 200,
    hydrationBudget: 1000,
  },
  low: {
    frameTime: 33.33, // Target 30fps on low-end
    longTaskThreshold: 100,
    jsParsePerKB: 1.0,
    jsExecuteMultiplier: 15,
    interactionBudget: 500,
    hydrationBudget: 2000,
  },
};
// Detect device tier
/**
 * Classify the current device as 'high', 'mid', or 'low'.
 * Prefers the Device Memory API (GB of RAM); falls back to logical CPU
 * core count when deviceMemory is unavailable.
 */
function detectDeviceTier(): 'high' | 'mid' | 'low' {
  const classify = (value: number, highAt: number, midAt: number): 'high' | 'mid' | 'low' => {
    if (value >= highAt) return 'high';
    if (value >= midAt) return 'mid';
    return 'low';
  };

  // navigator.deviceMemory is not in TS's lib typings, hence the cast.
  const deviceMemoryGB = (navigator as any).deviceMemory;
  if (deviceMemoryGB !== undefined) {
    return classify(deviceMemoryGB, 8, 4);
  }

  // Fallback: logical core count. If hardwareConcurrency is undefined,
  // both comparisons are false and we conservatively report 'low'.
  return classify(navigator.hardwareConcurrency, 8, 4);
}
// CPU-aware task scheduler
// CPU-aware task scheduler: drains a priority queue of small tasks inside
// frame-sized time slices so low-end devices never blow their frame budget.
class CPUBudgetScheduler {
  private budget: CPUBudget;
  // Pending tasks, kept sorted by priority (critical first).
  private tasks: Array<{
    fn: () => void;
    priority: 'critical' | 'high' | 'normal' | 'low';
    estimatedMs: number;
  }> = [];
  private isProcessing = false;
  private frameTimeRemaining = 0;

  constructor(tier?: 'high' | 'mid' | 'low') {
    this.budget = budgetsByTier[tier || detectDeviceTier()];
  }

  /**
   * Queue a task. Tasks run inside requestAnimationFrame slices; a task whose
   * estimate exceeds the remaining frame budget is deferred to a later frame
   * (except 'critical' tasks, which always run).
   */
  schedule(
    task: () => void,
    options: { priority?: 'critical' | 'high' | 'normal' | 'low'; estimatedMs?: number } = {}
  ) {
    this.tasks.push({
      fn: task,
      priority: options.priority || 'normal',
      estimatedMs: options.estimatedMs || 5,
    });
    // Keep the queue sorted by priority. The order map is hoisted out of the
    // comparator (previously rebuilt on every comparison). Array.prototype.sort
    // is stable, so same-priority tasks retain FIFO order.
    const priorityOrder = { critical: 0, high: 1, normal: 2, low: 3 };
    this.tasks.sort((a, b) => priorityOrder[a.priority] - priorityOrder[b.priority]);
    if (!this.isProcessing) {
      this.processQueue();
    }
  }

  // Start draining on the next animation frame (idempotent while running).
  private processQueue() {
    if (this.tasks.length === 0) {
      this.isProcessing = false;
      return;
    }
    this.isProcessing = true;
    requestAnimationFrame(() => {
      this.frameTimeRemaining = this.budget.frameTime;
      this.processFrame();
    });
  }

  // Run queued tasks until the frame's time budget is spent, then yield and
  // continue on the next frame.
  private processFrame() {
    while (this.tasks.length > 0 && this.frameTimeRemaining > 0) {
      const task = this.tasks[0];
      // Critical tasks run regardless of budget.
      if (task.priority !== 'critical' && task.estimatedMs > this.frameTimeRemaining) {
        break;
      }
      this.tasks.shift();
      const taskStart = performance.now();
      task.fn();
      const taskDuration = performance.now() - taskStart;
      this.frameTimeRemaining -= taskDuration;
      // Surface wildly wrong estimates (off by more than 50%) to aid tuning.
      if (Math.abs(taskDuration - task.estimatedMs) > task.estimatedMs * 0.5) {
        console.debug(`Task estimate off: expected ${task.estimatedMs}ms, actual ${taskDuration}ms`);
      }
    }
    if (this.tasks.length > 0) {
      // More tasks remain; schedule the next frame slice.
      requestAnimationFrame(() => {
        this.frameTimeRemaining = this.budget.frameTime;
        this.processFrame();
      });
    } else {
      this.isProcessing = false;
    }
  }

  /**
   * Run a task but give up after `deadline` ms. The underlying task is NOT
   * cancelled (arbitrary JS work can't be); on timeout its eventual result is
   * discarded. BUG FIXES vs the naive Promise.race pattern: the timer is
   * always cleared (it previously kept running after the task settled), and a
   * late rejection of a timed-out task is swallowed so it cannot surface as
   * an unhandled promise rejection.
   */
  async runWithDeadline<T>(
    task: () => T | Promise<T>,
    deadline: number
  ): Promise<{ result?: T; timedOut: boolean }> {
    let timer: ReturnType<typeof setTimeout> | undefined;
    const taskPromise = Promise.resolve().then(task);
    try {
      const result = await Promise.race([
        taskPromise,
        new Promise<never>((_, reject) => {
          timer = setTimeout(() => reject(new Error('deadline')), deadline);
        }),
      ]);
      return { result, timedOut: false };
    } catch (error) {
      if (error instanceof Error && error.message === 'deadline') {
        // Discard any late failure of the abandoned task.
        taskPromise.catch(() => {});
        return { timedOut: true };
      }
      throw error;
    } finally {
      if (timer !== undefined) clearTimeout(timer);
    }
  }

  /** Snapshot of the active budget (copied so callers can't mutate it). */
  getBudget(): CPUBudget {
    return { ...this.budget };
  }
}
export { CPUBudgetScheduler, CPUBudget, detectDeviceTier };
Computation Chunking
// src/performance/chunked-computation.ts
// Options controlling chunked processing.
interface ChunkOptions {
  chunkSize: number; // max items per chunk
  yieldAfterMs: number; // max ms of work before yielding to the main thread
  onProgress?: (progress: number) => void; // 0..1, reported at each yield
}

/**
 * Apply `processor` to every item, yielding to the main thread between
 * chunks — and mid-chunk if a chunk overruns its time budget — so large
 * arrays never produce a long task.
 */
async function processInChunks<T, R>(
  items: T[],
  processor: (item: T, index: number) => R,
  options: ChunkOptions
): Promise<R[]> {
  const results: R[] = [];
  let index = 0;

  while (index < items.length) {
    const chunkEnd = Math.min(index + options.chunkSize, items.length);
    const sliceStart = performance.now();
    let ranOutOfTime = false;

    // Drain the current chunk, bailing early if it exceeds the time budget.
    while (index < chunkEnd) {
      results.push(processor(items[index], index));
      index += 1;
      if (performance.now() - sliceStart > options.yieldAfterMs) {
        ranOutOfTime = true;
        break;
      }
    }

    // Yield between chunks; also after a timeout (even on the final item),
    // matching the recursive formulation this iterative version replaces.
    if (ranOutOfTime || index < items.length) {
      await yieldToMain();
      options.onProgress?.(index / items.length);
    }
  }

  return results;
}
// Yield to main thread
/**
 * Resolve after control has been handed back to the event loop.
 * Uses the native Scheduler API's yield() when present, otherwise a
 * zero-delay timeout.
 */
function yieldToMain(): Promise<void> {
  const g = globalThis as any;
  if ('scheduler' in g && 'yield' in g.scheduler) {
    return Promise.resolve(g.scheduler.yield());
  }
  return new Promise((resolve) => setTimeout(resolve, 0));
}
// Virtual list renderer with chunked updates
// Renders list items in idle-time chunks so large lists don't block the main
// thread. NOTE(review): items are only appended — never removed or positioned
// by absolute offset — so callers must clear/recycle nodes for true
// virtualization; confirm intended usage.
class ChunkedVirtualList<T> {
  private visibleItems: T[] = [];
  private renderQueue: T[] = [];
  private isRendering = false;

  constructor(
    private container: HTMLElement,
    private renderItem: (item: T) => HTMLElement,
    private itemHeight: number
  ) {
    this.setupScrollListener();
  }

  // Debounce scroll handling to roughly one frame so the visible window is
  // not recomputed on every scroll event.
  private setupScrollListener() {
    let scrollTimeout: number;
    this.container.addEventListener('scroll', () => {
      clearTimeout(scrollTimeout);
      scrollTimeout = window.setTimeout(() => {
        this.updateVisibleItems();
      }, 16); // Debounce to frame boundary
    }, { passive: true });
  }

  setItems(items: T[]) {
    this.visibleItems = items;
    this.scheduleRender();
  }

  // Compute which items intersect the viewport (plus a small buffer) and
  // queue them for rendering.
  private updateVisibleItems() {
    const scrollTop = this.container.scrollTop;
    const viewportHeight = this.container.clientHeight;
    const startIndex = Math.floor(scrollTop / this.itemHeight);
    const endIndex = Math.ceil((scrollTop + viewportHeight) / this.itemHeight);
    // Buffer a few extra rows above and below to hide pop-in while scrolling.
    const bufferSize = 5;
    const bufferedStart = Math.max(0, startIndex - bufferSize);
    const bufferedEnd = Math.min(this.visibleItems.length, endIndex + bufferSize);
    this.renderQueue = this.visibleItems.slice(bufferedStart, bufferedEnd);
    this.scheduleRender();
  }

  // Schedule one render pass in idle time, falling back to rAF with a fixed
  // ~10ms pseudo-deadline where requestIdleCallback is unsupported (e.g.
  // older Safari and the proxy browsers common on low-end devices).
  private scheduleIdleRender() {
    if ('requestIdleCallback' in window) {
      requestIdleCallback(
        (deadline) => this.renderChunk(deadline),
        { timeout: 100 }
      );
    } else {
      requestAnimationFrame(() => {
        this.renderChunk({ timeRemaining: () => 10, didTimeout: false } as IdleDeadline);
      });
    }
  }

  private scheduleRender() {
    if (this.isRendering) return;
    this.isRendering = true;
    this.scheduleIdleRender();
  }

  // Render queued items until the idle deadline is exhausted, then reschedule.
  private renderChunk(deadline: IdleDeadline) {
    while (this.renderQueue.length > 0 && deadline.timeRemaining() > 0) {
      const item = this.renderQueue.shift()!;
      this.container.appendChild(this.renderItem(item));
    }
    if (this.renderQueue.length > 0) {
      // BUG FIX: the continuation previously called requestIdleCallback
      // unconditionally, crashing on browsers without it even though the
      // initial schedule was guarded. Reuse the guarded scheduler.
      this.scheduleIdleRender();
    } else {
      this.isRendering = false;
    }
  }
}
export { processInChunks, yieldToMain, ChunkedVirtualList };
Memory-Constrained Architecture
Memory Budget Management
// src/performance/memory-budget.ts
// Memory budgets (MB) per device tier, split by consumer. Thresholds are
// fractions of totalMB at which cleanup kicks in.
interface MemoryBudget {
  totalMB: number;
  cacheMB: number;
  imageMB: number;
  componentMB: number;
  warnThreshold: number; // Fraction of budget before warning
  criticalThreshold: number; // Fraction before aggressive cleanup
}

// Lower tiers get both smaller budgets AND earlier cleanup thresholds,
// since 1-2GB devices have little headroom before the OS kills the tab.
const memoryBudgets: Record<string, MemoryBudget> = {
  high: {
    totalMB: 512,
    cacheMB: 100,
    imageMB: 200,
    componentMB: 50,
    warnThreshold: 0.7,
    criticalThreshold: 0.9,
  },
  mid: {
    totalMB: 256,
    cacheMB: 50,
    imageMB: 100,
    componentMB: 25,
    warnThreshold: 0.6,
    criticalThreshold: 0.8,
  },
  low: {
    totalMB: 64,
    cacheMB: 10,
    imageMB: 25,
    componentMB: 10,
    warnThreshold: 0.5,
    criticalThreshold: 0.7,
  },
};
// Coordinates caches and the image pool against a per-tier memory budget,
// shedding cached data when the heap approaches the budget.
class MemoryManager {
  private budget: MemoryBudget;
  private caches: Map<string, LRUCache<unknown>> = new Map();
  private imagePool: ImagePool;
  private cleanupCallbacks: Array<() => void> = [];
  // Interval handle so the periodic check can be torn down via dispose()
  // (previously the interval leaked for the page's lifetime).
  private checkIntervalId: ReturnType<typeof setInterval> | undefined;

  constructor(tier: 'high' | 'mid' | 'low') {
    this.budget = memoryBudgets[tier];
    this.imagePool = new ImagePool(this.budget.imageMB * 1024 * 1024);
    this.setupMemoryPressureListener();
  }

  private setupMemoryPressureListener() {
    // NOTE(review): 'memory-pressure' is not a standardized event on
    // `performance`; this listener is speculative and harmless where the
    // event never fires. The polling below is the reliable path.
    if ('addEventListener' in performance) {
      performance.addEventListener('memory-pressure' as any, (event: any) => {
        if (event.pressure === 'critical') {
          this.handleCriticalMemory();
        } else if (event.pressure === 'moderate') {
          this.handleModerateMemory();
        }
      });
    }
    // Fallback: poll heap usage every 10s. performance.memory is
    // Chrome-only; checkMemoryUsage is a no-op elsewhere.
    this.checkIntervalId = setInterval(() => this.checkMemoryUsage(), 10000);
  }

  /** Stop the periodic memory check (call when the manager is retired). */
  dispose() {
    if (this.checkIntervalId !== undefined) {
      clearInterval(this.checkIntervalId);
      this.checkIntervalId = undefined;
    }
  }

  private checkMemoryUsage() {
    if (!('memory' in performance)) return;
    const memory = (performance as any).memory;
    const usedMB = memory.usedJSHeapSize / (1024 * 1024);
    // Usage is measured against OUR budget (not jsHeapSizeLimit) so cleanup
    // starts well before the browser would kill the tab.
    const usage = usedMB / this.budget.totalMB;
    if (usage > this.budget.criticalThreshold) {
      this.handleCriticalMemory();
    } else if (usage > this.budget.warnThreshold) {
      this.handleModerateMemory();
    }
  }

  // Aggressive shedding: drop every cache and image, and let registered
  // owners release their own resources.
  private handleCriticalMemory() {
    console.warn('Critical memory pressure - aggressive cleanup');
    for (const cache of this.caches.values()) {
      cache.clear();
    }
    this.imagePool.clear();
    for (const callback of this.cleanupCallbacks) {
      callback();
    }
    // Force garbage collection only where exposed (--expose-gc / devtools).
    if ('gc' in globalThis) {
      (globalThis as any).gc();
    }
  }

  // Gentle shedding: halve caches and the image pool.
  private handleModerateMemory() {
    console.log('Moderate memory pressure - reducing caches');
    for (const cache of this.caches.values()) {
      cache.trim(0.5); // Keep only half
    }
    this.imagePool.trim(0.5);
  }

  /** Create a named LRU cache that participates in pressure handling. */
  createCache<T>(
    name: string,
    options: { maxSize: number; maxAge?: number }
  ): LRUCache<T> {
    const cache = new LRUCache<T>(options);
    this.caches.set(name, cache as LRUCache<unknown>);
    return cache;
  }

  getImagePool(): ImagePool {
    return this.imagePool;
  }

  /** Register extra cleanup to run under critical pressure. */
  onCleanup(callback: () => void) {
    this.cleanupCallbacks.push(callback);
  }

  /**
   * Snapshot of memory accounting. NOTE(review): cachesMB divides the sum of
   * LRUCache sizes by 1MB, which assumes callers report sizes in bytes —
   * LRUCache sizes are caller-defined units; confirm call sites.
   */
  getStats(): {
    usedMB: number;
    cachesMB: number;
    imagesMB: number;
    budgetMB: number;
  } {
    const memory = (performance as any).memory;
    return {
      usedMB: memory ? memory.usedJSHeapSize / (1024 * 1024) : 0,
      cachesMB: Array.from(this.caches.values()).reduce((sum, c) => sum + c.size, 0) / (1024 * 1024),
      imagesMB: this.imagePool.getUsedBytes() / (1024 * 1024),
      budgetMB: this.budget.totalMB,
    };
  }
}
// LRU Cache with size limits
// Size-bounded LRU cache with optional TTL. Sizes are caller-supplied units
// (bytes, entry counts, ...); the cache guarantees currentSize <= maxSize.
class LRUCache<T> {
  // Map preserves insertion order; the first key is always least recently used.
  private cache: Map<string, { value: T; size: number; timestamp: number }> = new Map();
  private currentSize = 0;

  constructor(private options: { maxSize: number; maxAge?: number }) {}

  /**
   * Look up a key. Expired entries are removed and report a miss. A hit
   * refreshes recency and the entry's TTL clock (sliding expiration).
   */
  get(key: string): T | undefined {
    const entry = this.cache.get(key);
    if (!entry) return undefined;
    if (this.options.maxAge && Date.now() - entry.timestamp > this.options.maxAge) {
      this.delete(key);
      return undefined;
    }
    // Re-insert to move the key to the back of the Map's iteration order.
    this.cache.delete(key);
    this.cache.set(key, { ...entry, timestamp: Date.now() });
    return entry.value;
  }

  /**
   * Insert/replace a value of the given size, evicting LRU entries until it
   * fits. BUG FIX: an item larger than the entire budget used to evict
   * everything and then be inserted anyway, blowing the size invariant —
   * such items are now refused outright.
   */
  set(key: string, value: T, size: number = 1) {
    if (this.cache.has(key)) {
      this.delete(key);
    }
    if (size > this.options.maxSize) {
      return; // can never fit; caching it would violate maxSize
    }
    while (this.currentSize + size > this.options.maxSize && this.cache.size > 0) {
      const oldest = this.cache.keys().next().value;
      if (oldest === undefined) break; // defensive: iterator exhausted
      this.delete(oldest);
    }
    this.cache.set(key, { value, size, timestamp: Date.now() });
    this.currentSize += size;
  }

  /** Remove a key and release its accounted size (no-op when absent). */
  delete(key: string) {
    const entry = this.cache.get(key);
    if (entry) {
      this.currentSize -= entry.size;
      this.cache.delete(key);
    }
  }

  /** Drop everything. */
  clear() {
    this.cache.clear();
    this.currentSize = 0;
  }

  /** Evict LRU entries until only ~keepFraction of the current size remains. */
  trim(keepFraction: number) {
    const targetSize = Math.floor(this.currentSize * keepFraction);
    while (this.currentSize > targetSize && this.cache.size > 0) {
      const oldest = this.cache.keys().next().value;
      if (oldest === undefined) break;
      this.delete(oldest);
    }
  }

  /** Total accounted size of all entries (same units as set()'s size). */
  get size(): number {
    return this.currentSize;
  }
}
// Image pool with memory limits
// Caches decoded images under a byte budget (estimated width*height*4 per
// image), evicting least-recently-used entries when full.
class ImagePool {
  private images: Map<string, { img: HTMLImageElement; size: number; lastUsed: number }> = new Map();
  // In-flight loads. BUG FIX: concurrent load() calls for the same URL used
  // to each create an Image and each add to usedBytes (double-fetching and
  // corrupting the accounting); they now share one promise.
  private pending: Map<string, Promise<HTMLImageElement>> = new Map();
  private usedBytes = 0;

  constructor(private maxBytes: number) {}

  /** Load (or reuse) an image; resolves once decoded and accounted for. */
  async load(url: string): Promise<HTMLImageElement> {
    const existing = this.images.get(url);
    if (existing) {
      existing.lastUsed = Date.now();
      return existing.img;
    }
    const inFlight = this.pending.get(url);
    if (inFlight) return inFlight;

    const img = new Image();
    const promise = new Promise<HTMLImageElement>((resolve, reject) => {
      img.onload = () => {
        // Estimate memory: width * height * 4 bytes (RGBA).
        const estimatedBytes = img.naturalWidth * img.naturalHeight * 4;
        // Evict until the new image fits.
        while (this.usedBytes + estimatedBytes > this.maxBytes && this.images.size > 0) {
          this.evictOldest();
        }
        this.images.set(url, {
          img,
          size: estimatedBytes,
          lastUsed: Date.now(),
        });
        this.usedBytes += estimatedBytes;
        resolve(img);
      };
      img.onerror = reject;
      img.src = url;
    }).finally(() => {
      this.pending.delete(url);
    });
    this.pending.set(url, promise);
    return promise;
  }

  // Evict the least-recently-used image and release its memory estimate.
  private evictOldest() {
    let oldestUrl: string | null = null;
    let oldestTime = Infinity;
    for (const [url, entry] of this.images) {
      if (entry.lastUsed < oldestTime) {
        oldestTime = entry.lastUsed;
        oldestUrl = url;
      }
    }
    if (oldestUrl) {
      const entry = this.images.get(oldestUrl)!;
      this.usedBytes -= entry.size;
      entry.img.src = ''; // Detach so the decoder can release the bitmap
      this.images.delete(oldestUrl);
    }
  }

  /**
   * Drop all cached images. NOTE(review): loads still in flight will insert
   * their image once they finish; avoid racing clear() against load() when a
   * hard reset is required.
   */
  clear() {
    for (const entry of this.images.values()) {
      entry.img.src = '';
    }
    this.images.clear();
    this.usedBytes = 0;
  }

  /** Evict LRU images until usage is at most keepFraction of the budget. */
  trim(keepFraction: number) {
    const targetBytes = Math.floor(this.maxBytes * keepFraction);
    while (this.usedBytes > targetBytes && this.images.size > 0) {
      this.evictOldest();
    }
  }

  getUsedBytes(): number {
    return this.usedBytes;
  }
}
export { MemoryManager, LRUCache, ImagePool, MemoryBudget };
Progressive Enhancement Strategy
Feature Detection and Enhancement Layers
// src/enhancement/progressive.ts
// Feature-detection results used to pick an enhancement tier.
interface FeatureCapabilities {
  // JavaScript features
  asyncAwait: boolean;
  serviceWorker: boolean;
  webWorkers: boolean;
  sharedArrayBuffer: boolean;
  wasm: boolean;
  // APIs
  intersectionObserver: boolean;
  resizeObserver: boolean;
  mutationObserver: boolean;
  webGL: boolean;
  webGL2: boolean;
  // Storage
  indexedDB: boolean;
  localStorage: boolean;
  cacheAPI: boolean;
  // Network
  fetchAPI: boolean;
  streams: boolean;
  // Performance
  performanceObserver: boolean;
  requestIdleCallback: boolean;
  scheduler: boolean;
}

/**
 * Probe the runtime for capabilities. BUG FIXES: all probes are guarded so
 * this no longer throws outside a full browser (SSR, workers) or in browsers
 * where merely touching localStorage raises a SecurityError (cookies
 * disabled / private mode).
 */
function detectCapabilities(): FeatureCapabilities {
  const hasWindow = typeof window !== 'undefined';
  const hasDocument = typeof document !== 'undefined';
  const hasNavigator = typeof navigator !== 'undefined';

  // Accessing localStorage can throw when storage is blocked, so probe
  // inside try/catch instead of a bare typeof.
  let hasLocalStorage = false;
  try {
    hasLocalStorage = typeof localStorage !== 'undefined' && localStorage !== null;
  } catch {
    hasLocalStorage = false;
  }

  // getContext can throw on some legacy/embedded browsers; treat as absent.
  const probeWebGL = (context: 'webgl' | 'webgl2'): boolean => {
    if (!hasDocument) return false;
    try {
      return !!document.createElement('canvas').getContext(context);
    } catch {
      return false;
    }
  };

  return {
    asyncAwait: typeof (async () => {}).constructor === 'function',
    serviceWorker: hasNavigator && 'serviceWorker' in navigator,
    webWorkers: typeof Worker !== 'undefined',
    sharedArrayBuffer: typeof SharedArrayBuffer !== 'undefined',
    wasm: typeof WebAssembly !== 'undefined',
    intersectionObserver: typeof IntersectionObserver !== 'undefined',
    resizeObserver: typeof ResizeObserver !== 'undefined',
    mutationObserver: typeof MutationObserver !== 'undefined',
    webGL: probeWebGL('webgl'),
    webGL2: probeWebGL('webgl2'),
    indexedDB: typeof indexedDB !== 'undefined',
    localStorage: hasLocalStorage,
    cacheAPI: hasWindow && 'caches' in window,
    fetchAPI: typeof fetch !== 'undefined',
    streams: typeof ReadableStream !== 'undefined',
    performanceObserver: typeof PerformanceObserver !== 'undefined',
    requestIdleCallback: hasWindow && 'requestIdleCallback' in window,
    scheduler: 'scheduler' in globalThis,
  };
}
// Enhancement tiers
// Enhancement tiers, from no-frills to fully modern.
type EnhancementTier = 'basic' | 'standard' | 'enhanced' | 'cutting-edge';

/**
 * Map detected capabilities to the richest tier the device supports.
 * Tiers are checked from most to least demanding; 'basic' is the floor.
 */
function determineEnhancementTier(caps: FeatureCapabilities): EnhancementTier {
  const isCuttingEdge =
    caps.serviceWorker &&
    caps.webWorkers &&
    caps.intersectionObserver &&
    caps.streams &&
    caps.scheduler;
  if (isCuttingEdge) return 'cutting-edge';

  const isEnhanced =
    caps.asyncAwait && caps.fetchAPI && caps.intersectionObserver && caps.indexedDB;
  if (isEnhanced) return 'enhanced';

  if (caps.fetchAPI && caps.localStorage) return 'standard';

  // Minimal JavaScript support only.
  return 'basic';
}
// Component variants by tier
// One component implementation per enhancement tier; 'cutting-edge' may be
// omitted, in which case the 'enhanced' variant is used.
interface ComponentVariants<P> {
  basic: React.ComponentType<P>;
  standard: React.ComponentType<P>;
  enhanced: React.ComponentType<P>;
  'cutting-edge'?: React.ComponentType<P>;
}

/**
 * Pick the variant matching the current tier, falling back toward simpler
 * implementations when richer ones are not provided.
 */
function createProgressiveComponent<P>(
  variants: ComponentVariants<P>,
  tier: EnhancementTier
): React.ComponentType<P> {
  const fallbackChain = [variants[tier], variants.enhanced, variants.standard, variants.basic];
  return fallbackChain.find((variant) => Boolean(variant))!;
}
// Example: Progressive image component
import React, { useState, useEffect, useRef } from 'react';
// Props shared by all ProgressiveImage variants. Explicit width/height let
// the browser reserve layout space up front and avoid layout shift.
interface ProgressiveImageProps {
  src: string;
  alt: string;
  width: number;
  height: number;
  placeholder?: string; // optional tiny/blurred preview image URL
}

// Basic tier: a plain <img> with native lazy loading — needs no JS APIs
// beyond React itself, so it works on the oldest supported browsers.
const BasicImage: React.FC<ProgressiveImageProps> = ({ src, alt, width, height }) => (
  <img src={src} alt={alt} width={width} height={height} loading="lazy" />
);
// Standard: With placeholder and load transition
// Standard tier: shows a blurred placeholder (when provided) that
// cross-fades to the full image; relies only on native lazy loading.
const StandardImage: React.FC<ProgressiveImageProps> = ({
  src,
  alt,
  width,
  height,
  placeholder,
}) => {
  // Tracks whether the full-resolution image has finished loading.
  const [loaded, setLoaded] = useState(false);
  return (
    <div style={{ position: 'relative', width, height }}>
      {placeholder && !loaded && (
        <img
          src={placeholder}
          alt=""
          style={{ position: 'absolute', filter: 'blur(10px)' }}
        />
      )}
      <img
        src={src}
        alt={alt}
        loading="lazy"
        onLoad={() => setLoaded(true)}
        style={{ opacity: loaded ? 1 : 0, transition: 'opacity 0.3s' }}
      />
    </div>
  );
};
// Enhanced: With intersection observer lazy loading
// Enhanced tier: defers the network request entirely until the image is
// about to scroll into view (IntersectionObserver), then cross-fades from
// the blurred placeholder.
const EnhancedImage: React.FC<ProgressiveImageProps> = ({
  src,
  alt,
  width,
  height,
  placeholder,
}) => {
  const [loaded, setLoaded] = useState(false);
  // inView gates rendering of the real <img>, which is what defers the fetch.
  const [inView, setInView] = useState(false);
  const ref = useRef<HTMLDivElement>(null);

  useEffect(() => {
    const observer = new IntersectionObserver(
      ([entry]) => {
        if (entry.isIntersecting) {
          setInView(true);
          // One-shot: once visible, the observer is never needed again.
          observer.disconnect();
        }
      },
      // Start loading 50px before the element enters the viewport.
      { rootMargin: '50px' }
    );
    if (ref.current) {
      observer.observe(ref.current);
    }
    return () => observer.disconnect();
  }, []);

  return (
    <div ref={ref} style={{ width, height }}>
      {placeholder && !loaded && (
        <img src={placeholder} alt="" style={{ filter: 'blur(10px)' }} />
      )}
      {inView && (
        <img
          src={src}
          alt={alt}
          onLoad={() => setLoaded(true)}
          style={{ opacity: loaded ? 1 : 0, transition: 'opacity 0.3s' }}
        />
      )}
    </div>
  );
};
// Select appropriate component
// Detect once at module load; the capability set doesn't change within a
// session.
const capabilities = detectCapabilities();
const tier = determineEnhancementTier(capabilities);

// ProgressiveImage resolves to the richest image variant this device supports.
const ProgressiveImage = createProgressiveComponent<ProgressiveImageProps>(
  {
    basic: BasicImage,
    standard: StandardImage,
    enhanced: EnhancedImage,
  },
  tier
);
export {
detectCapabilities,
determineEnhancementTier,
createProgressiveComponent,
ProgressiveImage,
FeatureCapabilities,
EnhancementTier,
};
Graceful Degradation Patterns
Fallback Hierarchies
// src/enhancement/degradation.ts
// Ladder of implementations: try `ideal`, then each applicable fallback,
// then a synchronous `ultimate` that must always succeed.
interface DegradationConfig<T> {
  ideal: () => Promise<T>;
  fallbacks: Array<{
    condition: () => boolean; // is this fallback usable in this environment?
    implementation: () => Promise<T>;
    name: string; // for debug logging
  }>;
  ultimate: () => T; // Synchronous, always works
  timeout: number; // per-attempt timeout in ms
}

/**
 * Race `attempt` against a timeout. BUG FIXES vs the naive inline
 * Promise.race: the timer is cleared when the attempt settles first (it
 * previously kept running for the full timeout), and a late rejection of a
 * timed-out attempt is swallowed instead of surfacing as an unhandled
 * promise rejection.
 */
async function attemptWithTimeout<T>(attempt: () => Promise<T>, timeoutMs: number): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const attemptPromise = Promise.resolve().then(attempt);
  try {
    return await Promise.race([
      attemptPromise,
      new Promise<never>((_, reject) => {
        timer = setTimeout(() => reject(new Error('timeout')), timeoutMs);
      }),
    ]);
  } catch (error) {
    // Discard any late failure of the losing promise.
    attemptPromise.catch(() => {});
    throw error;
  } finally {
    if (timer !== undefined) clearTimeout(timer);
  }
}

/**
 * Run the ideal implementation, then each fallback whose condition() holds,
 * each bounded by config.timeout; return config.ultimate() if all fail.
 */
async function withGracefulDegradation<T>(config: DegradationConfig<T>): Promise<T> {
  try {
    return await attemptWithTimeout(config.ideal, config.timeout);
  } catch (error) {
    console.debug('Ideal implementation failed:', error);
  }
  for (const fallback of config.fallbacks) {
    if (!fallback.condition()) continue;
    try {
      const result = await attemptWithTimeout(fallback.implementation, config.timeout);
      console.debug(`Using fallback: ${fallback.name}`);
      return result;
    } catch (error) {
      console.debug(`Fallback ${fallback.name} failed:`, error);
    }
  }
  // Ultimate fallback - always works
  console.debug('Using ultimate fallback');
  return config.ultimate();
}
// Example: Data fetching with degradation
// Minimal product shape used by the fetching example below.
interface Product {
  id: string;
  name: string;
  price: number;
}

/**
 * Fetch the product list with graceful degradation:
 * streaming fetch → plain fetch → XHR → localStorage cache → empty list.
 */
async function fetchProducts(): Promise<Product[]> {
  return withGracefulDegradation({
    // Ideal: read the response body as a stream. BUG FIX: chunks were
    // previously JSON.parse'd individually, which fails whenever a JSON
    // value straddles a chunk boundary (chunk sizes are arbitrary);
    // accumulate the full text first and parse once.
    ideal: async () => {
      const response = await fetch('/api/products');
      const reader = response.body!.getReader();
      const decoder = new TextDecoder();
      let text = '';
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        // stream:true handles multi-byte characters split across chunks
        text += decoder.decode(value, { stream: true });
      }
      text += decoder.decode(); // flush any buffered bytes
      return JSON.parse(text) as Product[];
    },
    fallbacks: [
      // Fallback 1: plain fetch without streaming
      {
        condition: () => typeof fetch !== 'undefined',
        name: 'regular-fetch',
        implementation: async () => {
          const response = await fetch('/api/products');
          return response.json();
        },
      },
      // Fallback 2: XMLHttpRequest for legacy browsers (UC/Opera Mini)
      {
        condition: () => typeof XMLHttpRequest !== 'undefined',
        name: 'xhr',
        implementation: () =>
          new Promise((resolve, reject) => {
            const xhr = new XMLHttpRequest();
            xhr.open('GET', '/api/products');
            xhr.onload = () => resolve(JSON.parse(xhr.responseText));
            xhr.onerror = () => reject(new Error('XHR failed'));
            xhr.send();
          }),
      },
      // Fallback 3: last-known-good data cached in localStorage
      {
        condition: () => typeof localStorage !== 'undefined',
        name: 'local-cache',
        implementation: async () => {
          const cached = localStorage.getItem('products-cache');
          if (cached) {
            return JSON.parse(cached);
          }
          throw new Error('No cache');
        },
      },
    ],
    // Ultimate: an empty catalog rather than a crash
    ultimate: () => [],
    timeout: 5000,
  });
}
// UI degradation component
// Props for DegradingUI: render `children` when `condition` holds,
// otherwise the simpler `fallback`.
interface DegradingUIProps {
  children: React.ReactNode;
  fallback: React.ReactNode;
  condition: boolean;
}

// Declarative switch between a rich UI and its degraded fallback.
const DegradingUI: React.FC<DegradingUIProps> = ({ children, fallback, condition }) => (
  <>{condition ? children : fallback}</>
);
// Error boundary with degradation
// Error boundary that swaps in a degraded fallback UI when any descendant
// throws during render. (Error boundaries must be class components.)
class DegradationBoundary extends React.Component<
  {
    children: React.ReactNode;
    fallback: React.ReactNode;
    onError?: (error: Error) => void; // optional reporting hook
  },
  { hasError: boolean }
> {
  state = { hasError: false };

  // Render phase: flip to the fallback on the next render after a throw.
  static getDerivedStateFromError() {
    return { hasError: true };
  }

  // Commit phase: side effects such as logging/reporting belong here.
  componentDidCatch(error: Error) {
    this.props.onError?.(error);
  }

  render() {
    if (this.state.hasError) {
      return this.props.fallback;
    }
    return this.props.children;
  }
}
export { withGracefulDegradation, DegradingUI, DegradationBoundary };
Network-Aware Loading
Adaptive Loading Based on Connection
// src/network/adaptive-loading.ts
// Normalized snapshot of the Network Information API.
interface NetworkInfo {
  effectiveType: '4g' | '3g' | '2g' | 'slow-2g';
  downlink: number; // Mbps
  rtt: number; // ms
  saveData: boolean; // user requested reduced data usage
}

// Optimistic defaults used when the Network Information API is unavailable
// (e.g. Safari/Firefox): assume a decent connection.
const DEFAULT_NETWORK: NetworkInfo = {
  effectiveType: '4g',
  downlink: 10,
  rtt: 50,
  saveData: false,
};

/**
 * Read the current connection info, tolerating vendor prefixes and missing
 * fields; returns a fresh defaults object when the API is absent.
 */
function getNetworkInfo(): NetworkInfo {
  const nav = navigator as any;
  const connection = nav.connection || nav.mozConnection || nav.webkitConnection;
  if (!connection) {
    return { ...DEFAULT_NETWORK };
  }
  return {
    effectiveType: connection.effectiveType || DEFAULT_NETWORK.effectiveType,
    downlink: connection.downlink || DEFAULT_NETWORK.downlink,
    rtt: connection.rtt || DEFAULT_NETWORK.rtt,
    saveData: connection.saveData || DEFAULT_NETWORK.saveData,
  };
}
// Image quality selection
// Per-quality URL variants for one image asset.
interface ImageQualityConfig {
  url: string;
  qualities: {
    low: string; // WebP, compressed
    medium: string; // Standard quality
    high: string; // Full quality
  };
  width: number;
}

/**
 * Choose an image variant for the current connection: data-saver users and
 * slow-2g links get low, 2g/3g get medium, everything else gets high.
 */
function selectImageQuality(config: ImageQualityConfig): string {
  const { effectiveType, saveData } = getNetworkInfo();
  if (saveData || effectiveType === 'slow-2g') {
    return config.qualities.low;
  }
  const isMidTierLink = effectiveType === '2g' || effectiveType === '3g';
  return isMidTierLink ? config.qualities.medium : config.qualities.high;
}
// Prefetch strategy
// Whether (and how aggressively) to prefetch on this connection.
type PrefetchPriority = 'high' | 'low' | 'none';

/**
 * Decide prefetch aggressiveness: never against the user's save-data wish
 * or on 2g-class links, cautiously on 3g, freely otherwise.
 */
function getPrefetchPriority(network: NetworkInfo): PrefetchPriority {
  if (network.saveData) return 'none';
  switch (network.effectiveType) {
    case 'slow-2g':
    case '2g':
      return 'none';
    case '3g':
      return 'low';
    default:
      return 'high';
  }
}
// Component loading strategy
// How a resource should be loaded.
interface LoadingStrategy {
  preload: boolean;
  lazy: boolean;
  priority: 'eager' | 'lazy' | 'idle';
  fetchPriority: 'high' | 'low' | 'auto';
}

/**
 * Derive a loading strategy from resource importance and current network.
 * Critical resources always load eagerly; constrained networks push
 * everything else toward idle/lazy loading.
 */
function getLoadingStrategy(importance: 'critical' | 'high' | 'normal' | 'low'): LoadingStrategy {
  const network = getNetworkInfo();
  if (importance === 'critical') {
    return {
      preload: true,
      lazy: false,
      priority: 'eager',
      fetchPriority: 'high',
    };
  }
  // BUG FIX: 'slow-2g' was previously omitted here, so the very slowest
  // links skipped the data-saving path entirely (inconsistent with
  // getPrefetchPriority, which treats slow-2g at least as strictly as 2g).
  if (
    network.saveData ||
    network.effectiveType === '2g' ||
    network.effectiveType === 'slow-2g'
  ) {
    return {
      preload: false,
      lazy: true,
      priority: 'idle',
      fetchPriority: 'low',
    };
  }
  if (importance === 'high') {
    return {
      preload: network.effectiveType === '4g', // only prefetch on fast links
      lazy: false,
      priority: 'eager',
      fetchPriority: 'auto',
    };
  }
  return {
    preload: false,
    lazy: true,
    priority: 'lazy',
    fetchPriority: 'low',
  };
}
// React hook for network-aware loading
/**
 * React hook that loads data with network-appropriate urgency.
 * NOTE(review): `loader` is in the effect deps — callers must memoize it
 * (useCallback) or the effect re-runs on every render; confirm call sites.
 */
function useNetworkAwareLoader<T>(
  loader: () => Promise<T>,
  options: {
    importance: 'critical' | 'high' | 'normal' | 'low';
    fallback?: T;
  }
) {
  const [data, setData] = useState<T | undefined>(options.fallback);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<Error | null>(null);

  useEffect(() => {
    const strategy = getLoadingStrategy(options.importance);
    // Guard against setState after unmount (previously unhandled).
    let cancelled = false;

    const load = async () => {
      try {
        const result = await loader();
        if (!cancelled) setData(result);
      } catch (err) {
        if (cancelled) return;
        setError(err as Error);
        if (options.fallback !== undefined) {
          setData(options.fallback);
        }
      } finally {
        if (!cancelled) setLoading(false);
      }
    };

    // BUG FIX: requestIdleCallback was called unguarded on the lowest
    // priority path, crashing browsers without it — exactly the low-end
    // browsers that path targets. Both idle paths now share one guard.
    const scheduleIdle = (timeout: number) => {
      if ('requestIdleCallback' in window) {
        requestIdleCallback(() => load(), { timeout });
      } else {
        setTimeout(load, 100);
      }
    };

    if (strategy.priority === 'eager') {
      load();
    } else if (strategy.priority === 'lazy') {
      scheduleIdle(2000);
    } else {
      // Very low priority
      scheduleIdle(5000);
    }

    return () => {
      cancelled = true;
    };
  }, [loader, options.importance]);

  return { data, loading, error };
}
// Network change listener
/**
 * Subscribe to connection changes; returns an unsubscribe function.
 * A no-op unsubscriber is returned when the API is unavailable.
 */
function onNetworkChange(callback: (network: NetworkInfo) => void): () => void {
  const nav = navigator as any;
  const connection = nav.connection || nav.mozConnection || nav.webkitConnection;
  if (!connection) {
    return () => {};
  }
  const notify = () => callback(getNetworkInfo());
  connection.addEventListener('change', notify);
  return () => connection.removeEventListener('change', notify);
}
export {
getNetworkInfo,
selectImageQuality,
getPrefetchPriority,
getLoadingStrategy,
useNetworkAwareLoader,
onNetworkChange,
NetworkInfo,
};
Bundle Size Optimization for Emerging Markets
Aggressive Code Splitting
// src/build/code-splitting.ts
// Route-based splitting configuration
// Route-level bundle budgets and preload policies.
const routeConfig = {
  // Core routes - minimal bundle (must fit the 50KB emerging-market budget)
  core: {
    routes: ['/', '/login', '/404'],
    maxSize: 50_000, // 50KB
  },
  // Feature routes - loaded on demand
  features: {
    dashboard: {
      routes: ['/dashboard/*'],
      maxSize: 100_000,
      preload: 'on-hover', // warm the chunk when its nav link is hovered
    },
    profile: {
      routes: ['/profile/*'],
      maxSize: 50_000,
      preload: 'on-interaction', // warm on first click/touch
    },
    settings: {
      routes: ['/settings/*'],
      maxSize: 30_000,
      preload: 'never',
    },
  },
  // Heavy features - explicit user action only
  heavy: {
    editor: {
      routes: ['/editor/*'],
      maxSize: 200_000,
      preload: 'never',
      showLoader: true, // chunk is big enough to warrant a visible loader
    },
    analytics: {
      routes: ['/analytics/*'],
      maxSize: 150_000,
      preload: 'never',
      showLoader: true,
    },
  },
};
// Dynamic import with retry
/**
 * Run a dynamic import, retrying on failure — flaky networks in emerging
 * markets frequently drop chunk requests. Waits `delay` ms between tries
 * and rethrows the final error once retries are exhausted.
 */
async function dynamicImportWithRetry<T>(
  importFn: () => Promise<T>,
  options: { retries?: number; delay?: number } = {}
): Promise<T> {
  const { retries = 3, delay = 1000 } = options;
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await importFn();
    } catch (error) {
      lastError = error;
      if (attempt === retries) break;
      console.warn(`Import failed, retrying in ${delay}ms...`);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  throw lastError ?? new Error('Import failed after retries');
}
// Lazy component with loading states
/**
 * Build a code-split component with retrying import, Suspense/error UI, and
 * optional event-driven preloading ('on-hover' | 'on-interaction').
 * The returned component also exposes a .preload() method for manual warmup.
 */
function createLazyComponent<P extends object>(
  importFn: () => Promise<{ default: React.ComponentType<P> }>,
  options: {
    fallback?: React.ReactNode;
    errorFallback?: React.ReactNode;
    preload?: 'on-hover' | 'on-interaction' | 'never';
  } = {}
) {
  let preloaded = false;
  const LazyComponent = React.lazy(() => dynamicImportWithRetry(importFn));

  // Kick off the chunk download once; repeat calls are no-ops.
  const preload = () => {
    if (!preloaded) {
      preloaded = true;
      importFn();
    }
  };

  type WrappedProps = P & { preloadTrigger?: React.RefObject<HTMLElement> };

  const WrappedComponent = (props: WrappedProps) => {
    // BUG FIX: previously `props.preloadTrigger || useRef(...)` called the
    // hook conditionally, violating the Rules of Hooks (the hook was skipped
    // whenever a trigger ref was passed). The hook now runs unconditionally.
    const internalRef = useRef<HTMLElement>(null);
    const ref = props.preloadTrigger ?? internalRef;

    useEffect(() => {
      if (options.preload === 'never') return;
      const element = ref.current;
      if (!element) return;
      const events = options.preload === 'on-hover'
        ? ['mouseenter', 'focus']
        : ['click', 'touchstart'];
      events.forEach((event) => {
        element.addEventListener(event, preload, { once: true, passive: true });
      });
      return () => {
        events.forEach((event) => {
          element.removeEventListener(event, preload);
        });
      };
    }, []);

    return (
      <DegradationBoundary fallback={options.errorFallback || <div>Error loading component</div>}>
        <React.Suspense fallback={options.fallback || <div>Loading...</div>}>
          <LazyComponent {...(props as P)} />
        </React.Suspense>
      </DegradationBoundary>
    );
  };

  // BUG FIX: assigning `.preload` onto a value typed React.FC was a type
  // error; Object.assign produces a properly typed function-with-property.
  return Object.assign(WrappedComponent, { preload });
}
// Example usage
// Example: the dashboard chunk is warmed up when the user hovers its link.
const Dashboard = createLazyComponent(
  () => import('./pages/Dashboard'),
  {
    fallback: <DashboardSkeleton />,
    preload: 'on-hover',
  }
);

// Example: the editor chunk (~200KB budget) is only fetched on explicit
// navigation, with a full-page loader while it downloads.
const Editor = createLazyComponent(
  () => import('./pages/Editor'),
  {
    fallback: <FullPageLoader message="Loading editor..." />,
    preload: 'never', // Too heavy to preload
  }
);
export { createLazyComponent, dynamicImportWithRetry, routeConfig };
Key Takeaways
-
Device tiers need different budgets: A $50 phone parses JS 10x slower than a flagship—design accordingly
-
CPU budget is finite per frame: 16ms for 60fps, but target 33ms (30fps) on low-end devices
-
Memory is the silent killer: 1-2GB devices can't cache aggressively; implement LRU with pressure monitoring
-
Progressive enhancement, not degradation: Start with HTML that works everywhere, enhance with JS
-
Network detection enables adaptation: Load high-res images on 4G, tiny placeholders on 2G
-
Code splitting is mandatory: 50KB core bundle for emerging markets; lazy load everything else
-
Chunk large computations: Never block the main thread for more than 50ms
-
Fallback hierarchies are defensive: fetch → XHR → cache → static data
-
Test on real devices: Emulators don't capture true performance of low-end hardware
-
Save-Data header is a signal: Users explicitly asking for less data should always get minimal experiences
Building for the next billion users isn't about removing features—it's about delivering core value within severe constraints while progressively enhancing for capable devices.
What did you think?