Optimizing React Performance in Production
Performance optimization in React isn't just about making things faster — it's about creating experiences that feel instantaneous. After optimizing numerous production applications, I've discovered patterns and techniques that consistently deliver exceptional performance. This guide covers advanced optimization strategies that go beyond the basics.
Measuring Performance: Start with Data
Before optimizing, establish baseline metrics:
// utils/performance-monitor.ts
export class PerformanceMonitor {
  /** Buffered timing entries, keyed by component name. */
  private metrics: Map<string, PerformanceEntry[]> = new Map();

  /**
   * Times a synchronous render/update callback and records the result.
   * @param componentName label used for the performance mark/measure names
   * @param fn synchronous work to measure
   */
  measureComponent(componentName: string, fn: () => void) {
    const startMark = `${componentName}-start`;
    const endMark = `${componentName}-end`;
    performance.mark(startMark);
    fn();
    performance.mark(endMark);
    performance.measure(componentName, startMark, endMark);
    // Take the MOST RECENT measure — the original read [0], which returns
    // the oldest entry once the same component is measured more than once.
    const entries = performance.getEntriesByName(componentName);
    const measure = entries[entries.length - 1];
    if (measure) this.recordMetric(componentName, measure);
    // Clean up so the performance buffer doesn't grow without bound.
    performance.clearMarks(startMark);
    performance.clearMarks(endMark);
    performance.clearMeasures(componentName);
  }

  /** Buffers an entry and flushes once 100 samples have accumulated. */
  recordMetric(name: string, entry: PerformanceEntry) {
    if (!this.metrics.has(name)) {
      this.metrics.set(name, []);
    }
    this.metrics.get(name)!.push(entry);
    // Send to analytics after threshold
    if (this.metrics.get(name)!.length >= 100) {
      this.flushMetrics(name);
    }
  }

  /** Read-only view of the currently buffered entries (empty if none). */
  getMetrics(name: string): readonly PerformanceEntry[] {
    return this.metrics.get(name) ?? [];
  }

  /**
   * Ships the buffered entries for `name` and clears the buffer.
   * NOTE(review): the original called this method without ever defining it,
   * which threw a TypeError on the 100th sample. Wire the batch to your
   * analytics transport here (e.g. navigator.sendBeacon).
   */
  private flushMetrics(name: string) {
    const batch = this.metrics.get(name) ?? [];
    this.metrics.set(name, []);
    // e.g. navigator.sendBeacon('/metrics', JSON.stringify(batch));
    void batch;
  }
}
Advanced Code Splitting Strategies
1. Route-Based with Prefetching
// app/routes.tsx
import { lazy, Suspense } from 'react';
import { prefetchComponent } from '@/utils/prefetch';
// Lazy load with prefetch on hover
// Code-split the dashboard route into its own webpack chunk; the import()
// only executes the first time React renders <Dashboard /> (or when the
// prefetch utility below kicks it off early).
const Dashboard = lazy(() =>
import(/* webpackChunkName: "dashboard" */ './pages/Dashboard')
);
// Routes/Route and DashboardSkeleton come from react-router / app code
// defined elsewhere; only Dashboard and prefetchComponent are shown here.
// NOTE(review): these onMouseEnter/onTouchStart prefetch handlers live
// INSIDE the matched route element, so they can only fire after navigation
// has already started rendering <Dashboard /> — by then the chunk is loading
// anyway. For a real head start, attach prefetchComponent to the nav <Link>
// that points at /dashboard — confirm intended placement.
export function AppRoutes() {
return (
<Routes>
<Route
path="/dashboard"
element={
<Suspense fallback={<DashboardSkeleton />}>
<div
onMouseEnter={() => prefetchComponent(Dashboard)}
onTouchStart={() => prefetchComponent(Dashboard)}
>
<Dashboard />
</div>
</Suspense>
}
/>
</Routes>
);
}
// Prefetch utility
/**
 * Eagerly kicks off the dynamic import behind a React.lazy component so the
 * chunk is (likely) cached before the user navigates.
 *
 * NOTE(review): this relies on React.lazy internals (`_init`/`_payload`),
 * which are undocumented and can change between React versions — verify
 * against the React version in use. The original awaited
 * `Component._payload._result` while it was still `undefined`, which never
 * started the import at all.
 */
export async function prefetchComponent(Component: any) {
  const payload = Component?._payload;
  const init = Component?._init;
  if (typeof init !== 'function' || payload == null) return;
  try {
    // Invoking the lazy initializer starts the underlying import().
    init(payload);
  } catch (thrown) {
    // While loading, lazy initializers throw the pending promise
    // (the Suspense protocol). Prefetch is best-effort, so wait quietly
    // and swallow load failures — rendering will retry/report them.
    if (thrown instanceof Promise) await thrown.catch(() => {});
  }
}
2. Component-Level Splitting
// components/HeavyComponent.tsx
// Suspense was used below but missing from the original import list.
import { lazy, Suspense, useState, useEffect } from 'react';

// The chart library lands in its own "charts" chunk, fetched on demand.
const HeavyChart = lazy(() =>
  import(/* webpackChunkName: "charts" */ './Chart')
);

/**
 * Defers downloading the chart bundle until its container scrolls into
 * view, rendering a lightweight placeholder until then.
 */
export function DataVisualization({ data }) {
  const [shouldLoad, setShouldLoad] = useState(false);

  useEffect(() => {
    // Load when component is in viewport
    const observer = new IntersectionObserver(
      (entries) => {
        if (entries.some((entry) => entry.isIntersecting)) {
          setShouldLoad(true);
          // One-shot: stop observing once the load has been triggered,
          // instead of firing on every subsequent intersection change.
          observer.disconnect();
        }
      },
      { threshold: 0.1 }
    );
    // NOTE(review): a ref would be more robust than a document-wide id
    // lookup if several instances can mount at once — confirm usage.
    const element = document.getElementById('chart-container');
    if (element) observer.observe(element);
    return () => observer.disconnect();
  }, []);

  return (
    <div id="chart-container">
      {shouldLoad ? (
        <Suspense fallback={<ChartSkeleton />}>
          <HeavyChart data={data} />
        </Suspense>
      ) : (
        <ChartPlaceholder />
      )}
    </div>
  );
}
React 18 Concurrent Features
1. Transitions for Non-Urgent Updates
// components/SearchResults.tsx
import { useTransition, useState, useDeferredValue } from 'react';
/**
 * Search box whose input stays responsive while result rendering is
 * deprioritized via a React 18 transition.
 */
export function SearchInterface() {
  const [query, setQuery] = useState('');
  const [results, setResults] = useState([]);
  const [isPending, startTransition] = useTransition();
  // Deferred copy of the query lets heavy result rendering lag the input.
  const deferredQuery = useDeferredValue(query);

  const handleSearch = (e: React.ChangeEvent<HTMLInputElement>) => {
    // Capture before the async hop — don't read the event object later.
    const value = e.target.value;
    // Urgent: update the input immediately.
    setQuery(value);
    searchAPI(value).then((data) => {
      // Non-urgent: the transition must wrap the state update ITSELF.
      // The original wrapped the fetch call, but React 18 transitions do
      // not extend across promise continuations, so setResults was
      // effectively an urgent update.
      startTransition(() => setResults(data));
      // NOTE(review): out-of-order responses can still clobber newer
      // results — add a sequence counter or AbortController guard if
      // searchAPI latency varies.
    });
  };

  return (
    <div>
      <input
        value={query}
        onChange={handleSearch}
        className={isPending ? 'opacity-70' : ''}
      />
      <SearchResults
        results={results}
        query={deferredQuery}
        isPending={isPending}
      />
    </div>
  );
}
2. Suspense for Data Fetching
// lib/data-fetching.ts
/**
 * Adapts a promise to the Suspense "resource" protocol: `read()` throws the
 * in-flight promise while pending, throws the rejection reason on failure,
 * and returns the resolved value on success.
 */
function wrapPromise<T>(promise: Promise<T>) {
  type State =
    | { status: 'pending' }
    | { status: 'success'; value: T }
    | { status: 'error'; reason: unknown };

  let state: State = { status: 'pending' };

  // Settling the promise flips the state exactly once.
  const suspender = promise.then(
    (value) => { state = { status: 'success', value }; },
    (reason) => { state = { status: 'error', reason }; }
  );

  return {
    read(): T {
      switch (state.status) {
        case 'pending':
          throw suspender; // Suspense catches this and waits
        case 'error':
          throw state.reason;
        default:
          return state.value;
      }
    },
  };
}
// Usage with Suspense
// The resource is created at module scope, so the fetch starts as soon as
// this module is imported ("render-as-you-fetch"), not when the component
// first renders. fetchUserData is defined elsewhere in the app.
const resource = wrapPromise(fetchUserData());
// Suspends (resource.read() throws the pending promise) until the data
// resolves — wrap the call site in <Suspense> with a fallback.
function UserProfile() {
const userData = resource.read();
return <div>{userData.name}</div>;
}
Virtualization for Large Lists
// components/VirtualList.tsx
// @tanstack/react-virtual v3 exports useVirtualizer; the original mixed the
// old react-virtual v2 hook name (useVirtual, .totalSize, .virtualItems)
// with v3 option names (count/getScrollElement), which cannot work.
import { useVirtualizer } from '@tanstack/react-virtual';
import { useRef } from 'react';

/**
 * Windowed list: only rows near the viewport are mounted, so the DOM stays
 * small regardless of items.length.
 */
export function VirtualizedList({ items }: { items: any[] }) {
  const parentRef = useRef<HTMLDivElement>(null);

  const virtualizer = useVirtualizer({
    count: items.length,
    getScrollElement: () => parentRef.current,
    estimateSize: () => 50, // px per row until measured
    overscan: 5,            // extra rows above/below to avoid blank flashes
  });

  return (
    <div
      ref={parentRef}
      className="h-[600px] overflow-auto"
    >
      {/* Spacer with the full scroll height; visible rows are absolutely
          positioned inside it. */}
      <div
        style={{
          height: `${virtualizer.getTotalSize()}px`,
          position: 'relative',
        }}
      >
        {virtualizer.getVirtualItems().map((virtualItem) => (
          <div
            key={virtualItem.key}
            style={{
              position: 'absolute',
              top: 0,
              left: 0,
              width: '100%',
              height: `${virtualItem.size}px`,
              transform: `translateY(${virtualItem.start}px)`,
            }}
          >
            <ListItem item={items[virtualItem.index]} />
          </div>
        ))}
      </div>
    </div>
  );
}
Memoization Strategies
1. Smart Memoization with Custom Comparators
// hooks/useMemoizedValue.ts
import { useRef } from 'react';
/**
 * useMemo variant that recomputes only when `deps` change according to the
 * supplied comparison (structural equality by default) rather than by
 * reference identity.
 * @param factory produces the memoized value
 * @param deps dependency list compared with `compareFn`
 * @param compareFn returns true when two dep lists are equivalent
 */
export function useDeepMemo<T>(
  factory: () => T,
  deps: any[],
  compareFn: (a: any[], b: any[]) => boolean = deepEqual
): T {
  const ref = useRef<{ deps: any[]; value: T }>();
  // Recompute on first render or whenever the deps are no longer "equal".
  if (!ref.current || !compareFn(deps, ref.current.deps)) {
    ref.current = { deps, value: factory() };
  }
  return ref.current.value;
}

// Default comparator. The original referenced `deepEqual` without defining
// or importing it; this structural check makes the hook self-contained.
// (Hoisted function declaration, so the default-parameter reference above
// resolves at call time.)
function deepEqual(a: unknown, b: unknown): boolean {
  if (Object.is(a, b)) return true;
  if (typeof a !== 'object' || typeof b !== 'object' || a === null || b === null) {
    return false;
  }
  const keysA = Object.keys(a as object);
  const keysB = Object.keys(b as object);
  if (keysA.length !== keysB.length) return false;
  return keysA.every((key) =>
    deepEqual((a as Record<string, unknown>)[key], (b as Record<string, unknown>)[key])
  );
}
// Selective memo for expensive components
// NOTE(review): this comparator deliberately ignores `config`, so changes to
// config alone will NOT re-render the component. That is only safe if config
// is static or purely cosmetic — confirm before copying this pattern.
// (memo is imported from 'react' elsewhere.)
export const ExpensiveComponent = memo(
({ data, config }) => {
// Component logic
},
(prevProps, nextProps) => {
// Only re-render if data changes
return prevProps.data === nextProps.data;
}
);
2. useMemo for Expensive Computations
// components/DataProcessor.tsx
/**
 * Derives per-value metrics and summary statistics from raw numbers, with
 * both stages memoized so they only rerun when their inputs change identity.
 */
export function DataAnalytics({ rawData }: { rawData: number[] }) {
  const processedData = useMemo(() => {
    console.time('Processing');
    const result = rawData
      .filter(n => n > 0) // sqrt/log need positive input
      .map(n => ({
        value: n,
        squared: n ** 2,
        sqrt: Math.sqrt(n),
        log: Math.log(n),
      }))
      .sort((a, b) => b.value - a.value); // descending by value
    console.timeEnd('Processing');
    return result;
  }, [rawData]);

  // Descending sort ⇒ max is the first element, min is the last.
  const statistics = useMemo(() => {
    const count = processedData.length;
    return {
      // Guard the empty case: the original divided by zero, yielding NaN.
      mean: count
        ? processedData.reduce((sum, item) => sum + item.value, 0) / count
        : 0,
      max: processedData[0]?.value || 0,
      min: processedData[count - 1]?.value || 0,
    };
  }, [processedData]);

  return <DataVisualization data={processedData} stats={statistics} />;
}
State Management Optimization
1. State Colocation
// ❌ Bad: Global state for local data
// (Illustrative shape — `{...}` stands in for real values.)
const globalState = {
userProfile: {...},
dashboardFilters: {...}, // Only used in Dashboard
chartSettings: {...}, // Only used in Chart
};
// ✅ Good: Colocate state where it's used
// Keeping state in the component that reads it means updates re-render
// only that subtree, not every subscriber of a global store.
function Dashboard() {
const [filters, setFilters] = useState(defaultFilters);
// Dashboard-specific state stays here
}
function Chart() {
const [settings, setSettings] = useState(defaultSettings);
// Chart-specific state stays here
}
2. Context Optimization
// contexts/AppContext.tsx
// Split contexts to avoid unnecessary re-renders
// One context per concern: a consumer of ThemeContext won't re-render when
// only the user changes, and vice versa.
const ThemeContext = createContext<Theme>(defaultTheme);
const UserContext = createContext<User | null>(null);
const SettingsContext = createContext<Settings>(defaultSettings);
// Use multiple providers
// Nesting order here is arbitrary — none of these providers depends on
// another. ThemeProvider/UserProvider/SettingsProvider are defined elsewhere
// in the app (UserProvider just below).
export function AppProviders({ children }: { children: ReactNode }) {
return (
<ThemeProvider>
<UserProvider>
<SettingsProvider>
{children}
</SettingsProvider>
</UserProvider>
</ThemeProvider>
);
}
// Optimize context value
/**
 * Owns the current user and exposes it (plus its setter) via UserContext.
 * The provider value is memoized so consumers re-render only when the user
 * object itself changes, not on every parent render.
 */
export function UserProvider({ children }: { children: ReactNode }) {
  const [user, setUser] = useState<User | null>(null);

  // setUser has a stable identity, so `user` is the only real dependency.
  const contextValue = useMemo(() => ({ user, setUser }), [user]);

  return (
    <UserContext.Provider value={contextValue}>
      {children}
    </UserContext.Provider>
  );
}
Bundle Size Optimization
1. Tree Shaking and Dead Code Elimination
// utils/imports.ts
// ❌ Bad: Importing entire library
// A namespace import of CJS lodash defeats tree shaking — the whole library
// lands in the bundle even though only one function is used.
import * as _ from 'lodash';
const result = _.debounce(fn, 300);
// ✅ Good: Import specific functions
// A path import pulls in only that function and its internal dependencies.
import debounce from 'lodash/debounce';
const result = debounce(fn, 300);
// Even better: Use native alternatives
/**
 * Returns a debounced wrapper: each call resets a timer, and `fn` runs only
 * after `delay` ms with no further calls (trailing edge only).
 *
 * Typed generically so the wrapper preserves fn's parameter types (the
 * original's `Function` type erased them), and uses
 * `ReturnType<typeof setTimeout>` so it compiles for both DOM and Node
 * (the original's `NodeJS.Timeout` broke browser-only builds).
 */
export function debounce<A extends unknown[]>(
  fn: (...args: A) => void,
  delay: number
) {
  let timeoutId: ReturnType<typeof setTimeout> | undefined;
  return (...args: A) => {
    if (timeoutId !== undefined) clearTimeout(timeoutId);
    timeoutId = setTimeout(() => fn(...args), delay);
  };
}
2. Dynamic Imports for Heavy Libraries
// components/MarkdownEditor.tsx
/**
 * Lazily loads the markdown editor bundle on mount, rendering a plain
 * textarea fallback until it arrives.
 */
export function MarkdownEditor({ initial }: { initial: string }) {
  const [MDEditor, setMDEditor] = useState<any>(null);

  useEffect(() => {
    // Guard against setState after unmount if the chunk resolves late —
    // the original applied the result unconditionally.
    let cancelled = false;
    import('@uiw/react-md-editor').then((mod) => {
      // Wrap in a function: passing a component directly to a state setter
      // would be interpreted as a functional update.
      if (!cancelled) setMDEditor(() => mod.default);
    });
    return () => {
      cancelled = true;
    };
  }, []);

  if (!MDEditor) {
    return <TextAreaFallback value={initial} />;
  }
  return <MDEditor value={initial} />;
}
Image Optimization
// components/OptimizedImage.tsx
import Image from 'next/image';
// useState was used below but missing from the original imports.
import { useState } from 'react';

/**
 * next/image wrapper that shows a pulsing placeholder until the image has
 * finished loading. `priority` disables lazy loading for above-the-fold art.
 */
export function OptimizedImage({
  src,
  alt,
  priority = false
}: ImageProps) {
  const [isLoading, setIsLoading] = useState(true);

  return (
    <div className="relative overflow-hidden">
      {isLoading && (
        <div className="absolute inset-0 bg-zinc-800 animate-pulse" />
      )}
      <Image
        src={src}
        alt={alt}
        priority={priority}
        loading={priority ? 'eager' : 'lazy'}
        placeholder="blur"
        // generateBlurDataURL is app-defined (imported elsewhere).
        blurDataURL={generateBlurDataURL(src)}
        // NOTE(review): onLoadingComplete is deprecated in newer Next.js
        // versions in favor of onLoad — confirm against your Next version.
        onLoadingComplete={() => setIsLoading(false)}
        sizes="(max-width: 640px) 100vw, (max-width: 1024px) 75vw, 50vw"
      />
    </div>
  );
}
Web Workers for Heavy Computations
// workers/data-processor.worker.ts
// Runs off the main thread: receives { data, operation } from the hook and
// posts the outcome back. Unknown operations are ignored, matching the
// original. performHeavyComputation/analyzeDataSet are defined elsewhere
// in this worker module.
self.addEventListener('message', (event) => {
  const { data, operation } = event.data;
  if (operation === 'process') {
    self.postMessage({ result: performHeavyComputation(data) });
  } else if (operation === 'analyze') {
    self.postMessage({ analysis: analyzeDataSet(data) });
  }
});
// hooks/useWebWorker.ts
/**
 * Spins up a worker at `workerPath`, posts { data, operation }, and returns
 * the latest response payload (null until the first reply).
 *
 * NOTE(review): the worker is terminated and recreated whenever data or
 * operation changes — fine for coarse updates, wasteful for rapid ones.
 */
export function useWebWorker<T, R>(
  workerPath: string,
  data: T,
  operation: string
): R | null {
  const [result, setResult] = useState<R | null>(null);
  const workerRef = useRef<Worker>();

  useEffect(() => {
    const worker = new Worker(workerPath);
    workerRef.current = worker;
    worker.onmessage = (event) => {
      // The companion worker replies with either { result } or { analysis }
      // depending on the operation; the original read only `result`, so
      // every 'analyze' response was silently dropped.
      setResult(event.data.result ?? event.data.analysis ?? null);
    };
    worker.onerror = () => {
      // Surface failures as "no result" rather than hanging forever.
      setResult(null);
    };
    worker.postMessage({ data, operation });
    return () => worker.terminate();
  }, [data, operation, workerPath]);

  return result;
}
Production Monitoring
// monitoring/performance.ts
/**
 * Wires up Core Web Vitals reporting (LCP, FID, CLS) via PerformanceObserver.
 * `analytics` is the app's tracking client, defined elsewhere.
 */
export function setupPerformanceMonitoring() {
  if (!('PerformanceObserver' in window)) return;

  // Largest Contentful Paint. `buffered: true` replays entries recorded
  // before this observer was registered — without it (the original used the
  // entryTypes form, which cannot request buffering), any paint that
  // happened before setup ran is simply lost.
  new PerformanceObserver((list) => {
    for (const entry of list.getEntries()) {
      analytics.track('LCP', {
        value: entry.startTime,
        url: window.location.href,
      });
    }
  }).observe({ type: 'largest-contentful-paint', buffered: true });

  // First Input Delay: time from the interaction to its handler starting.
  new PerformanceObserver((list) => {
    for (const entry of list.getEntries()) {
      // first-input entries carry processingStart, which plain
      // PerformanceEntry typings don't expose.
      const firstInput = entry as PerformanceEntry & { processingStart: number };
      analytics.track('FID', {
        value: firstInput.processingStart - firstInput.startTime,
        url: window.location.href,
      });
    }
  }).observe({ type: 'first-input', buffered: true });

  // Cumulative Layout Shift: sum of shifts not caused by recent user input.
  let clsValue = 0;
  new PerformanceObserver((list) => {
    for (const entry of list.getEntries()) {
      const shift = entry as PerformanceEntry & {
        hadRecentInput: boolean;
        value: number;
      };
      if (!shift.hadRecentInput) {
        clsValue += shift.value;
      }
    }
  }).observe({ type: 'layout-shift', buffered: true });

  // Report CLS when the page becomes hidden. The original used
  // `beforeunload`, which never fires on mobile tab kills and disables the
  // back/forward cache; visibilitychange→hidden is the recommended
  // final-report signal (may fire more than once per page lifetime).
  document.addEventListener('visibilitychange', () => {
    if (document.visibilityState === 'hidden') {
      analytics.track('CLS', {
        value: clsValue,
        url: window.location.href,
      });
    }
  });
}
Key Takeaways
- Measure First: Always profile before optimizing
- Bundle Size Matters: Every KB counts on mobile
- Lazy Load Aggressively: Load only what's needed
- Memoize Wisely: Not everything needs memoization
- Use Concurrent Features: Leverage React 18's capabilities
- Monitor Production: Real user metrics trump synthetic tests
Conclusion
React performance optimization is an iterative process. Start with measuring, identify bottlenecks, apply targeted optimizations, and monitor the results. The techniques covered here have helped achieve sub-second load times and 60fps interactions in production applications serving millions of users.
Remember: premature optimization is the root of all evil, but thoughtful performance work creates delightful user experiences.