Mastering React Performance: Web Workers and Generator Functions
When building data-intensive React applications, you may run into situations where processing a large dataset freezes the UI. That happens because JavaScript runs on a single thread, so heavy computation can block user interaction. Let's explore how to solve this with Generator Functions and Web Workers, using a real-world example.
The Problem: UI Freezes During Heavy Computation
Suppose you are building an event analytics dashboard that needs to run complex calculations over thousands of events. Here is what typically happens:
function EventsDashboard() {
const [events, setEvents] = useState([]);
// This function blocks the UI thread
const processEvents = (rawEvents) => {
return rawEvents.map(event => {
// Complex calculations that take time
const score = calculateEventScore(event); // ~2ms per event
const sentiment = analyzeSentiment(event); // ~3ms per event
const category = classifyEvent(event); // ~1ms per event
// With 10,000 events, this takes:
// 10,000 * (2 + 3 + 1) = 60,000ms = 60 seconds!
return {
...event,
score,
sentiment,
category
};
});
};
const processAndDisplay = () => {
const processedEvents = processEvents(events);
setEvents(processedEvents);
};
return (
  <div>
    <button onClick={processAndDisplay}>Process events</button>
    <ul>
      {events.map(event => (
        <li key={event.id}>{event.name}</li>
      ))}
    </ul>
  </div>
);
}

The problem? With 10,000 events, your UI freezes for 60 seconds! During that time, clicks and scrolls go unhandled, nothing re-renders, and the browser may even flag the page as unresponsive.
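Before reaching for a fix, it helps to confirm that the main thread really is being blocked. A minimal diagnostic, assuming the browser supports the Long Tasks API (the logging is illustrative):

// Log every task that blocks the main thread for longer than 50ms
const longTaskObserver = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.warn(`Long task blocked the main thread for ${Math.round(entry.duration)}ms`);
  }
});

longTaskObserver.observe({ entryTypes: ['longtask'] });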
Solution 1: Generator Functions for Chunked Processing
Generator functions let us process the data in chunks and periodically hand control back to the main thread:
function* eventProcessor(events, chunkSize = 100) {
// Process events in chunks of 100
for (let i = 0; i < events.length; i += chunkSize) {
const chunk = events.slice(i, i + chunkSize);
const processedChunk = chunk.map(event => ({
...event,
score: calculateEventScore(event),
sentiment: analyzeSentiment(event),
category: classifyEvent(event)
}));
// Yield each processed chunk
yield processedChunk;
}
}
function EventsDashboard() {
const [events, setEvents] = useState([]);
const [progress, setProgress] = useState(0);
const [isProcessing, setIsProcessing] = useState(false);
const processEventsInChunks = async () => {
setIsProcessing(true);
const generator = eventProcessor(events);
let processedEvents = [];
try {
while (true) {
const { value: chunk, done } = generator.next();
if (done) break;
processedEvents = [...processedEvents, ...chunk];
// Update progress
const progress = (processedEvents.length / events.length) * 100;
setProgress(progress);
// Let the UI breathe
await new Promise(resolve => setTimeout(resolve, 0));
// Update UI with processed events so far
setEvents(processedEvents);
}
} finally {
setIsProcessing(false);
}
};
return (
  <div>
    <button onClick={processEventsInChunks} disabled={isProcessing}>
      Process events
    </button>
    {isProcessing && (
      <progress value={progress} max={100} />
    )}
    <ul>
      {events.map(event => (
        <li key={event.id}>{event.name}</li>
      ))}
    </ul>
  </div>
);
}

Solution 2: Web Workers for True Parallel Processing
Web Workers let us run the computation on a separate thread, completely off the main UI thread:
// eventWorker.ts
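// Note: calculateEventScore, analyzeSentiment and classifyEvent are assumed to be
// synchronous scoring/classification helpers imported into this worker module.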
type Event = {
id: string;
name: string;
timestamp: number;
data: any;
};
type ProcessedEvent = Event & {
score: number;
sentiment: string;
category: string;
};
type WorkerMessage = {
type: 'PROCESS_CHUNK';
payload: Event[];
};
type WorkerResponse = {
type: 'CHUNK_PROCESSED' | 'PROCESSING_COMPLETE' | 'ERROR';
payload: ProcessedEvent[] | Error | null;
progress?: number;
};
self.onmessage = (e: MessageEvent<WorkerMessage>) => {
const { type, payload: events } = e.data;
if (type === 'PROCESS_CHUNK') {
try {
let processedCount = 0;
const totalEvents = events.length;
const chunkSize = 100;
// Process in smaller chunks to report progress
for (let i = 0; i < events.length; i += chunkSize) {
const chunk = events.slice(i, i + chunkSize);
const processedChunk = chunk.map(event => ({
...event,
score: calculateEventScore(event),
sentiment: analyzeSentiment(event),
category: classifyEvent(event)
}));
processedCount += chunk.length;
// Report progress
self.postMessage({
type: 'CHUNK_PROCESSED',
payload: processedChunk,
progress: (processedCount / totalEvents) * 100
});
}
self.postMessage({
type: 'PROCESSING_COMPLETE',
payload: null // all chunks have already been streamed back above
});
} catch (error) {
self.postMessage({
type: 'ERROR',
payload: error
});
}
}
};

// EventsDashboard.tsx
function EventsDashboard() {
const [events, setEvents] = useState([]);
const [progress, setProgress] = useState(0);
const [error, setError] = useState(null);
const workerRef = useRef<Worker | null>(null);
useEffect(() => {
// Initialize worker
// Bundlers such as Vite or webpack 5 compile the TypeScript worker
// when it is referenced with the new URL(...) pattern.
workerRef.current = new Worker(new URL('./eventWorker.ts', import.meta.url), { type: 'module' });
// Handle worker messages
workerRef.current.onmessage = (e) => {
const { type, payload, progress } = e.data;
switch (type) {
case 'CHUNK_PROCESSED':
setEvents(current => [...current, ...payload]);
setProgress(progress);
break;
case 'PROCESSING_COMPLETE':
setProgress(100);
break;
case 'ERROR':
setError(payload);
break;
}
};
return () => workerRef.current?.terminate();
}, []);
const processEvents = () => {
setEvents([]);
setProgress(0);
setError(null);
// `events` here is the raw list captured by this closure; the state was
// cleared above so processed chunks can stream back in.
workerRef.current?.postMessage({
type: 'PROCESS_CHUNK',
payload: events
});
};
return (
  <div>
    <button onClick={processEvents} disabled={progress > 0 && progress < 100}>
      Process events
    </button>
    {progress > 0 && progress < 100 && (
      <progress value={progress} max={100} />
    )}
    {error && (
      <p>Error: {error.message}</p>
    )}
    {/* EventList is assumed to be a presentational component for the processed events */}
    <EventList
      events={events}
      loading={progress > 0 && progress < 100}
    />
  </div>
);
}

Final Solution: Combining Both Approaches
For the best performance, especially with very large datasets (100,000+ events), combine both approaches: run the processing in a Web Worker so it never touches the main thread, and use a generator inside the worker to split the work into chunks so progress can be streamed back incrementally.
The full implementation looks like this:
// advancedEventWorker.ts
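// Note: here calculateEventScore, analyzeSentiment and classifyEvent are assumed to be
// async (Promise-returning) helpers imported into this worker module, since they are awaited below.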
function* processInChunks(events: Event[], chunkSize: number) {
for (let i = 0; i < events.length; i += chunkSize) {
const chunk = events.slice(i, i + chunkSize);
yield chunk;
}
}
self.onmessage = async (e: MessageEvent) => {
const { type, payload: events } = e.data;
if (type === 'PROCESS_EVENTS') {
try {
const CHUNK_SIZE = 100;
const chunks = processInChunks(events, CHUNK_SIZE);
let processedCount = 0;
const totalEvents = events.length;
for (const chunk of chunks) {
// Process each chunk
const processedChunk = await Promise.all(
chunk.map(async event => ({
...event,
score: await calculateEventScore(event),
sentiment: await analyzeSentiment(event),
category: await classifyEvent(event)
}))
);
processedCount += chunk.length;
// Stream results back to main thread
self.postMessage({
type: 'CHUNK_PROCESSED',
payload: processedChunk,
progress: (processedCount / totalEvents) * 100
});
// Yield to the worker's event loop so pending messages can be handled between chunks
await new Promise(resolve => setTimeout(resolve, 0));
}
self.postMessage({
type: 'PROCESSING_COMPLETE',
payload: null,
progress: 100
});
} catch (error) {
self.postMessage({
type: 'ERROR',
payload: error
});
}
}
};
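On the main thread, the component driving this combined worker can look much like the one from Solution 2. Here is a minimal sketch, assuming the worker file exports its Event and ProcessedEvent types and is loaded through the bundler's new URL(...) support; the markup is illustrative:

// EventsDashboard.tsx: main-thread consumer of advancedEventWorker.ts
import { useEffect, useRef, useState } from 'react';
// Assumed: the Event / ProcessedEvent types from the worker file are exported and shared
import type { Event, ProcessedEvent } from './advancedEventWorker';

function EventsDashboard({ rawEvents }: { rawEvents: Event[] }) {
  const [events, setEvents] = useState<ProcessedEvent[]>([]);
  const [progress, setProgress] = useState(0);
  const workerRef = useRef<Worker | null>(null);

  useEffect(() => {
    // Bundlers such as Vite or webpack 5 compile the TS worker referenced via new URL(...)
    workerRef.current = new Worker(
      new URL('./advancedEventWorker.ts', import.meta.url),
      { type: 'module' }
    );
    workerRef.current.onmessage = (e) => {
      const { type, payload, progress } = e.data;
      if (type === 'CHUNK_PROCESSED') {
        // Each chunk streams in as soon as the worker finishes it
        setEvents(current => [...current, ...payload]);
        setProgress(progress);
      } else if (type === 'PROCESSING_COMPLETE') {
        setProgress(100);
      }
    };
    return () => workerRef.current?.terminate();
  }, []);

  const startProcessing = () => {
    setEvents([]);
    setProgress(0);
    workerRef.current?.postMessage({ type: 'PROCESS_EVENTS', payload: rawEvents });
  };

  return (
    <div>
      <button onClick={startProcessing}>Process events</button>
      {progress > 0 && progress < 100 && <progress value={progress} max={100} />}
      <ul>
        {events.map(event => (
          <li key={event.id}>{event.name}</li>
        ))}
      </ul>
    </div>
  );
}

Because the worker streams CHUNK_PROCESSED messages, the list fills in progressively instead of appearing all at once.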
Performance Monitoring

To measure the impact of these optimizations:
// Before processing
performance.mark('processStart');
// After processing
performance.mark('processEnd');
performance.measure(
'eventProcessing',
'processStart',
'processEnd'
);
// Log metrics
const metrics = performance.getEntriesByName('eventProcessing')[0];
console.log(`Processing took ${metrics.duration}ms`);

In the worker-based flow, set the first mark just before posting the message to the worker and the second one when the PROCESSING_COMPLETE message arrives, so the measurement covers the full round trip.

Best Practices and Tips
Release references as you go so that large intermediate arrays can be garbage collected:

// Pre-allocate the result array and write each processed chunk into place
const processedEvents = new Array(totalEvents);
let chunkIndex = 0;
for (const chunk of chunks) {
  const processedChunk = processChunk(chunk); // processChunk stands in for your per-chunk work
  processedEvents.splice(chunkIndex * CHUNK_SIZE, processedChunk.length, ...processedChunk);
  chunkIndex += 1;
  // Clear references to help garbage collection
  chunk.length = 0;
}

Handle per-event failures gracefully so one bad record doesn't abort the whole batch:

const safeProcess = async (event) => {
try {
return await processEvent(event);
} catch (error) {
console.error(`Failed to process event ${event.id}:`, error);
return {
...event,
error: error.message
};
}
};

Support cancellation so that navigating away or unmounting stops the work:

function EventsDashboard() {
const cancelRef = useRef(false);
useEffect(() => {
return () => {
cancelRef.current = true;
};
}, []);
const processEvents = async () => {
for (const chunk of chunks) { // chunks: the work split into pieces, as in Solution 1
if (cancelRef.current) break;
// Process chunk...
}
};
}

Real-World Performance Improvements
With this implementation, the heavy processing runs off the main thread, results stream in chunk by chunk, and the UI stays responsive and shows progress for the entire run.
Conclusion
By combining Generator Functions and Web Workers, we can handle intensive data-processing tasks while keeping the user experience smooth. This pattern is especially valuable for analytics dashboards, bulk data imports, and any feature that runs expensive per-item computation over thousands of records.
The key is to break large tasks into manageable pieces and process them in a way that never blocks the main thread, while keeping the user informed of progress.
And remember to measure performance before and after applying these optimizations, to make sure they deliver a meaningful benefit for your specific use case.