expand sub tasks
@@ -29,18 +29,22 @@ class PerformanceOptimizer {
  private initializeObservers() {
    // Monitor long tasks
    if ('PerformanceObserver' in window) {
-      this.longTaskObserver = new PerformanceObserver((list) => {
+      this.longTaskObserver = new PerformanceObserver(list => {
        for (const entry of list.getEntries()) {
          if (entry.entryType === 'longtask') {
            this.metrics.longTaskCount++;
-            this.metrics.averageLongTaskDuration =
-              (this.metrics.averageLongTaskDuration * (this.metrics.longTaskCount - 1) + entry.duration) / this.metrics.longTaskCount;
-
-            console.warn(`🚨 Long task detected: ${entry.duration.toFixed(2)}ms - Consider chunking this operation`);
+            this.metrics.averageLongTaskDuration =
+              (this.metrics.averageLongTaskDuration * (this.metrics.longTaskCount - 1) +
+                entry.duration) /
+              this.metrics.longTaskCount;
+
+            console.warn(
+              `🚨 Long task detected: ${entry.duration.toFixed(2)}ms - Consider chunking this operation`
+            );
          }
        }
      });

      this.longTaskObserver.observe({ entryTypes: ['longtask'] });
    }

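Note (not part of this commit): with the entryTypes form, long tasks that finished before observe() was called are not delivered, and the observer keeps firing until it is disconnected. A minimal standalone sketch of both points, for illustration only:

// Illustrative sketch: observe long tasks, then stop observing on teardown.
// The single-type form with buffered: true also delivers entries recorded
// before observe(), in browsers that support buffering for 'longtask'.
let longTaskObserver: PerformanceObserver | undefined;

if ('PerformanceObserver' in window) {
  longTaskObserver = new PerformanceObserver(list => {
    for (const entry of list.getEntries()) {
      console.debug(`longtask: ${entry.duration.toFixed(1)}ms`);
    }
  });
  longTaskObserver.observe({ type: 'longtask', buffered: true });
}

// Teardown (e.g. on beforeunload or component unmount): avoid leaking the observer.
longTaskObserver?.disconnect();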
@@ -51,24 +55,24 @@ class PerformanceOptimizer {
  private startFrameRateMonitoring() {
    let frameCount = 0;
    let lastTime = performance.now();

    const measureFrameRate = () => {
      frameCount++;
      const currentTime = performance.now();

      if (currentTime - lastTime >= 1000) {
        this.metrics.frameRate = frameCount;
        frameCount = 0;
        lastTime = currentTime;

        if (this.metrics.frameRate < 30) {
          console.warn(`⚠️ Low frame rate detected: ${this.metrics.frameRate}fps`);
        }
      }

      requestAnimationFrame(measureFrameRate);
    };

    requestAnimationFrame(measureFrameRate);
  }

@@ -79,26 +83,28 @@ class PerformanceOptimizer {
    chunkSize: number = 10,
    delay: number = 16
  ): Promise<void> {
-    return new Promise((resolve) => {
+    return new Promise(resolve => {
      let index = 0;

      const processChunk = () => {
        const startTime = performance.now();
        const endIndex = Math.min(index + chunkSize, items.length);

        // Process items in this chunk
        for (let i = index; i < endIndex; i++) {
          operation(items[i], i);
        }

        index = endIndex;

        // Check if we need to yield to prevent long tasks
        const processingTime = performance.now() - startTime;
        if (processingTime > 16) {
-          console.warn(`⚠️ Chunk processing took ${processingTime.toFixed(2)}ms - consider smaller chunks`);
+          console.warn(
+            `⚠️ Chunk processing took ${processingTime.toFixed(2)}ms - consider smaller chunks`
+          );
        }

        if (index < items.length) {
          // Schedule next chunk with delay to prevent blocking
          setTimeout(processChunk, delay);
@@ -106,7 +112,7 @@ class PerformanceOptimizer {
          resolve();
        }
      };

      processChunk();
    });
  }
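Note (not part of this commit): the method above yields with a fixed setTimeout delay between chunks. A complementary pattern does the work only while the browser reports idle time; a minimal sketch, shown for illustration only since requestIdleCallback is not available in every browser (e.g. Safari):

// Illustrative variant of the chunking idea using requestIdleCallback:
// process items only while the current idle period has time remaining.
function processWhenIdle<T>(
  items: T[],
  operation: (item: T, index: number) => void
): Promise<void> {
  return new Promise(resolve => {
    let index = 0;
    const run = (deadline: IdleDeadline) => {
      while (index < items.length && deadline.timeRemaining() > 0) {
        operation(items[index], index);
        index++;
      }
      if (index < items.length) {
        requestIdleCallback(run); // yield and continue in the next idle period
      } else {
        resolve();
      }
    };
    requestIdleCallback(run);
  });
}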
@@ -117,19 +123,16 @@ class PerformanceOptimizer {
    requestAnimationFrame(() => {
      // Force layout read first
      document.body.offsetHeight;

      // Perform all write operations
      operations.forEach(operation => operation());
    });
  }

  // Debounce function for expensive operations
-  static debounce<T extends (...args: any[]) => void>(
-    func: T,
-    delay: number
-  ): T {
+  static debounce<T extends (...args: any[]) => void>(func: T, delay: number): T {
    let timeoutId: NodeJS.Timeout;

    return ((...args: any[]) => {
      clearTimeout(timeoutId);
      timeoutId = setTimeout(() => func(...args), delay);
@@ -137,15 +140,12 @@ class PerformanceOptimizer {
  }

  // Throttle function for frequent operations
-  static throttle<T extends (...args: any[]) => void>(
-    func: T,
-    delay: number
-  ): T {
+  static throttle<T extends (...args: any[]) => void>(func: T, delay: number): T {
    let lastExecTime = 0;

    return ((...args: any[]) => {
      const currentTime = performance.now();

      if (currentTime - lastExecTime > delay) {
        func(...args);
        lastExecTime = currentTime;
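Typical call sites for the two static helpers above, as a usage sketch (the selectors, delays, and handler bodies are illustrative, not from this commit):

// Debounce: run the handler once after input settles (search-as-you-type is a common case)
const onSearchInput = PerformanceOptimizer.debounce((event: Event) => {
  const query = (event.target as HTMLInputElement).value;
  console.log('search for', query); // illustrative work
}, 250);
document.querySelector('input.search')?.addEventListener('input', onSearchInput);

// Throttle: run at most once per 100 ms (scroll/resize style work)
const onResize = PerformanceOptimizer.throttle(() => {
  console.log('viewport is now', window.innerWidth, 'x', window.innerHeight); // illustrative work
}, 100);
window.addEventListener('resize', onResize, { passive: true });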
@@ -163,11 +163,7 @@ class PerformanceOptimizer {
      overscanCount?: number;
    } = {}
  ): React.ReactNode[] {
-    const {
-      chunkSize = 50,
-      virtualizationThreshold = 100,
-      overscanCount = 5
-    } = options;
+    const { chunkSize = 50, virtualizationThreshold = 100, overscanCount = 5 } = options;

    // For small lists, render everything
    if (items.length <= virtualizationThreshold) {
@@ -176,16 +172,18 @@ class PerformanceOptimizer {

    // For large lists, use chunked rendering
    const chunks: React.ReactNode[] = [];

    for (let i = 0; i < items.length; i += chunkSize) {
      const chunk = items.slice(i, i + chunkSize);
      chunks.push(
-        React.createElement('div', { key: `chunk-${i}`, className: 'virtualized-chunk' },
+        React.createElement(
+          'div',
+          { key: `chunk-${i}`, className: 'virtualized-chunk' },
          chunk.map((item, index) => renderItem(item, i + index))
        )
      );
    }

    return chunks;
  }

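The name of the chunked-render method is not visible in these hunks. Assuming it is exposed as a static helper taking (items, renderItem, options), with a name along the lines of renderVirtualizedList (hypothetical), a call would look roughly like this sketch:

// Hypothetical usage: the method name, whether it is static, and the parameter
// order are assumptions; only the options keys and renderItem shape are from the diff.
const rows = Array.from({ length: 1000 }, (_, i) => `row ${i}`); // illustrative data
const nodes = PerformanceOptimizer.renderVirtualizedList(
  rows,
  (row, index) => React.createElement('li', { key: index }, row), // per-item renderer
  { chunkSize: 50, virtualizationThreshold: 100, overscanCount: 5 }
);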
@@ -196,7 +194,7 @@ class PerformanceOptimizer {
      return {
        used: memory.usedJSHeapSize / 1024 / 1024,
        total: memory.totalJSHeapSize / 1024 / 1024,
-        limit: memory.jsHeapSizeLimit / 1024 / 1024
+        limit: memory.jsHeapSizeLimit / 1024 / 1024,
      };
    }
    return { used: 0, total: 0, limit: 0 };
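The values returned above are megabytes (bytes divided by 1024 twice), with zeros as the fallback when heap information is unavailable. A quick usage sketch:

// Log heap usage in MB; getMemoryUsage() is called statically elsewhere in this diff
const { used, total, limit } = PerformanceOptimizer.getMemoryUsage();
console.log(`Heap: ${used.toFixed(1)} MB used / ${total.toFixed(1)} MB allocated (limit ${limit.toFixed(1)} MB)`);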
@@ -205,7 +203,7 @@ class PerformanceOptimizer {
  // Optimize scroll performance
  static optimizeScroll(container: HTMLElement, handler: (event: Event) => void): () => void {
    let ticking = false;

    const optimizedHandler = (event: Event) => {
      if (!ticking) {
        requestAnimationFrame(() => {
@@ -215,9 +213,9 @@ class PerformanceOptimizer {
        ticking = true;
      }
    };

    container.addEventListener('scroll', optimizedHandler, { passive: true });

    return () => {
      container.removeEventListener('scroll', optimizedHandler);
    };
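optimizeScroll returns an unsubscribe function, so callers can detach the listener when the view goes away. A usage sketch (the selector and handler body are illustrative):

// Attach a scroll handler that is coalesced to at most one run per animation frame
const container = document.querySelector('.results-list') as HTMLElement; // illustrative selector
const stopScroll = PerformanceOptimizer.optimizeScroll(container, event => {
  console.log('scrollTop is', (event.target as HTMLElement).scrollTop); // illustrative work
});

// Later, on teardown:
stopScroll();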
@@ -232,20 +230,20 @@ class PerformanceOptimizer {
  ): () => void {
    let enterTimeout: NodeJS.Timeout;
    let leaveTimeout: NodeJS.Timeout;

    const handleMouseEnter = () => {
      clearTimeout(leaveTimeout);
      enterTimeout = setTimeout(onEnter, delay);
    };

    const handleMouseLeave = () => {
      clearTimeout(enterTimeout);
      leaveTimeout = setTimeout(onLeave, delay);
    };

    element.addEventListener('mouseenter', handleMouseEnter, { passive: true });
    element.addEventListener('mouseleave', handleMouseLeave, { passive: true });

    return () => {
      clearTimeout(enterTimeout);
      clearTimeout(leaveTimeout);
@@ -259,7 +257,7 @@ class PerformanceOptimizer {
    const memory = PerformanceOptimizer.getMemoryUsage();
    this.metrics.memoryUsage = memory.used;
    this.metrics.layoutThrashingCount = this.layoutThrashingCount;

    return { ...this.metrics };
  }

@@ -294,4 +292,4 @@ window.addEventListener('beforeunload', () => {
});

export { PerformanceOptimizer, performanceOptimizer };
export default PerformanceOptimizer;
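Both the class and the shared instance are exported, so consumers can pick either form. A usage sketch (the module path is illustrative; it is not shown in this diff):

// Path is illustrative
import PerformanceOptimizer, { performanceOptimizer } from './PerformanceOptimizer';

// Static helpers hang off the class...
const logTick = PerformanceOptimizer.throttle(() => console.log('tick'), 1000);
window.addEventListener('scroll', logTick, { passive: true });

// ...while the shared instance is available as a named export
console.log(performanceOptimizer);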