refactor: modularize render analysis and size checks

This commit is contained in:
2025-12-27 16:49:05 +00:00
parent 60669ead49
commit 94aa22828f
25 changed files with 619 additions and 531 deletions

View File

@@ -5,9 +5,9 @@
## Summary
- **Total files > 150 lines:** 106
- **Pending:** 93
- **Pending:** 91
- **In Progress:** 0
- **Completed:** 1
- **Completed:** 3
- **Skipped:** 12
## By Category
@@ -38,8 +38,8 @@ Library and tool files - easiest to refactor
- [ ] `frontends/nextjs/src/lib/components/component-catalog.ts` (337 lines)
- [ ] `frontends/nextjs/src/lib/schema/default-schema.ts` (308 lines)
- [ ] `frontends/nextjs/src/lib/lua/snippets/lua-snippets-data.ts` (983 lines)
- [ ] `tools/analysis/code/analyze-render-performance.ts` (294 lines)
- [ ] `tools/misc/metrics/enforce-size-limits.ts` (249 lines)
- [x] `tools/analysis/code/analyze-render-performance.ts` (294 lines)
- [x] `tools/misc/metrics/enforce-size-limits.ts` (249 lines)
- [ ] `tools/refactoring/refactor-to-lambda.ts` (243 lines)
- [x] `tools/analysis/test/analyze-implementation-completeness.ts` (230 lines)
- [ ] `tools/detection/detect-stub-implementations.ts` (215 lines)

View File

@@ -1,294 +1,5 @@
#!/usr/bin/env tsx
import { existsSync, readdirSync, readFileSync, statSync } from 'fs'
import { basename, extname, join, relative } from 'path'
import { runRenderPerformanceAnalysis } from './analyze-render-performance'
interface HookCounts {
[key: string]: number
}
interface ComponentMetrics {
file: string
component: string
lines: number
bytes: number
hooks: {
builtIn: number
custom: number
total: number
byHook: HookCounts
}
effects: number
memoization: number
estimatedRenderTimeMs: number
reasons: string[]
risk: 'low' | 'medium' | 'high'
}
const BUILTIN_HOOKS = [
'useState',
'useReducer',
'useEffect',
'useLayoutEffect',
'useInsertionEffect',
'useMemo',
'useCallback',
'useRef',
'useContext',
'useSyncExternalStore',
'useTransition',
'useDeferredValue',
'useId',
'useImperativeHandle',
]
const BUILTIN_HOOK_SET = new Set(BUILTIN_HOOKS)
const SKIP_DIRS = new Set([
'node_modules',
'.next',
'dist',
'build',
'coverage',
'.git',
'__tests__',
'__mocks__',
'__snapshots__',
])
const THRESHOLDS = {
slowRenderMs: 16,
largeComponentLines: 200,
veryLargeComponentLines: 300,
highHookCount: 12,
highEffectCount: 3,
}
const TARGET_EXTENSIONS = new Set(['.tsx'])
function countMatches(content: string, regex: RegExp): number {
return content.match(regex)?.length ?? 0
}
function pickSourceRoot(): string | null {
const candidates = [
process.env.RENDER_ANALYSIS_ROOT,
join(process.cwd(), 'frontends', 'nextjs', 'src'),
join(process.cwd(), 'src'),
].filter(Boolean) as string[]
for (const candidate of candidates) {
if (existsSync(candidate)) {
return candidate
}
}
return null
}
function walkDir(dir: string, files: string[]): void {
let entries: string[]
try {
entries = readdirSync(dir)
} catch {
return
}
for (const entry of entries) {
const fullPath = join(dir, entry)
let stats
try {
stats = statSync(fullPath)
} catch {
continue
}
if (stats.isDirectory()) {
if (SKIP_DIRS.has(entry)) {
continue
}
walkDir(fullPath, files)
continue
}
if (!stats.isFile()) {
continue
}
if (!TARGET_EXTENSIONS.has(extname(entry))) {
continue
}
if (entry.endsWith('.test.tsx') || entry.endsWith('.spec.tsx') || entry.endsWith('.stories.tsx')) {
continue
}
files.push(fullPath)
}
}
function estimateRenderTimeMs(lines: number, hooks: number, effects: number, memoization: number): number {
const base = 1.5
const lineCost = Math.min(lines, 400) * 0.03
const hookCost = hooks * 0.4
const effectCost = effects * 0.8
const memoSavings = Math.min(memoization, 4) * 0.3
const estimate = base + lineCost + hookCost + effectCost - memoSavings
return Math.max(0.5, Math.round(estimate * 10) / 10)
}
function analyzeFile(filePath: string): ComponentMetrics | null {
let content = ''
try {
content = readFileSync(filePath, 'utf8')
} catch {
return null
}
const lines = content.split(/\r?\n/).length
const bytes = Buffer.byteLength(content, 'utf8')
const byHook: HookCounts = {}
let builtInCount = 0
for (const hook of BUILTIN_HOOKS) {
const count = countMatches(content, new RegExp(`\\b${hook}\\b`, 'g'))
byHook[hook] = count
builtInCount += count
}
const allHookCalls = content.match(/\buse[A-Z]\w*\b/g) ?? []
const customHookCount = Math.max(0, allHookCalls.filter(hook => !BUILTIN_HOOK_SET.has(hook)).length)
const hookCount = builtInCount + customHookCount
const effectCount = (byHook.useEffect ?? 0) + (byHook.useLayoutEffect ?? 0) + (byHook.useInsertionEffect ?? 0)
const memoCount = (byHook.useMemo ?? 0) + (byHook.useCallback ?? 0)
const reactMemoCount = countMatches(content, /\bReact\.memo\b/g)
const memoCallCount = countMatches(content, /\bmemo\s*\(/g)
const memoization = memoCount + reactMemoCount + Math.max(0, memoCallCount - reactMemoCount)
const estimatedRenderTimeMs = estimateRenderTimeMs(lines, hookCount, effectCount, memoization)
const reasons: string[] = []
if (lines >= THRESHOLDS.veryLargeComponentLines) {
reasons.push(`Very large component: ${lines} lines`)
} else if (lines >= THRESHOLDS.largeComponentLines) {
reasons.push(`Large component: ${lines} lines`)
}
if (hookCount >= THRESHOLDS.highHookCount) {
reasons.push(`High hook count: ${hookCount}`)
}
if (effectCount >= THRESHOLDS.highEffectCount) {
reasons.push(`Multiple effects: ${effectCount}`)
}
if (estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs) {
reasons.push(`Estimated render time: ${estimatedRenderTimeMs}ms`)
}
let risk: ComponentMetrics['risk'] = 'low'
if (reasons.length >= 3 || estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs) {
risk = 'high'
} else if (reasons.length >= 1) {
risk = 'medium'
}
return {
file: relative(process.cwd(), filePath),
component: basename(filePath, '.tsx'),
lines,
bytes,
hooks: {
builtIn: builtInCount,
custom: customHookCount,
total: hookCount,
byHook,
},
effects: effectCount,
memoization,
estimatedRenderTimeMs,
reasons,
risk,
}
}
function buildRecommendations(slowComponents: ComponentMetrics[]): string[] {
const recommendations: string[] = []
if (slowComponents.length === 0) {
recommendations.push('No high-risk components detected. Re-run after significant UI changes.')
return recommendations
}
if (slowComponents.some(component => component.lines >= THRESHOLDS.veryLargeComponentLines)) {
recommendations.push('Split components over 300 lines into smaller pieces to reduce render work.')
}
if (slowComponents.some(component => component.effects >= THRESHOLDS.highEffectCount)) {
recommendations.push('Reduce the number of effects per component by extracting side effects into hooks.')
}
if (slowComponents.some(component => component.hooks.total >= THRESHOLDS.highHookCount)) {
recommendations.push('Consider splitting stateful logic across smaller components or hooks.')
}
if (slowComponents.some(component => component.memoization === 0 && component.estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs)) {
recommendations.push('Add memoization (React.memo/useMemo/useCallback) where render work is heavy.')
}
if (recommendations.length === 0) {
recommendations.push('Review flagged components for unnecessary renders or expensive computations.')
}
return recommendations
}
const rootDir = pickSourceRoot()
if (!rootDir) {
console.log(JSON.stringify({
analysisType: 'static-heuristic',
averageRenderTime: 0,
slowComponents: [],
recommendations: ['No source directory found to analyze.'],
timestamp: new Date().toISOString(),
}, null, 2))
process.exit(0)
}
const files: string[] = []
walkDir(rootDir, files)
const metrics: ComponentMetrics[] = files
.map(file => analyzeFile(file))
.filter((result): result is ComponentMetrics => result !== null)
const averageRenderTime = metrics.length === 0
? 0
: Math.round((metrics.reduce((sum, metric) => sum + metric.estimatedRenderTimeMs, 0) / metrics.length) * 10) / 10
const slowComponents = metrics
.filter(metric => metric.reasons.length > 0 || metric.estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs)
.sort((a, b) => b.estimatedRenderTimeMs - a.estimatedRenderTimeMs)
const topByLines = [...metrics].sort((a, b) => b.lines - a.lines).slice(0, 10)
const topByHooks = [...metrics].sort((a, b) => b.hooks.total - a.hooks.total).slice(0, 10)
const summary = {
analysisType: 'static-heuristic',
rootDir: relative(process.cwd(), rootDir) || '.',
componentsAnalyzed: metrics.length,
averageRenderTime,
averageRenderTimeMs: averageRenderTime,
slowComponentsTotal: slowComponents.length,
thresholds: THRESHOLDS,
slowComponents: slowComponents.slice(0, 15),
topByLines,
topByHooks,
recommendations: buildRecommendations(slowComponents),
note: 'Estimated render times are derived from file size and hook usage. Use React Profiler for real timings.',
timestamp: new Date().toISOString(),
}
console.log(JSON.stringify(summary, null, 2))
console.log(JSON.stringify(runRenderPerformanceAnalysis(), null, 2))

View File

@@ -0,0 +1,39 @@
// React built-in hooks recognized by the analyzer. Occurrences of these
// names are tallied separately from custom (project-defined) hooks.
export const BUILTIN_HOOKS = [
  'useState',
  'useReducer',
  'useEffect',
  'useLayoutEffect',
  'useInsertionEffect',
  'useMemo',
  'useCallback',
  'useRef',
  'useContext',
  'useSyncExternalStore',
  'useTransition',
  'useDeferredValue',
  'useId',
  'useImperativeHandle',
]
// Set form of BUILTIN_HOOKS for O(1) membership tests.
export const BUILTIN_HOOK_SET = new Set(BUILTIN_HOOKS)
// Directory names skipped entirely while walking the source tree.
export const SKIP_DIRS = new Set([
  'node_modules',
  '.next',
  'dist',
  'build',
  'coverage',
  '.git',
  '__tests__',
  '__mocks__',
  '__snapshots__',
])
// Heuristic limits that flag a component as a render-performance risk.
export const THRESHOLDS = {
  slowRenderMs: 16, // ~one frame at 60fps
  largeComponentLines: 200,
  veryLargeComponentLines: 300,
  highHookCount: 12,
  highEffectCount: 3,
}
// Only files with these extensions are analyzed.
export const TARGET_EXTENSIONS = new Set(['.tsx'])

View File

@@ -0,0 +1,83 @@
import { readFileSync } from 'fs'
import { basename, relative } from 'path'
import { BUILTIN_HOOKS, BUILTIN_HOOK_SET, THRESHOLDS } from '../constants'
import { ComponentMetrics, HookCounts } from '../types'
import { countMatches } from './count-matches'
import { estimateRenderTimeMs } from './estimate-render-time-ms'
/**
 * Statically analyzes a single .tsx file and derives heuristic render
 * metrics: hook usage, effect and memoization counts, an estimated render
 * time, and a low/medium/high risk label with human-readable reasons.
 *
 * Returns null when the file cannot be read.
 *
 * NOTE(review): all counts are regex-based, not parse-based — a hook name
 * appearing in an import line or a comment is counted too.
 */
export function analyzeFile(filePath: string): ComponentMetrics | null {
  let content = ''
  try {
    content = readFileSync(filePath, 'utf8')
  } catch {
    // Unreadable file (permissions, deleted mid-scan): skip it.
    return null
  }
  const lines = content.split(/\r?\n/).length
  const bytes = Buffer.byteLength(content, 'utf8')
  const byHook: HookCounts = {}
  let builtInCount = 0
  // Tally each built-in hook name individually (word-boundary match).
  for (const hook of BUILTIN_HOOKS) {
    const count = countMatches(content, new RegExp(`\\b${hook}\\b`, 'g'))
    byHook[hook] = count
    builtInCount += count
  }
  // Anything shaped like use<Capital>... that is not a known built-in is
  // treated as a custom hook occurrence.
  const allHookCalls = content.match(/\buse[A-Z]\w*\b/g) ?? []
  const customHookCount = Math.max(0, allHookCalls.filter(hook => !BUILTIN_HOOK_SET.has(hook)).length)
  const hookCount = builtInCount + customHookCount
  const effectCount = (byHook.useEffect ?? 0) + (byHook.useLayoutEffect ?? 0) + (byHook.useInsertionEffect ?? 0)
  const memoCount = (byHook.useMemo ?? 0) + (byHook.useCallback ?? 0)
  const reactMemoCount = countMatches(content, /\bReact\.memo\b/g)
  const memoCallCount = countMatches(content, /\bmemo\s*\(/g)
  // memo( calls already counted via React.memo are subtracted so a
  // qualified call is not double-counted.
  const memoization = memoCount + reactMemoCount + Math.max(0, memoCallCount - reactMemoCount)
  const estimatedRenderTimeMs = estimateRenderTimeMs(lines, hookCount, effectCount, memoization)
  const reasons: string[] = []
  if (lines >= THRESHOLDS.veryLargeComponentLines) {
    reasons.push(`Very large component: ${lines} lines`)
  } else if (lines >= THRESHOLDS.largeComponentLines) {
    reasons.push(`Large component: ${lines} lines`)
  }
  if (hookCount >= THRESHOLDS.highHookCount) {
    reasons.push(`High hook count: ${hookCount}`)
  }
  if (effectCount >= THRESHOLDS.highEffectCount) {
    reasons.push(`Multiple effects: ${effectCount}`)
  }
  if (estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs) {
    reasons.push(`Estimated render time: ${estimatedRenderTimeMs}ms`)
  }
  // Risk: a slow estimate or 3+ reasons => high; any reason => medium.
  let risk: ComponentMetrics['risk'] = 'low'
  if (reasons.length >= 3 || estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs) {
    risk = 'high'
  } else if (reasons.length >= 1) {
    risk = 'medium'
  }
  return {
    file: relative(process.cwd(), filePath),
    component: basename(filePath, '.tsx'),
    lines,
    bytes,
    hooks: {
      builtIn: builtInCount,
      custom: customHookCount,
      total: hookCount,
      byHook,
    },
    effects: effectCount,
    memoization,
    estimatedRenderTimeMs,
    reasons,
    risk,
  }
}

View File

@@ -0,0 +1,33 @@
import { THRESHOLDS } from '../constants'
import { ComponentMetrics } from '../types'
/**
 * Maps the set of flagged components to actionable recommendation strings.
 * Returns a single "all clear" message when nothing was flagged, and a
 * generic review prompt when components were flagged but no specific
 * pattern applies.
 */
export function buildRecommendations(slowComponents: ComponentMetrics[]): string[] {
  if (slowComponents.length === 0) {
    return ['No high-risk components detected. Re-run after significant UI changes.']
  }
  // Each entry pairs a predicate with the advice emitted when any flagged
  // component satisfies it; order determines output order.
  const checks: Array<[(component: ComponentMetrics) => boolean, string]> = [
    [
      component => component.lines >= THRESHOLDS.veryLargeComponentLines,
      'Split components over 300 lines into smaller pieces to reduce render work.',
    ],
    [
      component => component.effects >= THRESHOLDS.highEffectCount,
      'Reduce the number of effects per component by extracting side effects into hooks.',
    ],
    [
      component => component.hooks.total >= THRESHOLDS.highHookCount,
      'Consider splitting stateful logic across smaller components or hooks.',
    ],
    [
      component => component.memoization === 0 && component.estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs,
      'Add memoization (React.memo/useMemo/useCallback) where render work is heavy.',
    ],
  ]
  const recommendations = checks
    .filter(([applies]) => slowComponents.some(applies))
    .map(([, message]) => message)
  if (recommendations.length === 0) {
    recommendations.push('Review flagged components for unnecessary renders or expensive computations.')
  }
  return recommendations
}

View File

@@ -0,0 +1,33 @@
import { relative } from 'path'
import { THRESHOLDS } from '../constants'
import { ComponentMetrics, RenderPerformanceSummary } from '../types'
import { buildRecommendations } from './build-recommendations'
/**
 * Aggregates per-component metrics into the final report payload:
 * average estimated render time, the slowest offenders, top-10 lists,
 * and recommendations.
 */
export function buildSummary(metrics: ComponentMetrics[], rootDir: string): RenderPerformanceSummary {
  // Average rounded to one decimal place; 0 when nothing was analyzed.
  const averageRenderTime = metrics.length === 0
    ? 0
    : Math.round((metrics.reduce((sum, metric) => sum + metric.estimatedRenderTimeMs, 0) / metrics.length) * 10) / 10
  // Flagged components (any reason, or over the slow threshold), slowest first.
  const slowComponents = metrics
    .filter(metric => metric.reasons.length > 0 || metric.estimatedRenderTimeMs >= THRESHOLDS.slowRenderMs)
    .sort((a, b) => b.estimatedRenderTimeMs - a.estimatedRenderTimeMs)
  // Copies are sorted so the input array order is never mutated.
  const topByLines = [...metrics].sort((a, b) => b.lines - a.lines).slice(0, 10)
  const topByHooks = [...metrics].sort((a, b) => b.hooks.total - a.hooks.total).slice(0, 10)
  return {
    analysisType: 'static-heuristic',
    rootDir: relative(process.cwd(), rootDir) || '.',
    componentsAnalyzed: metrics.length,
    // averageRenderTime duplicated as averageRenderTimeMs — presumably for
    // backward compatibility with existing consumers; verify before removing.
    averageRenderTime,
    averageRenderTimeMs: averageRenderTime,
    slowComponentsTotal: slowComponents.length,
    thresholds: THRESHOLDS,
    slowComponents: slowComponents.slice(0, 15),
    topByLines,
    topByHooks,
    recommendations: buildRecommendations(slowComponents),
    note: 'Estimated render times are derived from file size and hook usage. Use React Profiler for real timings.',
    timestamp: new Date().toISOString(),
  }
}

View File

@@ -0,0 +1,3 @@
/**
 * Counts how many times a global regex matches within content.
 * Returns 0 when there is no match.
 */
export function countMatches(content: string, regex: RegExp): number {
  const matches = content.match(regex)
  return matches === null ? 0 : matches.length
}

View File

@@ -0,0 +1,9 @@
/**
 * Rough render-time estimate (ms) from static signals.
 * Line cost is capped at 400 lines and memoization credit at 4 sites;
 * the result is rounded to one decimal and floored at 0.5 ms.
 */
export function estimateRenderTimeMs(lines: number, hooks: number, effects: number, memoization: number): number {
  const raw =
    1.5 +
    Math.min(lines, 400) * 0.03 +
    hooks * 0.4 +
    effects * 0.8 -
    Math.min(memoization, 4) * 0.3
  return Math.max(0.5, Math.round(raw * 10) / 10)
}

View File

@@ -0,0 +1,18 @@
import { existsSync } from 'fs'
import { join } from 'path'
/**
 * Resolves the directory to analyze. Priority: the RENDER_ANALYSIS_ROOT
 * env var, then ./frontends/nextjs/src, then ./src — first one that
 * exists wins. Returns null when none exist.
 */
export function pickSourceRoot(): string | null {
  const cwd = process.cwd()
  const candidates = [
    process.env.RENDER_ANALYSIS_ROOT,
    join(cwd, 'frontends', 'nextjs', 'src'),
    join(cwd, 'src'),
  ].filter(Boolean) as string[]
  return candidates.find(candidate => existsSync(candidate)) ?? null
}

View File

@@ -0,0 +1,44 @@
import { readdirSync, statSync } from 'fs'
import { extname, join } from 'path'
import { SKIP_DIRS, TARGET_EXTENSIONS } from '../constants'
/**
 * Recursively collects analyzable component files under dir into files.
 * Unreadable directories/entries are skipped silently; SKIP_DIRS
 * subtrees, non-target extensions, and test/spec/story files are excluded.
 */
export function walkDir(dir: string, files: string[]): void {
  let entries: string[]
  try {
    entries = readdirSync(dir)
  } catch {
    return
  }
  for (const entry of entries) {
    const fullPath = join(dir, entry)
    let stats
    try {
      stats = statSync(fullPath)
    } catch {
      continue
    }
    if (stats.isDirectory()) {
      if (!SKIP_DIRS.has(entry)) {
        walkDir(fullPath, files)
      }
      continue
    }
    const isCandidate =
      stats.isFile() &&
      TARGET_EXTENSIONS.has(extname(entry)) &&
      !['.test.tsx', '.spec.tsx', '.stories.tsx'].some(suffix => entry.endsWith(suffix))
    if (isCandidate) {
      files.push(fullPath)
    }
  }
}

View File

@@ -0,0 +1,36 @@
import { THRESHOLDS } from './constants'
import { analyzeFile } from './functions/analyze-file'
import { buildSummary } from './functions/build-summary'
import { pickSourceRoot } from './functions/pick-source-root'
import { walkDir } from './functions/walk-dir'
import { RenderPerformanceSummary } from './types'
/**
 * Entry point for the static render-performance analysis: locate the
 * source root, collect .tsx component files, analyze each one, and
 * aggregate the results into a summary.
 */
export function runRenderPerformanceAnalysis(): RenderPerformanceSummary {
  const rootDir = pickSourceRoot()
  if (!rootDir) {
    // No source directory found: return an empty but well-formed summary
    // so callers can still serialize and report it.
    return {
      analysisType: 'static-heuristic',
      componentsAnalyzed: 0,
      averageRenderTime: 0,
      averageRenderTimeMs: 0,
      slowComponentsTotal: 0,
      thresholds: THRESHOLDS,
      slowComponents: [],
      topByLines: [],
      topByHooks: [],
      recommendations: ['No source directory found to analyze.'],
      note: 'Estimated render times are derived from file size and hook usage. Use React Profiler for real timings.',
      timestamp: new Date().toISOString(),
    }
  }
  const files: string[] = []
  walkDir(rootDir, files)
  // Drop files that could not be read (analyzeFile returns null for those).
  const metrics = files
    .map(file => analyzeFile(file))
    .filter((result): result is NonNullable<ReturnType<typeof analyzeFile>> => result !== null)
  return buildSummary(metrics, rootDir)
}

View File

@@ -0,0 +1,43 @@
// Map of hook name -> occurrence count within one file.
export interface HookCounts {
  [key: string]: number
}
// Heuristic metrics derived for one .tsx component file.
export interface ComponentMetrics {
  file: string // path relative to process.cwd()
  component: string // file basename without .tsx
  lines: number
  bytes: number
  hooks: {
    builtIn: number
    custom: number
    total: number
    byHook: HookCounts
  }
  effects: number // useEffect + useLayoutEffect + useInsertionEffect
  memoization: number // useMemo/useCallback/React.memo occurrences
  estimatedRenderTimeMs: number
  reasons: string[] // human-readable explanations for the risk label
  risk: 'low' | 'medium' | 'high'
}
// Shape of the JSON summary emitted by the analysis entry point.
export interface RenderPerformanceSummary {
  analysisType: string
  rootDir?: string // absent when no source root was found
  componentsAnalyzed: number
  averageRenderTime: number
  averageRenderTimeMs: number // same value as averageRenderTime
  slowComponentsTotal: number
  thresholds: {
    slowRenderMs: number
    largeComponentLines: number
    veryLargeComponentLines: number
    highHookCount: number
    highEffectCount: number
  }
  slowComponents: ComponentMetrics[]
  topByLines: ComponentMetrics[]
  topByHooks: ComponentMetrics[]
  recommendations: string[]
  note: string
  timestamp: string // ISO 8601
}

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env node
/**
* Code Size Limit Enforcer
*
*
* Enforces multiple metrics to keep files maintainable:
* - TypeScript/React: Max 150 lines of actual code (LOC)
* - Any file: Max 300 lines total (including comments/whitespace)
@@ -10,240 +10,12 @@
* - Max 5 parameters per function
*/
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';
import { runSizeLimitEnforcement } from './enforce-size-limits'
interface FileSizeLimits {
maxLoc: number;
maxTotalLines: number;
maxNestingDepth: number;
maxExports: number;
maxFunctionParams: number;
console.log('🔍 Scanning for size limit violations...\n')
const { exitCode } = runSizeLimitEnforcement()
if (exitCode !== 0) {
process.exit(exitCode)
}
const DEFAULT_LIMITS: Record<string, FileSizeLimits> = {
tsx: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 5, maxFunctionParams: 5 },
ts: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 10, maxFunctionParams: 5 },
jsx: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 5, maxFunctionParams: 5 },
js: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 10, maxFunctionParams: 5 },
};
interface Violation {
file: string;
metric: string;
current: number;
limit: number;
severity: 'error' | 'warning';
}
const violations: Violation[] = [];
function countLinesOfCode(content: string): number {
return content
.split('\n')
.filter(line => {
const trimmed = line.trim();
return trimmed.length > 0 && !trimmed.startsWith('//');
})
.length;
}
function countExports(content: string): number {
const exportMatches = content.match(/^\s*(export\s+(default\s+)?(function|const|class|interface|type|enum))/gm);
return exportMatches ? exportMatches.length : 0;
}
function maxNestingDepth(content: string): number {
let maxDepth = 0;
let currentDepth = 0;
for (const char of content) {
if (char === '{' || char === '[' || char === '(') {
currentDepth++;
maxDepth = Math.max(maxDepth, currentDepth);
} else if (char === '}' || char === ']' || char === ')') {
currentDepth--;
}
}
return maxDepth;
}
function maxFunctionParams(content: string): number {
const funcMatches = content.match(/(?:function|const\s+\w+\s*=|\s*\()\s*\(([^)]*)\)/g);
if (!funcMatches) return 0;
let maxParams = 0;
for (const match of funcMatches) {
const params = match
.substring(match.indexOf('(') + 1, match.lastIndexOf(')'))
.split(',')
.filter(p => p.trim().length > 0).length;
maxParams = Math.max(maxParams, params);
}
return maxParams;
}
function analyzeFile(filePath: string): void {
try {
const content = fs.readFileSync(filePath, 'utf-8');
const ext = path.extname(filePath).substring(1);
if (!DEFAULT_LIMITS[ext]) return;
const limits = DEFAULT_LIMITS[ext];
const totalLines = content.split('\n').length;
const loc = countLinesOfCode(content);
const exports = countExports(content);
const nesting = maxNestingDepth(content);
const params = maxFunctionParams(content);
if (loc > limits.maxLoc) {
violations.push({
file: filePath,
metric: `Lines of Code (LOC)`,
current: loc,
limit: limits.maxLoc,
severity: 'error',
});
}
if (totalLines > limits.maxTotalLines) {
violations.push({
file: filePath,
metric: `Total Lines`,
current: totalLines,
limit: limits.maxTotalLines,
severity: 'warning',
});
}
if (exports > limits.maxExports) {
violations.push({
file: filePath,
metric: `Number of Exports`,
current: exports,
limit: limits.maxExports,
severity: 'warning',
});
}
if (nesting > limits.maxNestingDepth) {
violations.push({
file: filePath,
metric: `Max Nesting Depth`,
current: nesting,
limit: limits.maxNestingDepth,
severity: 'warning',
});
}
if (params > limits.maxFunctionParams) {
violations.push({
file: filePath,
metric: `Max Function Parameters`,
current: params,
limit: limits.maxFunctionParams,
severity: 'warning',
});
}
} catch (error) {
// Silently skip files that can't be read
}
}
function scanDirectory(dir: string, exclude: string[] = []): void {
const files = fs.readdirSync(dir);
for (const file of files) {
const fullPath = path.join(dir, file);
const stat = fs.statSync(fullPath);
// Skip excluded directories
if (stat.isDirectory()) {
if (exclude.some(ex => fullPath.includes(ex))) {
continue;
}
scanDirectory(fullPath, exclude);
} else if (/\.(ts|tsx|js|jsx)$/.test(file)) {
analyzeFile(fullPath);
}
}
}
function generateReport(): void {
if (violations.length === 0) {
console.log('✅ All files comply with size limits!');
return;
}
const errors = violations.filter(v => v.severity === 'error');
const warnings = violations.filter(v => v.severity === 'warning');
console.log('\n📊 Code Size Limit Violations Report\n');
console.log('━'.repeat(100));
if (errors.length > 0) {
console.log(`\n❌ ERRORS (${errors.length}):\n`);
for (const v of errors) {
console.log(` 📄 ${v.file}`);
console.log(` ${v.metric}: ${v.current} / ${v.limit}`);
console.log('');
}
}
if (warnings.length > 0) {
console.log(`\n⚠ WARNINGS (${warnings.length}):\n`);
for (const v of warnings) {
console.log(` 📄 ${v.file}`);
console.log(` ${v.metric}: ${v.current} / ${v.limit}`);
console.log('');
}
}
console.log('━'.repeat(100));
console.log(
`\n📈 Summary: ${errors.length} errors, ${warnings.length} warnings\n`
);
// Export to JSON for CI/CD
const report = {
timestamp: new Date().toISOString(),
errors: errors.length,
warnings: warnings.length,
violations: violations.map(v => ({
file: v.file,
metric: v.metric,
current: v.current,
limit: v.limit,
severity: v.severity,
})),
};
fs.writeFileSync(
path.join(process.cwd(), 'size-limits-report.json'),
JSON.stringify(report, null, 2)
);
if (errors.length > 0) {
process.exit(1);
}
}
// Main execution
const rootDir = process.cwd();
const excludeDirs = [
'node_modules',
'build',
'.next',
'dist',
'.git',
'coverage',
'.venv',
];
console.log('🔍 Scanning for size limit violations...\n');
scanDirectory(rootDir, excludeDirs);
generateReport();

View File

@@ -0,0 +1,15 @@
import { EnforcementConfig } from './types'
// Per-extension size limits. Component files (.tsx/.jsx) allow fewer
// exports (5) than plain modules (10); the other limits are shared.
const DEFAULT_LIMITS = {
  tsx: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 5, maxFunctionParams: 5 },
  ts: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 10, maxFunctionParams: 5 },
  jsx: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 5, maxFunctionParams: 5 },
  js: { maxLoc: 150, maxTotalLines: 200, maxNestingDepth: 3, maxExports: 10, maxFunctionParams: 5 },
}
// Default enforcement configuration: scan the current working directory,
// skipping common dependency/build folders, and write the JSON report
// to size-limits-report.json in the root.
export const DEFAULT_CONFIG: EnforcementConfig = {
  rootDir: process.cwd(),
  excludeDirs: ['node_modules', 'build', '.next', 'dist', '.git', 'coverage', '.venv'],
  reportFileName: 'size-limits-report.json',
  limits: DEFAULT_LIMITS,
}

View File

@@ -0,0 +1,79 @@
import fs from 'fs'
import path from 'path'
import { FileSizeLimits, Violation } from '../types'
import { countExports } from './count-exports'
import { countLinesOfCode } from './count-lines-of-code'
import { maxFunctionParams } from './max-function-params'
import { maxNestingDepth } from './max-nesting-depth'
/**
 * Checks one file against the size limits configured for its extension
 * and returns the list of violations found (possibly empty).
 *
 * Files with an unconfigured extension produce no violations; files that
 * cannot be read are skipped silently by design.
 */
export function analyzeFile(filePath: string, limits: Record<string, FileSizeLimits>): Violation[] {
  const violations: Violation[] = []
  try {
    const content = fs.readFileSync(filePath, 'utf-8')
    const ext = path.extname(filePath).substring(1)
    const fileLimits = limits[ext]
    if (!fileLimits) {
      return violations
    }
    // One row per metric; only LOC is error severity, the rest warn.
    const checks: Array<{ metric: string; current: number; limit: number; severity: 'error' | 'warning' }> = [
      { metric: 'Lines of Code (LOC)', current: countLinesOfCode(content), limit: fileLimits.maxLoc, severity: 'error' },
      { metric: 'Total Lines', current: content.split('\n').length, limit: fileLimits.maxTotalLines, severity: 'warning' },
      { metric: 'Number of Exports', current: countExports(content), limit: fileLimits.maxExports, severity: 'warning' },
      { metric: 'Max Nesting Depth', current: maxNestingDepth(content), limit: fileLimits.maxNestingDepth, severity: 'warning' },
      { metric: 'Max Function Parameters', current: maxFunctionParams(content), limit: fileLimits.maxFunctionParams, severity: 'warning' },
    ]
    for (const { metric, current, limit, severity } of checks) {
      if (current > limit) {
        violations.push({ file: filePath, metric, current, limit, severity })
      }
    }
  } catch {
    // Silently skip files that can't be read
  }
  return violations
}

View File

@@ -0,0 +1,13 @@
import { ReportData, Violation } from '../types'
/**
 * Wraps raw violations in the report envelope: a current ISO timestamp
 * plus error and warning counts. The violations array is passed through
 * by reference, not copied.
 */
export function buildReportData(violations: Violation[]): ReportData {
  let errors = 0
  let warnings = 0
  for (const violation of violations) {
    if (violation.severity === 'error') {
      errors += 1
    } else if (violation.severity === 'warning') {
      warnings += 1
    }
  }
  return {
    timestamp: new Date().toISOString(),
    errors,
    warnings,
    violations,
  }
}

View File

@@ -0,0 +1,4 @@
/**
 * Counts exported declarations (function/const/class/interface/type/enum,
 * optionally `default`), one per line start.
 * NOTE: re-export lists like `export { a, b }` do not match the pattern
 * and are not counted.
 */
export function countExports(content: string): number {
  const EXPORT_PATTERN = /^\s*(export\s+(default\s+)?(function|const|class|interface|type|enum))/gm
  return content.match(EXPORT_PATTERN)?.length ?? 0
}

View File

@@ -0,0 +1,9 @@
/**
 * Counts non-blank lines that are not `//` line comments.
 * NOTE: lines inside block comments still count — only the `//` prefix
 * is recognized as a comment.
 */
export function countLinesOfCode(content: string): number {
  let count = 0
  for (const rawLine of content.split('\n')) {
    const line = rawLine.trim()
    if (line.length > 0 && !line.startsWith('//')) {
      count += 1
    }
  }
  return count
}

View File

@@ -0,0 +1,15 @@
/**
 * Returns the largest comma-separated parameter count found in anything
 * that looks like a function signature.
 * NOTE: heuristic — matching stops at the first `)`, so parameter lists
 * containing nested parentheses are truncated.
 */
export function maxFunctionParams(content: string): number {
  const signatures = content.match(/(?:function|const\s+\w+\s*=|\s*\()\s*\(([^)]*)\)/g)
  if (!signatures) return 0
  return signatures.reduce((max, signature) => {
    const inner = signature.substring(signature.indexOf('(') + 1, signature.lastIndexOf(')'))
    const count = inner.split(',').filter(part => part.trim().length > 0).length
    return Math.max(max, count)
  }, 0)
}

View File

@@ -0,0 +1,15 @@
/**
 * Deepest simultaneous nesting of {}, [] and () in the text.
 * NOTE: every bracket character counts, including those inside string
 * literals and comments — this is a rough character-level scan.
 */
export function maxNestingDepth(content: string): number {
  const OPENERS = '{[('
  const CLOSERS = '}])'
  let depth = 0
  let deepest = 0
  for (const char of content) {
    if (OPENERS.includes(char)) {
      depth += 1
      if (depth > deepest) {
        deepest = depth
      }
    } else if (CLOSERS.includes(char)) {
      depth -= 1
    }
  }
  return deepest
}

View File

@@ -0,0 +1,35 @@
import { ReportData } from '../types'
/**
 * Prints a human-readable violations report to stdout: errors first,
 * then warnings, then a summary line. When there are no violations a
 * success message is printed instead.
 */
export function printReport(report: ReportData): void {
  // NOTE: filtering happens before the empty check; harmless since
  // filtering an empty array is cheap.
  const errors = report.violations.filter(v => v.severity === 'error')
  const warnings = report.violations.filter(v => v.severity === 'warning')
  if (report.violations.length === 0) {
    console.log('✅ All files comply with size limits!')
    return
  }
  console.log('\n📊 Code Size Limit Violations Report\n')
  console.log('━'.repeat(100))
  if (errors.length > 0) {
    console.log(`\n❌ ERRORS (${errors.length}):\n`)
    for (const violation of errors) {
      console.log(` 📄 ${violation.file}`)
      console.log(` ${violation.metric}: ${violation.current} / ${violation.limit}`)
      console.log('')
    }
  }
  if (warnings.length > 0) {
    console.log(`\n⚠ WARNINGS (${warnings.length}):\n`)
    for (const violation of warnings) {
      console.log(` 📄 ${violation.file}`)
      console.log(` ${violation.metric}: ${violation.current} / ${violation.limit}`)
      console.log('')
    }
  }
  console.log('━'.repeat(100))
  console.log(`\n📈 Summary: ${errors.length} errors, ${warnings.length} warnings\n`)
}

View File

@@ -0,0 +1,25 @@
import fs from 'fs'
import path from 'path'
import { FileSizeLimits, Violation } from '../types'
import { analyzeFile } from './analyze-file'
/**
 * Recursively scans dir for .ts/.tsx/.js/.jsx files and returns all size
 * violations found. Directories whose full path contains any fragment in
 * exclude are skipped entirely.
 */
export function scanDirectory(dir: string, limits: Record<string, FileSizeLimits>, exclude: string[] = []): Violation[] {
  const collected: Violation[] = []
  for (const entry of fs.readdirSync(dir)) {
    const fullPath = path.join(dir, entry)
    const isDirectory = fs.statSync(fullPath).isDirectory()
    if (isDirectory) {
      const isExcluded = exclude.some(fragment => fullPath.includes(fragment))
      if (!isExcluded) {
        collected.push(...scanDirectory(fullPath, limits, exclude))
      }
    } else if (/\.(ts|tsx|js|jsx)$/.test(entry)) {
      collected.push(...analyzeFile(fullPath, limits))
    }
  }
  return collected
}

View File

@@ -0,0 +1,8 @@
import fs from 'fs'
import path from 'path'
import { ReportData } from '../types'
/**
 * Serializes the report as pretty-printed (2-space) JSON to
 * rootDir/fileName, overwriting any existing file.
 */
export function writeReportFile(report: ReportData, rootDir: string, fileName: string): void {
  fs.writeFileSync(path.join(rootDir, fileName), JSON.stringify(report, null, 2))
}

View File

@@ -0,0 +1,17 @@
import { DEFAULT_CONFIG } from './constants'
import { buildReportData } from './functions/build-report-data'
import { printReport } from './functions/print-report'
import { scanDirectory } from './functions/scan-directory'
import { writeReportFile } from './functions/write-report-file'
import { EnforcementConfig, ReportData } from './types'
/**
 * Runs a full size-limit enforcement pass: scans the configured root,
 * builds and prints the report, writes it to disk, and returns it with
 * a CI-friendly exit code.
 */
export function runSizeLimitEnforcement(config: EnforcementConfig = DEFAULT_CONFIG): { report: ReportData; exitCode: number } {
  const violations = scanDirectory(config.rootDir, config.limits, config.excludeDirs)
  const report = buildReportData(violations)
  printReport(report)
  writeReportFile(report, config.rootDir, config.reportFileName)
  // Only error-severity violations fail the run; warnings do not.
  const exitCode = report.errors > 0 ? 1 : 0
  return { report, exitCode }
}

View File

@@ -0,0 +1,29 @@
// Per-extension thresholds enforced on a single file.
export interface FileSizeLimits {
  maxLoc: number // non-blank, non-`//` lines (see countLinesOfCode)
  maxTotalLines: number // raw line count including comments/whitespace
  maxNestingDepth: number
  maxExports: number
  maxFunctionParams: number
}
// One limit breach found in one file.
export interface Violation {
  file: string
  metric: string
  current: number
  limit: number
  severity: 'error' | 'warning'
}
// Runtime configuration for one enforcement run.
export interface EnforcementConfig {
  rootDir: string
  excludeDirs: string[] // path fragments; a directory matching any is skipped
  reportFileName: string
  limits: Record<string, FileSizeLimits> // keyed by extension without dot
}
// JSON report written to disk and returned to callers.
export interface ReportData {
  timestamp: string // ISO 8601
  errors: number
  warnings: number
  violations: Violation[]
}