Merge pull request #156 from johndoe6345789/codex/refactor-tool-scripts-into-single-purpose-lambdas

Refactor lambda refactoring tools into modular helpers
This commit is contained in:
2025-12-27 17:33:46 +00:00
committed by GitHub
23 changed files with 820 additions and 1120 deletions

View File

@@ -1,33 +1,16 @@
#!/usr/bin/env tsx
/**
* AST-based Lambda Refactoring Tool
*
* Uses TypeScript compiler API for accurate code analysis and transformation
*/
import * as ts from 'typescript'
import * as fs from 'fs/promises'
import * as path from 'path'
import { exec } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
interface ExtractedFunction {
name: string
fullText: string
isExported: boolean
isAsync: boolean
leadingComments: string
startPos: number
endPos: number
}
interface ExtractedImport {
fullText: string
moduleSpecifier: string
namedImports: string[]
}
import { analyzeAstFile, astNameHelpers } from './ast/analyze-ast-file'
import { AstExtractedFunction, ExtractedImport } from './lambda/types'
import { buildAstFunctionContent } from './io/build-ast-function-content'
import { buildClassWrapper } from './io/build-class-wrapper'
import { buildIndexContent } from './io/build-index-content'
import { writeFileSafely } from './io/write-file'
import { runLintFix } from './workflow/run-lint'
class ASTLambdaRefactor {
private dryRun: boolean
@@ -44,176 +27,57 @@ class ASTLambdaRefactor {
}
}
/**
* Parse TypeScript file and extract functions using AST
*/
async analyzeFil(filePath: string): Promise<{
functions: ExtractedFunction[]
imports: ExtractedImport[]
types: string[]
}> {
const sourceCode = await fs.readFile(filePath, 'utf-8')
const sourceFile = ts.createSourceFile(
filePath,
sourceCode,
ts.ScriptTarget.Latest,
true
)
const functions: ExtractedFunction[] = []
const imports: ExtractedImport[] = []
const types: string[] = []
const visit = (node: ts.Node) => {
// Extract function declarations
if (ts.isFunctionDeclaration(node) && node.name) {
const isExported = node.modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword) || false
const isAsync = node.modifiers?.some(m => m.kind === ts.SyntaxKind.AsyncKeyword) || false
// Get leading comments
const leadingComments = ts.getLeadingCommentRanges(sourceCode, node.getFullStart())
let commentText = ''
if (leadingComments) {
for (const comment of leadingComments) {
commentText += sourceCode.substring(comment.pos, comment.end) + '\n'
}
}
functions.push({
name: node.name.text,
fullText: node.getText(sourceFile),
isExported,
isAsync,
leadingComments: commentText.trim(),
startPos: node.getStart(sourceFile),
endPos: node.getEnd(),
})
}
// Extract class methods
if (ts.isClassDeclaration(node) && node.members) {
for (const member of node.members) {
if (ts.isMethodDeclaration(member) && member.name && ts.isIdentifier(member.name)) {
const isAsync = member.modifiers?.some(m => m.kind === ts.SyntaxKind.AsyncKeyword) || false
// Get leading comments
const leadingComments = ts.getLeadingCommentRanges(sourceCode, member.getFullStart())
let commentText = ''
if (leadingComments) {
for (const comment of leadingComments) {
commentText += sourceCode.substring(comment.pos, comment.end) + '\n'
}
}
// Convert method to function
const methodText = member.getText(sourceFile)
const functionText = this.convertMethodToFunction(methodText, member.name.text, isAsync)
functions.push({
name: member.name.text,
fullText: functionText,
isExported: true,
isAsync,
leadingComments: commentText.trim(),
startPos: member.getStart(sourceFile),
endPos: member.getEnd(),
})
}
}
}
// Extract imports
if (ts.isImportDeclaration(node)) {
const moduleSpec = (node.moduleSpecifier as ts.StringLiteral).text
const namedImports: string[] = []
if (node.importClause?.namedBindings && ts.isNamedImports(node.importClause.namedBindings)) {
for (const element of node.importClause.namedBindings.elements) {
namedImports.push(element.name.text)
}
}
imports.push({
fullText: node.getText(sourceFile),
moduleSpecifier: moduleSpec,
namedImports,
})
}
// Extract type definitions
if (ts.isTypeAliasDeclaration(node) || ts.isInterfaceDeclaration(node)) {
types.push(node.getText(sourceFile))
}
ts.forEachChild(node, visit)
}
visit(sourceFile)
return { functions, imports, types }
// File-name helper: camelCase function name -> kebab-case.
// Delegates to the shared astNameHelpers so both refactoring tools agree on naming.
private toKebabCase(name: string) {
  return astNameHelpers.toKebabCase(name)
}
/**
* Convert a class method to a standalone function
*/
private convertMethodToFunction(methodText: string, methodName: string, isAsync: boolean): string {
// Remove visibility modifiers (public, private, protected)
let funcText = methodText.replace(/^\s*(public|private|protected)\s+/, '')
// Ensure it starts with async if needed
if (isAsync && !funcText.trim().startsWith('async')) {
funcText = 'async ' + funcText
}
// Convert method syntax to function syntax
// "methodName(...): Type {" -> "function methodName(...): Type {"
funcText = funcText.replace(/^(\s*)(async\s+)?([a-zA-Z0-9_]+)(\s*\([^)]*\))/, '$1$2function $3$4')
return funcText
// Wrapper-class helper: file basename -> PascalCase name with "Utils" suffix.
// Delegates to the shared astNameHelpers implementation.
private toClassName(name: string) {
  return astNameHelpers.toClassName(name)
}
/**
* Create individual function file with proper imports
*/
async createFunctionFile(
func: ExtractedFunction,
allImports: ExtractedImport[],
private async createFunctionFile(
func: AstExtractedFunction,
imports: ExtractedImport[],
outputPath: string
): Promise<void> {
let content = ''
// Add imports (for now, include all - can be optimized to only include used imports)
if (allImports.length > 0) {
content += allImports.map(imp => imp.fullText).join('\n') + '\n\n'
}
// Add comments
if (func.leadingComments) {
content += func.leadingComments + '\n'
}
// Add function (ensure it's exported)
let funcText = func.fullText
if (!func.isExported && !funcText.includes('export ')) {
funcText = 'export ' + funcText
} else if (!funcText.includes('export ')) {
funcText = 'export ' + funcText
}
content += funcText + '\n'
if (!this.dryRun) {
await fs.writeFile(outputPath, content, 'utf-8')
}
) {
const content = buildAstFunctionContent(func, imports)
await writeFileSafely(outputPath, content, this.dryRun)
}
// Write the barrel index file re-exporting every extracted function from the
// functions/ directory. writeFileSafely receives this.dryRun — presumably it
// skips the actual disk write in dry-run mode; TODO confirm in io/write-file.
private async createIndexFile(functions: AstExtractedFunction[], functionsDir: string, outputPath: string) {
  const indexContent = buildIndexContent(functions, functionsDir)
  await writeFileSafely(outputPath, indexContent, this.dryRun)
}
// Write the `<ClassName>` convenience wrapper whose static methods forward to
// the extracted functions located under 'functions'. writeFileSafely receives
// this.dryRun — presumably it skips the write in dry-run mode; TODO confirm.
private async createClassWrapper(className: string, functions: AstExtractedFunction[], outputPath: string) {
  const classContent = buildClassWrapper(className, functions, 'functions')
  await writeFileSafely(outputPath, classContent, this.dryRun)
}
/**
 * Overwrite the original source file with a stub that documents the new
 * modular layout and re-exports everything from './<basename>'.
 * `sampleFunction` appears only inside the generated doc comment's @example
 * blocks to show callers what to import.
 */
private async replaceOriginal(filePath: string, basename: string, className: string, sampleFunction: string) {
  const newMainContent = `/**
 * This file has been refactored into modular lambda-per-file structure.
 *
 * Import individual functions or use the class wrapper:
 * @example
 * import { ${sampleFunction} } from './${basename}'
 *
 * @example
 * import { ${className} } from './${basename}'
 * ${className}.${sampleFunction}(...)
 */
export * from './${basename}'
`
  await writeFileSafely(filePath, newMainContent, this.dryRun) // respects dry-run
  this.log(` ✓ Updated ${path.basename(filePath)}`)
}
/**
* Refactor a file using AST analysis
*/
async refactorFile(filePath: string): Promise<void> {
this.log(`\n🔍 Analyzing ${filePath}...`)
const { functions, imports, types } = await this.analyzeFile(filePath)
const { functions, imports } = await analyzeAstFile(filePath)
if (functions.length === 0) {
this.log(' ⏭️ No functions found - skipping')
@@ -227,127 +91,34 @@ class ASTLambdaRefactor {
this.log(` Found ${functions.length} functions: ${functions.map(f => f.name).join(', ')}`)
// Create output directory structure
const dir = path.dirname(filePath)
const basename = path.basename(filePath, path.extname(filePath))
const functionsDir = path.join(dir, basename, 'functions')
if (!this.dryRun) {
await fs.mkdir(functionsDir, { recursive: true })
}
this.log(` Creating: ${functionsDir}`)
// Create individual function files
for (const func of functions) {
const kebabName = this.toKebabCase(func.name)
const funcFile = path.join(functionsDir, `${kebabName}.ts`)
await this.createFunctionFile(func, imports, funcFile)
this.log(`${kebabName}.ts`)
}
// Create index file for re-exports
const indexContent = this.generateIndexFile(functions, 'functions')
const indexPath = path.join(dir, basename, 'index.ts')
if (!this.dryRun) {
await fs.writeFile(indexPath, indexContent, 'utf-8')
}
await this.createIndexFile(functions, 'functions', indexPath)
this.log(` ✓ index.ts`)
// Create class wrapper
const className = this.toClassName(basename)
const classContent = this.generateClassWrapper(className, functions)
const classPath = path.join(dir, basename, `${className}.ts`)
if (!this.dryRun) {
await fs.writeFile(classPath, classContent, 'utf-8')
}
await this.createClassWrapper(className, functions, classPath)
this.log(`${className}.ts`)
// Replace original file with re-export
const newMainContent = `/**
* This file has been refactored into modular lambda-per-file structure.
*
* Import individual functions or use the class wrapper:
* @example
* import { ${functions[0].name} } from './${basename}'
*
* @example
* import { ${className} } from './${basename}'
* ${className}.${functions[0].name}(...)
*/
export * from './${basename}'
`
if (!this.dryRun) {
await fs.writeFile(filePath, newMainContent, 'utf-8')
}
this.log(` ✓ Updated ${path.basename(filePath)}`)
await this.replaceOriginal(filePath, basename, className, functions[0].name)
this.log(` ✅ Refactored into ${functions.length + 2} files`)
}
/** camelCase -> kebab-case file name; a leading capital leaves no leading dash. */
private toKebabCase(str: string): string {
  const dashed = str.replace(/([A-Z])/g, '-$1').toLowerCase()
  return dashed.replace(/^-/, '')
}
/** Kebab/underscore basename -> PascalCase class name with a `Utils` suffix. */
private toClassName(str: string): string {
  const segments = str.split(/[-_]/)
  const pascal = segments
    .map(word => word.charAt(0).toUpperCase() + word.slice(1))
    .join('')
  return `${pascal}Utils`
}
/**
 * Build the contents of the auto-generated barrel file: one named re-export
 * per extracted function, addressed by its kebab-case file name.
 */
private generateIndexFile(functions: ExtractedFunction[], functionsDir: string): string {
  const exportLines = functions.map(func => {
    const fileName = this.toKebabCase(func.name)
    return `export { ${func.name} } from './${functionsDir}/${fileName}'\n`
  })
  return '// Auto-generated re-exports\n\n' + exportLines.join('')
}
/**
 * Build the source text of the `<ClassName>` wrapper class whose static
 * methods forward to the individually extracted functions in ./functions/.
 * NOTE(review): the generated methods take `(...args: any[])`, so the original
 * signatures are lost — callers of the wrapper get no type checking.
 */
private generateClassWrapper(className: string, functions: ExtractedFunction[]): string {
  let content = `// Auto-generated class wrapper\n\n`
  // Import all functions
  for (const func of functions) {
    const kebabName = this.toKebabCase(func.name)
    content += `import { ${func.name} } from './functions/${kebabName}'\n`
  }
  content += `\n/**\n * ${className} - Convenience class wrapper\n */\n`
  content += `export class ${className} {\n`
  // One static pass-through per function; async functions are awaited.
  for (const func of functions) {
    const asyncKeyword = func.isAsync ? 'async ' : ''
    content += `  static ${asyncKeyword}${func.name}(...args: any[]) {\n`
    content += `    return ${func.isAsync ? 'await ' : ''}${func.name}(...args)\n`
    content += `  }\n\n`
  }
  content += '}\n'
  return content
}
// NOTE(review): this wrapper exposes a correctly spelled `analyzeFile` while
// the implementation remains on the typo'd `analyzeFil`. Prefer renaming
// `analyzeFil` itself and deleting this wrapper once no callers reference the
// misspelled name — TODO confirm call sites. Also uses the legacy
// ExtractedFunction/ExtractedImport types; verify they are still exported
// after the AstExtractedFunction migration.
async analyzeFile(filePath: string): Promise<{
  functions: ExtractedFunction[]
  imports: ExtractedImport[]
  types: string[]
}> {
  return this.analyzeFil(filePath)
}
/**
* Process multiple files
*/
async bulkRefactor(files: string[]): Promise<void> {
console.log(`\n📦 AST-based Lambda Refactoring`)
console.log(` Mode: ${this.dryRun ? 'DRY RUN' : 'LIVE'}`)
@@ -381,10 +152,9 @@ export * from './${basename}'
}
}
// CLI
async function main() {
const args = process.argv.slice(2)
if (args.includes('--help') || args.includes('-h') || args.length === 0) {
console.log('AST-based Lambda Refactoring Tool\n')
console.log('Usage: tsx ast-lambda-refactor.ts [options] <file>')
@@ -409,12 +179,7 @@ async function main() {
if (!dryRun) {
console.log('\n🔧 Running linter...')
try {
await execAsync('npm run lint:fix')
console.log(' ✅ Lint complete')
} catch (e) {
console.log(' ⚠️ Lint had warnings (may be expected)')
}
await runLintFix(process.cwd(), message => console.log(message))
}
console.log('\n✨ Done!')

View File

@@ -0,0 +1,111 @@
import * as ts from 'typescript'
import * as fs from 'fs/promises'
import { AstExtractedFunction, ExtractedImport } from '../lambda/types'
import { convertMethodToFunction } from './convert-method-to-function'
// fooBarBaz -> foo-bar-baz; a leading capital does not leave a leading dash.
function toKebabCase(str: string): string {
  const dashed = str.replace(/([A-Z])/g, '-$1').toLowerCase()
  return dashed.startsWith('-') ? dashed.slice(1) : dashed
}
// my-file_name -> MyFileNameUtils (PascalCase of each -/_ separated segment).
function toClassName(str: string): string {
  let pascal = ''
  for (const segment of str.split(/[-_]/)) {
    pascal += segment.charAt(0).toUpperCase() + segment.slice(1)
  }
  return pascal + 'Utils'
}
/**
 * Parse a TypeScript file with the compiler API and collect:
 *  - named function declarations (full text, export/async flags, leading
 *    comments, source positions)
 *  - class methods with identifier names, rewritten as standalone exported
 *    functions via convertMethodToFunction
 *  - import declarations (verbatim text, module specifier, named imports)
 *  - the raw text of type aliases and interfaces
 * Purely syntactic: only createSourceFile is used, no program/type checker.
 */
export async function analyzeAstFile(filePath: string): Promise<{
  functions: AstExtractedFunction[]
  imports: ExtractedImport[]
  types: string[]
}> {
  const sourceCode = await fs.readFile(filePath, 'utf-8')
  // setParentNodes=true so getText()/getStart() work anywhere in the tree.
  const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true)
  const functions: AstExtractedFunction[] = []
  const imports: ExtractedImport[] = []
  const types: string[] = []
  const visit = (node: ts.Node) => {
    // Named function declarations (anonymous ones are skipped).
    if (ts.isFunctionDeclaration(node) && node.name) {
      const isExported = node.modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword) || false
      const isAsync = node.modifiers?.some(m => m.kind === ts.SyntaxKind.AsyncKeyword) || false
      // Comments directly in front of the declaration (between fullStart and start).
      const leadingComments = ts.getLeadingCommentRanges(sourceCode, node.getFullStart())
      let commentText = ''
      if (leadingComments) {
        for (const comment of leadingComments) {
          commentText += sourceCode.substring(comment.pos, comment.end) + '\n'
        }
      }
      functions.push({
        name: node.name.text,
        fullText: node.getText(sourceFile),
        isExported,
        isAsync,
        leadingComments: commentText.trim(),
        startPos: node.getStart(sourceFile),
        endPos: node.getEnd(),
      })
    }
    // Class methods become standalone functions, always marked exported.
    if (ts.isClassDeclaration(node) && node.members) {
      for (const member of node.members) {
        if (ts.isMethodDeclaration(member) && member.name && ts.isIdentifier(member.name)) {
          const isAsync = member.modifiers?.some(m => m.kind === ts.SyntaxKind.AsyncKeyword) || false
          const leadingComments = ts.getLeadingCommentRanges(sourceCode, member.getFullStart())
          let commentText = ''
          if (leadingComments) {
            for (const comment of leadingComments) {
              commentText += sourceCode.substring(comment.pos, comment.end) + '\n'
            }
          }
          const methodText = member.getText(sourceFile)
          // Strips visibility modifiers and inserts the `function` keyword.
          const functionText = convertMethodToFunction(methodText, isAsync)
          functions.push({
            name: member.name.text,
            fullText: functionText,
            isExported: true, // extracted methods are always re-exported
            isAsync,
            leadingComments: commentText.trim(),
            startPos: member.getStart(sourceFile),
            endPos: member.getEnd(),
          })
        }
      }
    }
    // Import declarations, kept verbatim plus their named-import identifiers.
    if (ts.isImportDeclaration(node)) {
      // NOTE(review): unchecked cast — assumes the specifier is a plain string
      // literal, which holds for well-formed TS sources.
      const moduleSpec = (node.moduleSpecifier as ts.StringLiteral).text
      const namedImports: string[] = []
      if (node.importClause?.namedBindings && ts.isNamedImports(node.importClause.namedBindings)) {
        for (const element of node.importClause.namedBindings.elements) {
          namedImports.push(element.name.text)
        }
      }
      imports.push({
        fullText: node.getText(sourceFile),
        moduleSpecifier: moduleSpec,
        namedImports,
      })
    }
    // Raw text of type aliases and interfaces, for later re-emission.
    if (ts.isTypeAliasDeclaration(node) || ts.isInterfaceDeclaration(node)) {
      types.push(node.getText(sourceFile))
    }
    ts.forEachChild(node, visit)
  }
  visit(sourceFile)
  return { functions, imports, types }
}
// Shared naming helpers, exported so the refactor classes can delegate to them
// (kebab-case file names, PascalCase "<Name>Utils" wrapper classes).
export const astNameHelpers = { toKebabCase, toClassName }

View File

@@ -0,0 +1,11 @@
/**
 * Rewrite a class-method source snippet as a standalone function declaration.
 * Strips a leading visibility modifier, prepends `async` when the method was
 * async but the keyword is missing from the text, and inserts the `function`
 * keyword before the method name.
 */
export function convertMethodToFunction(methodText: string, isAsync: boolean): string {
  // 1. Drop `public` / `private` / `protected` from the front.
  const withoutVisibility = methodText.replace(/^\s*(public|private|protected)\s+/, '')
  // 2. Make sure async methods keep their `async` keyword.
  const withAsync =
    isAsync && !withoutVisibility.trim().startsWith('async')
      ? 'async ' + withoutVisibility
      : withoutVisibility
  // 3. `name(args)` -> `function name(args)`, preserving indent and `async `.
  return withAsync.replace(/^(\s*)(async\s+)?([a-zA-Z0-9_]+)(\s*\([^)]*\))/, '$1$2function $3$4')
}

View File

@@ -1,35 +1,17 @@
#!/usr/bin/env tsx
/**
* Bulk Lambda-per-File Refactoring Tool
*
* Automatically refactors TypeScript files into lambda-per-file structure:
* 1. Analyzes file to extract functions/methods
* 2. Creates functions/ subdirectory
* 3. Extracts each function to its own file
* 4. Creates class wrapper
* 5. Updates imports
* 6. Runs linter to fix issues
*/
import * as fs from 'fs/promises'
import * as path from 'path'
import { exec } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
interface FunctionInfo {
name: string
isAsync: boolean
isExported: boolean
params: string
returnType: string
body: string
startLine: number
endLine: number
comments: string[]
isMethod: boolean
}
import { extractFunctions } from './parsing/extract-functions'
import { extractImportsAndTypes } from './parsing/extract-imports-and-types'
import { buildFunctionContent } from './io/build-function-content'
import { buildClassWrapper } from './io/build-class-wrapper'
import { buildIndexContent } from './io/build-index-content'
import { writeFileSafely } from './io/write-file'
import { runLintFix } from './workflow/run-lint'
import { FunctionInfo } from './lambda/types'
interface RefactorResult {
success: boolean
@@ -53,213 +35,79 @@ class BulkLambdaRefactor {
}
}
/**
* Extract functions from a TypeScript file
*/
async extractFunctions(filePath: string): Promise<FunctionInfo[]> {
const content = await fs.readFile(filePath, 'utf-8')
const lines = content.split('\n')
const functions: FunctionInfo[] = []
// Simple regex-based extraction (can be improved with AST parsing)
const functionRegex = /^(export\s+)?(async\s+)?function\s+([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
const methodRegex = /^\s*(public|private|protected)?\s*(async\s+)?([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
let i = 0
while (i < lines.length) {
const line = lines[i]
// Try to match function
const funcMatch = line.match(functionRegex)
const methodMatch = line.match(methodRegex)
if (funcMatch || methodMatch) {
const isMethod = !!methodMatch
const match = funcMatch || methodMatch!
const isExported = !!match[1]
const isAsync = !!(funcMatch ? match[2] : methodMatch![2])
const name = funcMatch ? match[3] : methodMatch![3]
const params = funcMatch ? match[4] : methodMatch![4]
const returnType = (funcMatch ? match[5] : methodMatch![5]) || ''
// Collect comments above function
const comments: string[] = []
let commentLine = i - 1
while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
lines[commentLine].trim().startsWith('*') ||
lines[commentLine].trim().startsWith('/*'))) {
comments.unshift(lines[commentLine])
commentLine--
}
// Find matching closing brace
let braceCount = 1
let bodyStart = i + 1
let j = i
let bodyLines: string[] = [line]
// Count braces to find function end
j++
while (j < lines.length && braceCount > 0) {
bodyLines.push(lines[j])
for (const char of lines[j]) {
if (char === '{') braceCount++
if (char === '}') braceCount--
if (braceCount === 0) break
}
j++
}
functions.push({
name,
isAsync,
isExported,
params,
returnType: returnType.trim(),
body: bodyLines.join('\n'),
startLine: i,
endLine: j - 1,
comments,
isMethod,
})
i = j
} else {
i++
}
}
return functions
// camelCase function name -> kebab-case file name (leading dash stripped so
// PascalCase input does not start the file name with '-').
private toKebabCase(name: string): string {
  return name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
}
/**
* Extract imports and types from original file
*/
async extractImportsAndTypes(filePath: string): Promise<{ imports: string[]; types: string[] }> {
const content = await fs.readFile(filePath, 'utf-8')
const lines = content.split('\n')
const imports: string[] = []
const types: string[] = []
let inImport = false
let currentImport = ''
for (const line of lines) {
const trimmed = line.trim()
// Handle multi-line imports
if (trimmed.startsWith('import ') || inImport) {
currentImport += line + '\n'
if (trimmed.includes('}') || (!trimmed.includes('{') && trimmed.endsWith("'"))) {
imports.push(currentImport.trim())
currentImport = ''
inImport = false
} else {
inImport = true
}
}
// Extract type definitions
if (trimmed.startsWith('export type ') || trimmed.startsWith('export interface ') ||
trimmed.startsWith('type ') || trimmed.startsWith('interface ')) {
types.push(line)
}
}
return { imports, types }
/**
 * Derive the wrapper class name from a file basename:
 * "bulk-lambda-refactor" -> "BulkLambdaRefactorUtils".
 * Fix: also split on '_' so snake_case basenames produce clean PascalCase
 * names, consistent with the AST tool's astNameHelpers.toClassName
 * (previously "my_tool" became "My_toolUtils").
 */
private toClassName(name: string): string {
  return (
    name
      .split(/[-_]/)
      .map(part => part.charAt(0).toUpperCase() + part.slice(1))
      .join('') + 'Utils'
  )
}
/**
* Generate individual function file
*/
generateFunctionFile(func: FunctionInfo, imports: string[], types: string[]): string {
let content = ''
// Add relevant imports (simplified - could be smarter about which imports are needed)
if (imports.length > 0) {
content += imports.join('\n') + '\n\n'
}
// Add comments
if (func.comments.length > 0) {
content += func.comments.join('\n') + '\n'
}
// Add function
const asyncKeyword = func.isAsync ? 'async ' : ''
const exportKeyword = 'export '
content += `${exportKeyword}${asyncKeyword}function ${func.name}${func.params}${func.returnType} {\n`
// Extract function body (remove first and last line which are the function declaration and closing brace)
const bodyLines = func.body.split('\n')
const actualBody = bodyLines.slice(1, -1).join('\n')
content += actualBody + '\n'
content += '}\n'
return content
}
/**
* Generate class wrapper file
*/
generateClassWrapper(className: string, functions: FunctionInfo[], functionsDir: string): string {
let content = ''
// Import all functions
content += `// Auto-generated class wrapper\n`
private async writeFunctions(
functions: FunctionInfo[],
imports: string[],
types: string[],
functionsDir: string,
result: RefactorResult
) {
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
content += `import { ${func.name} } from './${functionsDir}/${kebabName}'\n`
const kebabName = this.toKebabCase(func.name)
const funcFilePath = path.join(functionsDir, `${kebabName}.ts`)
const content = buildFunctionContent(func, imports, types)
await writeFileSafely(funcFilePath, content, this.dryRun)
result.newFiles.push(funcFilePath)
this.log(`${kebabName}.ts`)
}
content += `\n/**\n`
content += ` * ${className} - Class wrapper for ${functions.length} functions\n`
content += ` * \n`
content += ` * This is a convenience wrapper. Prefer importing individual functions.\n`
content += ` */\n`
content += `export class ${className} {\n`
// Add static methods
for (const func of functions) {
const asyncKeyword = func.isAsync ? 'async ' : ''
content += ` static ${asyncKeyword}${func.name}${func.params}${func.returnType} {\n`
content += ` return ${func.isAsync ? 'await ' : ''}${func.name}(...arguments as any)\n`
content += ` }\n\n`
}
content += '}\n'
return content
}
/**
* Generate index file that re-exports everything
*/
generateIndexFile(functions: FunctionInfo[], functionsDir: string, className: string): string {
let content = ''
content += `// Auto-generated re-exports for backward compatibility\n\n`
// Re-export all functions
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
content += `export { ${func.name} } from './${functionsDir}/${kebabName}'\n`
}
// Re-export class wrapper
content += `\n// Class wrapper for convenience\n`
content += `export { ${className} } from './${className}'\n`
return content
/**
 * Emit the `<ClassName>Utils` wrapper file for the extracted functions,
 * record it on the result, and return the derived class name for reuse by
 * the index writer.
 */
private async writeClassWrapper(
  basename: string,
  functions: FunctionInfo[],
  dir: string,
  result: RefactorResult
) {
  const className = this.toClassName(basename)
  const classFilePath = path.join(dir, basename, `${className}.ts`)
  // Wrapper forwards to the functions emitted under ./functions.
  const classContent = buildClassWrapper(className, functions, 'functions')
  await writeFileSafely(classFilePath, classContent, this.dryRun)
  result.newFiles.push(classFilePath)
  this.log(`${className}.ts (class wrapper)`)
  return className
}
/**
 * Emit the barrel `index.ts` re-exporting every function plus the class
 * wrapper, and record the new file on the result.
 */
private async writeIndex(
  functions: FunctionInfo[],
  dir: string,
  basename: string,
  className: string,
  result: RefactorResult
) {
  const indexFilePath = path.join(dir, basename, 'index.ts')
  const indexContent = buildIndexContent(functions, 'functions', className)
  await writeFileSafely(indexFilePath, indexContent, this.dryRun)
  result.newFiles.push(indexFilePath)
  this.log(' ✓ index.ts (re-exports)')
}
/**
 * Overwrite the original file with a stub that re-exports everything from the
 * new `<basename>/` directory, preserving existing import paths for callers.
 *
 * Bug fix: these string fragments previously used escaped `\\n`, which wrote
 * the literal two-character sequence backslash-n into the file instead of real
 * newlines, producing a one-line, syntactically broken stub (the pre-refactor
 * code used real `\n`).
 */
private async writeReexportFile(filePath: string, basename: string) {
  const reexportContent =
    `// This file has been refactored into modular functions\n` +
    `// Import from individual functions or use the class wrapper\n\n` +
    `export * from './${basename}'\n`
  await writeFileSafely(filePath, reexportContent, this.dryRun)
  this.log(` ✓ Updated ${path.basename(filePath)} to re-export`)
}
/**
* Refactor a single file
*/
async refactorFile(filePath: string): Promise<RefactorResult> {
const result: RefactorResult = {
success: false,
@@ -270,88 +118,37 @@ class BulkLambdaRefactor {
try {
this.log(`\n🔍 Analyzing ${filePath}...`)
// Extract functions
const functions = await this.extractFunctions(filePath)
const functions = await extractFunctions(filePath)
if (functions.length === 0) {
result.errors.push('No functions found to extract')
return result
}
// Skip if only 1-2 functions (not worth refactoring)
if (functions.length <= 2) {
result.errors.push(`Only ${functions.length} function(s) - skipping`)
return result
}
this.log(` Found ${functions.length} functions: ${functions.map(f => f.name).join(', ')}`)
// Extract imports and types
const { imports, types } = await this.extractImportsAndTypes(filePath)
// Create directories
const { imports, types } = await extractImportsAndTypes(filePath)
const dir = path.dirname(filePath)
const basename = path.basename(filePath, path.extname(filePath))
const functionsDir = path.join(dir, basename, 'functions')
if (!this.dryRun) {
await fs.mkdir(functionsDir, { recursive: true })
}
this.log(` Creating functions directory: ${functionsDir}`)
// Generate function files
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
const funcFilePath = path.join(functionsDir, `${kebabName}.ts`)
const content = this.generateFunctionFile(func, imports, types)
if (!this.dryRun) {
await fs.writeFile(funcFilePath, content, 'utf-8')
}
result.newFiles.push(funcFilePath)
this.log(`${kebabName}.ts`)
}
// Generate class wrapper
const className = basename.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join('') + 'Utils'
const classFilePath = path.join(dir, basename, `${className}.ts`)
const classContent = this.generateClassWrapper(className, functions, 'functions')
if (!this.dryRun) {
await fs.writeFile(classFilePath, classContent, 'utf-8')
}
result.newFiles.push(classFilePath)
this.log(`${className}.ts (class wrapper)`)
// Generate index file
const indexFilePath = path.join(dir, basename, 'index.ts')
const indexContent = this.generateIndexFile(functions, 'functions', className)
if (!this.dryRun) {
await fs.writeFile(indexFilePath, indexContent, 'utf-8')
}
result.newFiles.push(indexFilePath)
this.log(` ✓ index.ts (re-exports)`)
// Update original file to re-export from new location
const reexportContent = `// This file has been refactored into modular functions\n` +
`// Import from individual functions or use the class wrapper\n\n` +
`export * from './${basename}'\n`
if (!this.dryRun) {
await fs.writeFile(filePath, reexportContent, 'utf-8')
}
this.log(` ✓ Updated ${path.basename(filePath)} to re-export`)
await this.writeFunctions(functions, imports, types, functionsDir, result)
const className = await this.writeClassWrapper(basename, functions, dir, result)
await this.writeIndex(functions, dir, basename, className, result)
await this.writeReexportFile(filePath, basename)
result.success = true
this.log(` ✅ Successfully refactored into ${result.newFiles.length} files`)
} catch (error) {
result.errors.push(`Error: ${error instanceof Error ? error.message : String(error)}`)
this.log(` ❌ Failed: ${result.errors[0]}`)
@@ -360,42 +157,23 @@ class BulkLambdaRefactor {
return result
}
/**
 * Run `npm run lint:fix` in workingDir and mirror its stdout/stderr through
 * this.log. A failing lint exit code is deliberately swallowed and only
 * logged — the string itself says failures "may be expected" after a bulk
 * refactor.
 */
async runLintFix(workingDir: string): Promise<void> {
  this.log('\n🔧 Running ESLint to fix imports and formatting...')
  try {
    const { stdout, stderr } = await execAsync('npm run lint:fix', { cwd: workingDir })
    if (stdout) this.log(stdout)
    if (stderr) this.log(stderr)
    this.log(' ✅ Linting completed')
  } catch (error) {
    // Deliberate best-effort: report and continue rather than abort the run.
    this.log(` ⚠️ Linting had issues (may be expected): ${error}`)
  }
}
/**
* Bulk refactor multiple files
*/
async bulkRefactor(files: string[]): Promise<RefactorResult[]> {
console.log(`\n📦 Bulk Lambda Refactoring Tool`)
console.log(` Mode: ${this.dryRun ? 'DRY RUN' : 'LIVE'}`)
console.log(` Files to process: ${files.length}\n`)
const results: RefactorResult[] = []
let successCount = 0
let skipCount = 0
let errorCount = 0
for (let i = 0; i < files.length; i++) {
const file = files[i]
console.log(`[${i + 1}/${files.length}] Processing: ${file}`)
const result = await this.refactorFile(file)
results.push(result)
if (result.success) {
successCount++
} else if (result.errors.some(e => e.includes('skipping'))) {
@@ -403,29 +181,31 @@ class BulkLambdaRefactor {
} else {
errorCount++
}
// Small delay to avoid overwhelming the system
await new Promise(resolve => setTimeout(resolve, 100))
}
console.log(`\n📊 Summary:`)
console.log(` ✅ Success: ${successCount}`)
console.log(` ⏭️ Skipped: ${skipCount}`)
console.log(` ❌ Errors: ${errorCount}`)
console.log(` 📁 Total new files: ${results.reduce((acc, r) => acc + r.newFiles.length, 0)}`)
return results
}
// Public API kept for CLI callers; delegates to the imported workflow helper
// (module-level runLintFix), forwarding its output through this.log.
async runLintFix(workingDir: string): Promise<void> {
  await runLintFix(workingDir, message => this.log(message))
}
}
// CLI
async function main() {
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run') || args.includes('-d')
const verbose = args.includes('--verbose') || args.includes('-v')
const filesArg = args.find(arg => !arg.startsWith('-'))
if (!filesArg && !args.includes('--help') && !args.includes('-h')) {
console.log('Usage: tsx bulk-lambda-refactor.ts [options] <file-pattern>')
console.log('\nOptions:')
@@ -450,17 +230,15 @@ async function main() {
}
const refactor = new BulkLambdaRefactor({ dryRun, verbose })
// For now, process single file (can be extended to glob patterns)
const files = [filesArg!]
const results = await refactor.bulkRefactor(files)
if (!dryRun && results.some(r => r.success)) {
console.log('\n🔧 Running linter to fix imports...')
await refactor.runLintFix(process.cwd())
}
console.log('\n✨ Done!')
}

View File

@@ -8,43 +8,7 @@
import { BulkLambdaRefactor } from '../bulk-lambda-refactor'
import * as fs from 'fs/promises'
import * as path from 'path'
interface FileToRefactor {
path: string
lines: number
category: string
priority: 'high' | 'medium' | 'low'
}
/**
 * Parse docs/todo/LAMBDA_REFACTOR_PROGRESS.md into a flat work list.
 * Priority follows the most recent "### <High|Medium|Low> Priority" heading;
 * parsing stops at the "### Skipped" section. Checklist entries look like:
 *   - [ ] `path/to/file.ts` (123 lines)
 */
async function loadFilesFromReport(): Promise<FileToRefactor[]> {
  const reportPath = path.join(process.cwd(), 'docs/todo/LAMBDA_REFACTOR_PROGRESS.md')
  const markdown = await fs.readFile(reportPath, 'utf-8')
  const entries: FileToRefactor[] = []
  let priority: 'high' | 'medium' | 'low' = 'high'
  for (const line of markdown.split('\n')) {
    if (line.includes('### High Priority')) priority = 'high'
    else if (line.includes('### Medium Priority')) priority = 'medium'
    else if (line.includes('### Low Priority')) priority = 'low'
    else if (line.includes('### Skipped')) break
    const item = line.match(/- \[ \] `([^`]+)` \((\d+) lines\)/)
    if (item) {
      entries.push({
        path: item[1],
        lines: parseInt(item[2], 10),
        category: priority,
        priority,
      })
    }
  }
  return entries
}
import { loadFilesFromReport } from './utils/load-files-from-report'
async function main() {
const args = process.argv.slice(2)

View File

@@ -13,10 +13,8 @@
import { ASTLambdaRefactor } from '../ast-lambda-refactor'
import * as fs from 'fs/promises'
import * as path from 'path'
import { exec } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
import { loadFilesFromReport } from './utils/load-files-from-report'
import { runCommand } from './utils/run-command'
interface FileToProcess {
path: string
@@ -26,43 +24,6 @@ interface FileToProcess {
error?: string
}
/**
 * Parse docs/todo/LAMBDA_REFACTOR_PROGRESS.md into the pending work list.
 * Priority tracks the most recent "### <High|Medium|Low> Priority" heading;
 * the "### Skipped" section terminates parsing. Every parsed entry starts in
 * status 'pending'.
 */
async function loadFilesFromReport(): Promise<FileToProcess[]> {
  const reportPath = path.join(process.cwd(), 'docs/todo/LAMBDA_REFACTOR_PROGRESS.md')
  const content = await fs.readFile(reportPath, 'utf-8')
  const files: FileToProcess[] = []
  const lines = content.split('\n')
  let currentPriority: 'high' | 'medium' | 'low' = 'high'
  for (const line of lines) {
    if (line.includes('### High Priority')) currentPriority = 'high'
    else if (line.includes('### Medium Priority')) currentPriority = 'medium'
    else if (line.includes('### Low Priority')) currentPriority = 'low'
    else if (line.includes('### Skipped')) break
    // Checklist rows look like: - [ ] `path/to/file.ts` (123 lines)
    const match = line.match(/- \[ \] `([^`]+)` \((\d+) lines\)/)
    if (match) {
      files.push({
        path: match[1],
        lines: parseInt(match[2], 10),
        priority: currentPriority,
        status: 'pending',
      })
    }
  }
  return files
}
async function runCommand(cmd: string, cwd: string = process.cwd()): Promise<{ stdout: string; stderr: string }> {
try {
return await execAsync(cmd, { cwd, maxBuffer: 10 * 1024 * 1024 })
} catch (error: any) {
return { stdout: error.stdout || '', stderr: error.stderr || error.message }
}
}
async function main() {
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run') || args.includes('-d')
@@ -76,7 +37,10 @@ async function main() {
// Load files
console.log('📋 Loading files from tracking report...')
let files = await loadFilesFromReport()
let files: FileToProcess[] = (await loadFilesFromReport()).map(file => ({
...file,
status: 'pending',
}))
if (priorityFilter !== 'all') {
files = files.filter(f => f.priority === priorityFilter)

View File

@@ -5,217 +5,10 @@
* This tool helps identify files exceeding 150 lines and tracks refactoring progress.
*/
import { exec } from 'child_process'
import { promisify } from 'util'
import * as fs from 'fs/promises'
import * as path from 'path'
const execAsync = promisify(exec)
interface FileInfo {
path: string
lines: number
category: 'component' | 'library' | 'test' | 'tool' | 'dbal' | 'type' | 'other'
priority: number
status: 'pending' | 'in-progress' | 'completed' | 'skipped'
reason?: string
}
async function countLines(filePath: string): Promise<number> {
try {
const content = await fs.readFile(filePath, 'utf-8')
return content.split('\n').length
} catch {
return 0
}
}
function categorizeFile(filePath: string): FileInfo['category'] {
if (filePath.includes('.test.')) return 'test'
if (filePath.endsWith('.tsx')) return 'component'
if (filePath.includes('/tools/')) return 'tool'
if (filePath.includes('/dbal/')) return 'dbal'
if (filePath.includes('/types/') || filePath.endsWith('.d.ts')) return 'type'
if (filePath.includes('/lib/') && filePath.endsWith('.ts')) return 'library'
return 'other'
}
function calculatePriority(file: FileInfo): number {
// Higher priority for library files (easiest to refactor)
// Lower priority for components (need more complex refactoring)
// Skip tests and types
const categoryPriority = {
library: 10,
tool: 8,
dbal: 6,
component: 4,
test: 0, // Skip
type: 0, // Skip
other: 2,
}
const base = categoryPriority[file.category]
// Prioritize moderately large files over extremely large ones
// (easier to refactor step-by-step)
if (file.lines > 1000) return base - 3
if (file.lines > 500) return base - 1
if (file.lines > 300) return base
return base + 1
}
async function findLargeFiles(rootDir: string, minLines: number = 150): Promise<FileInfo[]> {
const { stdout } = await execAsync(
`find ${rootDir} \\( -name "*.ts" -o -name "*.tsx" \\) ` +
`-not -path "*/node_modules/*" ` +
`-not -path "*/.next/*" ` +
`-not -path "*/dist/*" ` +
`-not -path "*/build/*" ` +
`-exec sh -c 'lines=$(wc -l < "$1"); if [ "$lines" -gt ${minLines} ]; then echo "$lines $1"; fi' _ {} \\;`
)
const files: FileInfo[] = []
for (const line of stdout.trim().split('\n').filter(Boolean)) {
const [linesStr, filePath] = line.trim().split(' ', 2)
const lines = parseInt(linesStr, 10)
const category = categorizeFile(filePath)
const fileInfo: FileInfo = {
path: filePath.replace(rootDir + '/', ''),
lines,
category,
priority: 0,
status: category === 'test' || category === 'type' ? 'skipped' : 'pending',
reason: category === 'test' ? 'Test files can remain large for comprehensive coverage' :
category === 'type' ? 'Type definition files are typically large' : undefined
}
fileInfo.priority = calculatePriority(fileInfo)
files.push(fileInfo)
}
return files.sort((a, b) => b.priority - a.priority || b.lines - a.lines)
}
async function generateReport(files: FileInfo[]): Promise<string> {
const total = files.length
const byCategory = files.reduce((acc, f) => {
acc[f.category] = (acc[f.category] || 0) + 1
return acc
}, {} as Record<string, number>)
const byStatus = files.reduce((acc, f) => {
acc[f.status] = (acc[f.status] || 0) + 1
return acc
}, {} as Record<string, number>)
let report = '# Lambda-per-File Refactoring Progress\n\n'
report += `**Generated:** ${new Date().toISOString()}\n\n`
report += `## Summary\n\n`
report += `- **Total files > 150 lines:** ${total}\n`
report += `- **Pending:** ${byStatus.pending || 0}\n`
report += `- **In Progress:** ${byStatus['in-progress'] || 0}\n`
report += `- **Completed:** ${byStatus.completed || 0}\n`
report += `- **Skipped:** ${byStatus.skipped || 0}\n\n`
report += `## By Category\n\n`
for (const [category, count] of Object.entries(byCategory).sort((a, b) => b[1] - a[1])) {
report += `- **${category}:** ${count}\n`
}
report += `\n## Refactoring Queue\n\n`
report += `Files are prioritized by ease of refactoring and impact.\n\n`
// Group by priority
const highPriority = files.filter(f => f.priority >= 8 && f.status === 'pending')
const medPriority = files.filter(f => f.priority >= 4 && f.priority < 8 && f.status === 'pending')
const lowPriority = files.filter(f => f.priority < 4 && f.status === 'pending')
if (highPriority.length > 0) {
report += `### High Priority (${highPriority.length} files)\n\n`
report += `Library and tool files - easiest to refactor\n\n`
for (const file of highPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (highPriority.length > 20) {
report += `- ... and ${highPriority.length - 20} more\n`
}
report += `\n`
}
if (medPriority.length > 0) {
report += `### Medium Priority (${medPriority.length} files)\n\n`
report += `DBAL and component files - moderate complexity\n\n`
for (const file of medPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (medPriority.length > 20) {
report += `- ... and ${medPriority.length - 20} more\n`
}
report += `\n`
}
if (lowPriority.length > 0) {
report += `### Low Priority (${lowPriority.length} files)\n\n`
for (const file of lowPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (lowPriority.length > 20) {
report += `- ... and ${lowPriority.length - 20} more\n`
}
report += `\n`
}
// Skipped files
const skipped = files.filter(f => f.status === 'skipped')
if (skipped.length > 0) {
report += `### Skipped Files (${skipped.length})\n\n`
report += `These files do not need refactoring:\n\n`
for (const file of skipped.slice(0, 10)) {
report += `- \`${file.path}\` (${file.lines} lines) - ${file.reason}\n`
}
if (skipped.length > 10) {
report += `- ... and ${skipped.length - 10} more\n`
}
report += `\n`
}
report += `## Refactoring Patterns\n\n`
report += `### For Library Files\n`
report += `1. Create a \`functions/\` subdirectory\n`
report += `2. Extract each function to its own file\n`
report += `3. Create a class wrapper (like SchemaUtils)\n`
report += `4. Update main file to re-export\n`
report += `5. Verify tests still pass\n\n`
report += `### For Components\n`
report += `1. Extract hooks into separate files\n`
report += `2. Extract sub-components\n`
report += `3. Extract utility functions\n`
report += `4. Keep main component < 150 lines\n\n`
report += `### For DBAL Files\n`
report += `1. Split adapters by operation type\n`
report += `2. Extract provider implementations\n`
report += `3. Keep interfaces separate from implementations\n\n`
report += `## Example: SchemaUtils Pattern\n\n`
report += `The \`frontends/nextjs/src/lib/schema/\` directory demonstrates the lambda-per-file pattern:\n\n`
report += `\`\`\`\n`
report += `schema/\n`
report += `├── functions/\n`
report += `│ ├── field/\n`
report += `│ │ ├── get-field-label.ts\n`
report += `│ │ ├── validate-field.ts\n`
report += `│ │ └── ...\n`
report += `│ ├── model/\n`
report += `│ │ ├── find-model.ts\n`
report += `│ │ └── ...\n`
report += `│ └── index.ts (re-exports all)\n`
report += `├── SchemaUtils.ts (class wrapper)\n`
report += `└── schema-utils.ts (backward compat re-exports)\n`
report += `\`\`\`\n\n`
return report
}
import { findLargeFiles } from '../reporting/find-large-files'
import { generateProgressReport } from '../reporting/generate-progress-report'
async function main() {
const rootDir = process.cwd()
@@ -224,7 +17,7 @@ async function main() {
const files = await findLargeFiles(rootDir, 150)
console.log(`Found ${files.length} files`)
const report = await generateReport(files)
const report = await generateProgressReport(files)
const outputPath = path.join(rootDir, 'docs', 'todo', 'LAMBDA_REFACTOR_PROGRESS.md')
await fs.writeFile(outputPath, report, 'utf-8')
@@ -240,4 +33,4 @@ if (require.main === module) {
main().catch(console.error)
}
export { findLargeFiles, generateReport }
export { findLargeFiles, generateProgressReport as generateReport }

View File

@@ -0,0 +1,36 @@
import * as fs from 'fs/promises'
import * as path from 'path'
/** A pending entry parsed from the refactoring progress report. */
interface FileFromReport {
  path: string
  lines: number
  priority: 'high' | 'medium' | 'low'
}

/**
 * Parse docs/todo/LAMBDA_REFACTOR_PROGRESS.md (relative to the current
 * working directory) into the list of files still awaiting refactoring.
 *
 * Priority is taken from the most recent `### <Level> Priority` heading;
 * parsing stops at the `### Skipped` section. Only unchecked checklist
 * items of the form "- [ ] `path` (N lines)" are collected.
 */
export async function loadFilesFromReport(): Promise<FileFromReport[]> {
  const reportPath = path.join(process.cwd(), 'docs/todo/LAMBDA_REFACTOR_PROGRESS.md')
  const markdown = await fs.readFile(reportPath, 'utf-8')
  const itemPattern = /- \[ \] `([^`]+)` \((\d+) lines\)/
  const entries: FileFromReport[] = []
  let priority: 'high' | 'medium' | 'low' = 'high'
  for (const row of markdown.split('\n')) {
    if (row.includes('### High Priority')) {
      priority = 'high'
    } else if (row.includes('### Medium Priority')) {
      priority = 'medium'
    } else if (row.includes('### Low Priority')) {
      priority = 'low'
    } else if (row.includes('### Skipped')) {
      break
    }
    const item = itemPattern.exec(row)
    if (item) {
      entries.push({
        path: item[1],
        lines: parseInt(item[2], 10),
        priority,
      })
    }
  }
  return entries
}

View File

@@ -0,0 +1,12 @@
import { exec } from 'child_process'
import { promisify } from 'util'

const execAsync = promisify(exec)

/**
 * Execute a shell command and capture its output. Never rejects: when the
 * command fails, the child's captured stdout/stderr (or the error message)
 * are returned instead, so callers can inspect output without try/catch.
 *
 * @param cmd - Command line passed to the shell.
 * @param cwd - Working directory (defaults to the current process cwd).
 */
export async function runCommand(cmd: string, cwd: string = process.cwd()): Promise<{ stdout: string; stderr: string }> {
  try {
    // 10 MiB buffer: refactoring runs can produce large lint/build output.
    return await execAsync(cmd, { cwd, maxBuffer: 10 * 1024 * 1024 })
  } catch (error: unknown) {
    // exec() rejects with an Error augmented with the child's stdout/stderr.
    // Narrow from unknown instead of the previous `error: any`, and fall back
    // to String(error) so stderr is always a string (it could previously be
    // undefined when both stderr and message were missing).
    const failure = error as { stdout?: string; stderr?: string; message?: string }
    return {
      stdout: failure.stdout || '',
      stderr: failure.stderr || failure.message || String(error),
    }
  }
}

View File

@@ -0,0 +1,24 @@
import { AstExtractedFunction, ExtractedImport } from '../lambda/types'
/**
 * Assemble the source text of a standalone module for an AST-extracted
 * function: its required imports, its preserved leading comment block, then
 * the function itself with an `export` modifier ensured.
 *
 * @param func - Function captured by the AST analyzer.
 * @param imports - Import statements the function's body depends on.
 * @returns Complete file content, terminated by a newline.
 */
export function buildAstFunctionContent(func: AstExtractedFunction, imports: ExtractedImport[]): string {
  let content = ''
  if (imports.length > 0) {
    content += imports.map(imp => imp.fullText).join('\n') + '\n\n'
  }
  if (func.leadingComments) {
    content += func.leadingComments + '\n'
  }
  // Ensure the function is exported in its new home. The previous version
  // had an if/else-if pair whose branches were identical (both prepended
  // 'export '); collapsed to the single equivalent check.
  // NOTE: `includes('export ')` is a heuristic — it can be satisfied by the
  // word appearing anywhere in the function text, not just as a modifier.
  let funcText = func.fullText
  if (!funcText.includes('export ')) {
    funcText = 'export ' + funcText
  }
  content += funcText + '\n'
  return content
}

View File

@@ -0,0 +1,31 @@
/** The minimum function metadata needed to emit a wrapper method. */
interface MinimalFunctionInfo {
  name: string
  isAsync: boolean
}

/**
 * Generate the source of a class whose static methods delegate to the
 * extracted per-file functions, importing each one from its kebab-cased
 * module under `functionsDir`. The class is a convenience facade; callers
 * are encouraged to import the individual functions directly.
 */
export function buildClassWrapper(className: string, functions: MinimalFunctionInfo[], functionsDir: string): string {
  // camelCase -> kebab-case, matching the extracted file names.
  const toKebab = (name: string) => name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
  const importLines = functions
    .map(func => `import { ${func.name} } from './${functionsDir}/${toKebab(func.name)}'\n`)
    .join('')
  const methodBlocks = functions
    .map(func => {
      const asyncKeyword = func.isAsync ? 'async ' : ''
      const awaitKeyword = func.isAsync ? 'await ' : ''
      return (
        `  static ${asyncKeyword}${func.name}(...args: any[]) {\n` +
        `    return ${awaitKeyword}${func.name}(...args as any)\n` +
        `  }\n\n`
      )
    })
    .join('')
  const classDoc =
    `\n/**\n * ${className} - Class wrapper for ${functions.length} functions\n` +
    ` * \n` +
    ` * This is a convenience wrapper. Prefer importing individual functions.\n` +
    ` */\n`
  return (
    `// Auto-generated class wrapper\n` +
    importLines +
    classDoc +
    `export class ${className} {\n` +
    methodBlocks +
    '}\n'
  )
}

View File

@@ -0,0 +1,30 @@
import { FunctionInfo } from '../lambda/types'
/**
 * Render a regex-extracted function as the content of a standalone module:
 * imports, then type declarations, then preserved comments, then the
 * re-assembled (always exported) function.
 *
 * The first and last lines of `func.body` are dropped: they carry the
 * original signature line and the closing brace, both of which are
 * re-emitted here from the captured metadata.
 */
export function buildFunctionContent(func: FunctionInfo, imports: string[], types: string[]): string {
  const header: string[] = []
  if (imports.length > 0) {
    header.push(imports.join('\n'), '')
  }
  if (types.length > 0) {
    header.push(types.join('\n'), '')
  }
  header.push(...func.comments)
  const modifier = func.isAsync ? 'export async ' : 'export '
  const signature = `${modifier}function ${func.name}${func.params}${func.returnType} {`
  const innerBody = func.body.split('\n').slice(1, -1).join('\n')
  return [...header, signature, innerBody, '}'].join('\n') + '\n'
}

View File

@@ -0,0 +1,21 @@
/** The minimum function metadata needed to emit a re-export line. */
interface MinimalFunctionInfo {
  name: string
}

/**
 * Build the backward-compatibility index: one re-export per extracted
 * function (from its kebab-cased module under `functionsDir`), plus an
 * optional re-export of the generated class wrapper.
 */
export function buildIndexContent(functions: MinimalFunctionInfo[], functionsDir: string, className?: string): string {
  // camelCase -> kebab-case, matching the extracted file names.
  const toKebab = (name: string) => name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
  const reExports = functions
    .map(func => `export { ${func.name} } from './${functionsDir}/${toKebab(func.name)}'\n`)
    .join('')
  const wrapperExport = className
    ? `\n// Class wrapper for convenience\nexport { ${className} } from './${className}'\n`
    : ''
  return '// Auto-generated re-exports for backward compatibility\n\n' + reExports + wrapperExport
}

View File

@@ -0,0 +1,11 @@
import * as fs from 'fs/promises'
import * as path from 'path'
/**
 * Write `content` to `targetPath`, creating any missing parent directories
 * first. A no-op when `dryRun` is true, so preview runs never touch disk.
 */
export async function writeFileSafely(targetPath: string, content: string, dryRun: boolean): Promise<void> {
  if (dryRun) return
  const parentDir = path.dirname(targetPath)
  await fs.mkdir(parentDir, { recursive: true })
  await fs.writeFile(targetPath, content, 'utf-8')
}

View File

@@ -0,0 +1,28 @@
/** Function metadata produced by the regex-based extractor (parse/extract-functions). */
export interface FunctionInfo {
  name: string
  isAsync: boolean
  // True for free functions with an `export` modifier. NOTE(review): the
  // extractor also sets this for methods with an explicit visibility
  // modifier — confirm whether that is intended.
  isExported: boolean
  // Parameter list including parentheses, e.g. "(a: number, b: string)".
  params: string
  // Return type annotation including the leading ':' (trimmed), or '' when absent.
  returnType: string
  // Full source text of the function, from the signature line through the
  // line containing the closing brace.
  body: string
  // Zero-based line index of the signature line within the source file.
  startLine: number
  // Zero-based line index of the closing-brace line.
  endLine: number
  // Contiguous comment lines immediately preceding the declaration, in order.
  comments: string[]
  // True when matched by the class-method pattern rather than a
  // free-standing `function` declaration.
  isMethod: boolean
}
/** Function metadata produced by the TypeScript-AST-based extractor. */
export interface AstExtractedFunction {
  name: string
  // Complete source text of the declaration.
  fullText: string
  isExported: boolean
  isAsync: boolean
  // Leading comment text, '' when none — presumably taken from the AST
  // node's leading trivia; confirm against analyze-ast-file.
  leadingComments: string
  // Character offsets of the declaration within the source file — TODO
  // confirm whether they include leading trivia.
  startPos: number
  endPos: number
}
/** An import statement captured from a scanned source file. */
export interface ExtractedImport {
  // The import statement's full source text.
  fullText: string
  // The module path being imported from, e.g. 'fs/promises'.
  moduleSpecifier: string
  // Named bindings, e.g. ['a', 'b'] for `import { a, b } from 'm'`.
  namedImports: string[]
}

View File

@@ -0,0 +1,73 @@
import * as fs from 'fs/promises'
import { FunctionInfo } from '../lambda/types'
/**
 * Regex-based scanner: locate free-standing functions and class methods in
 * a TypeScript file and capture their full text plus metadata.
 *
 * Heuristic, not a real parser:
 * - Braces inside strings, template literals, or comments are counted while
 *   measuring the body, so such bodies can be mis-measured.
 * - Parameter lists must fit on the signature line (`[^)]*` is single-line).
 * - The method pattern has the same shape as control-flow statements like
 *   `if (cond) {`, so those can be reported as methods — NOTE(review):
 *   callers should be prepared to filter false positives.
 * - A one-line function (`function f() { return 1 }`) does not terminate on
 *   its own line: braces on the signature line are not counted.
 *
 * @param filePath - File to scan.
 * @returns Detected functions/methods in source order.
 */
export async function extractFunctions(filePath: string): Promise<FunctionInfo[]> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const functions: FunctionInfo[] = []
  // `export async function name(params): Ret {` — groups: export, async, name, params, return type
  const functionRegex = /^(export\s+)?(async\s+)?function\s+([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  // `private async name(params): Ret {` — optional visibility modifier, same trailing groups
  const methodRegex = /^\s*(public|private|protected)?\s*(async\s+)?([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  let i = 0
  while (i < lines.length) {
    const line = lines[i]
    const funcMatch = line.match(functionRegex)
    const methodMatch = line.match(methodRegex)
    if (funcMatch || methodMatch) {
      const isMethod = !!methodMatch
      // A `function` declaration cannot match methodRegex (the keyword is not
      // directly followed by '('), so preferring funcMatch here is safe.
      const match = funcMatch || methodMatch!
      // NOTE(review): for methods, group 1 is the visibility modifier, so any
      // explicit `public`/`private`/`protected` marks the method "exported".
      const isExported = !!match[1]
      const isAsync = !!(funcMatch ? match[2] : methodMatch![2])
      const name = funcMatch ? match[3] : methodMatch![3]
      const params = funcMatch ? match[4] : methodMatch![4]
      const returnType = (funcMatch ? match[5] : methodMatch![5]) || ''
      // Walk backwards to collect the contiguous comment block directly above
      // the signature (`//`, `/*`, and `*` continuation lines).
      const comments: string[] = []
      let commentLine = i - 1
      while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
        lines[commentLine].trim().startsWith('*') ||
        lines[commentLine].trim().startsWith('/*'))) {
        comments.unshift(lines[commentLine])
        commentLine--
      }
      // Brace-count forward from the signature line (which contributed the
      // opening '{') until the body balances out or the file ends.
      let braceCount = 1
      let j = i
      const bodyLines: string[] = [line]
      j++
      while (j < lines.length && braceCount > 0) {
        bodyLines.push(lines[j])
        for (const char of lines[j]) {
          if (char === '{') braceCount++
          if (char === '}') braceCount--
          // Stop at the balancing brace; the rest of this line is ignored.
          if (braceCount === 0) break
        }
        j++
      }
      functions.push({
        name,
        isAsync,
        isExported,
        params,
        returnType: returnType.trim(),
        body: bodyLines.join('\n'),
        startLine: i,   // zero-based index of the signature line
        endLine: j - 1, // zero-based index of the closing-brace line
        comments,
        isMethod,
      })
      // Resume scanning after this function's body.
      i = j
    } else {
      i++
    }
  }
  return functions
}

View File

@@ -0,0 +1,38 @@
import * as fs from 'fs/promises'
/**
 * Collect import statements and type/interface declaration lines from a
 * TypeScript file.
 *
 * Imports may span multiple lines (`import {\n  a,\n} from 'm'`); lines are
 * accumulated until the one carrying the module specifier's closing quote.
 * For types, only the declaration's FIRST line is captured (e.g.
 * `interface Foo {`), not the body.
 *
 * @param filePath - File to scan.
 * @returns The captured import statements and type declaration lines.
 */
export async function extractImportsAndTypes(filePath: string): Promise<{ imports: string[]; types: string[] }> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const imports: string[] = []
  const types: string[] = []
  let inImport = false
  let currentImport = ''
  for (const line of lines) {
    const trimmed = line.trim()
    if (trimmed.startsWith('import ') || inImport) {
      currentImport += line + '\n'
      // An import is complete once a line ends with the module specifier's
      // closing quote, optionally followed by ';'. The previous check —
      // `includes('}') || (!includes('{') && endsWith("'"))` — missed
      // semicolon-terminated imports (`import * as fs from 'fs';`) and
      // double-quoted brace-less imports (`import x from "mod"`), causing
      // the following lines to be swallowed into the pending import.
      if (/["'];?$/.test(trimmed)) {
        imports.push(currentImport.trim())
        currentImport = ''
        inImport = false
      } else {
        inImport = true
      }
    }
    if (
      trimmed.startsWith('export type ') ||
      trimmed.startsWith('export interface ') ||
      trimmed.startsWith('type ') ||
      trimmed.startsWith('interface ')
    ) {
      types.push(line)
    }
  }
  return { imports, types }
}

View File

@@ -1,234 +1,25 @@
#!/usr/bin/env ts-node
/**
* Refactor large TypeScript files into lambda-per-file structure
*
* This tool helps identify files exceeding 150 lines and tracks refactoring progress.
*/
import { exec } from 'child_process'
import { promisify } from 'util'
import * as fs from 'fs/promises'
import * as path from 'path'
const execAsync = promisify(exec)
interface FileInfo {
path: string
lines: number
category: 'component' | 'library' | 'test' | 'tool' | 'dbal' | 'type' | 'other'
priority: number
status: 'pending' | 'in-progress' | 'completed' | 'skipped'
reason?: string
}
async function countLines(filePath: string): Promise<number> {
try {
const content = await fs.readFile(filePath, 'utf-8')
return content.split('\n').length
} catch {
return 0
}
}
function categorizeFile(filePath: string): FileInfo['category'] {
if (filePath.includes('.test.')) return 'test'
if (filePath.endsWith('.tsx')) return 'component'
if (filePath.includes('/tools/')) return 'tool'
if (filePath.includes('/dbal/')) return 'dbal'
if (filePath.includes('/types/') || filePath.endsWith('.d.ts')) return 'type'
if (filePath.includes('/lib/') && filePath.endsWith('.ts')) return 'library'
return 'other'
}
function calculatePriority(file: FileInfo): number {
// Higher priority for library files (easiest to refactor)
// Lower priority for components (need more complex refactoring)
// Skip tests and types
const categoryPriority = {
library: 10,
tool: 8,
dbal: 6,
component: 4,
test: 0, // Skip
type: 0, // Skip
other: 2,
}
const base = categoryPriority[file.category]
// Prioritize moderately large files over extremely large ones
// (easier to refactor step-by-step)
if (file.lines > 1000) return base - 3
if (file.lines > 500) return base - 1
if (file.lines > 300) return base
return base + 1
}
async function findLargeFiles(rootDir: string, minLines: number = 150): Promise<FileInfo[]> {
const { stdout } = await execAsync(
`find ${rootDir} \\( -name "*.ts" -o -name "*.tsx" \\) ` +
`-not -path "*/node_modules/*" ` +
`-not -path "*/.next/*" ` +
`-not -path "*/dist/*" ` +
`-not -path "*/build/*" ` +
`-exec sh -c 'lines=$(wc -l < "$1"); if [ "$lines" -gt ${minLines} ]; then echo "$lines $1"; fi' _ {} \\;`
)
const files: FileInfo[] = []
for (const line of stdout.trim().split('\n').filter(Boolean)) {
const [linesStr, filePath] = line.trim().split(' ', 2)
const lines = parseInt(linesStr, 10)
const category = categorizeFile(filePath)
const fileInfo: FileInfo = {
path: filePath.replace(rootDir + '/', ''),
lines,
category,
priority: 0,
status: category === 'test' || category === 'type' ? 'skipped' : 'pending',
reason: category === 'test' ? 'Test files can remain large for comprehensive coverage' :
category === 'type' ? 'Type definition files are typically large' : undefined
}
fileInfo.priority = calculatePriority(fileInfo)
files.push(fileInfo)
}
return files.sort((a, b) => b.priority - a.priority || b.lines - a.lines)
}
async function generateReport(files: FileInfo[]): Promise<string> {
const total = files.length
const byCategory = files.reduce((acc, f) => {
acc[f.category] = (acc[f.category] || 0) + 1
return acc
}, {} as Record<string, number>)
const byStatus = files.reduce((acc, f) => {
acc[f.status] = (acc[f.status] || 0) + 1
return acc
}, {} as Record<string, number>)
let report = '# Lambda-per-File Refactoring Progress\n\n'
report += `**Generated:** ${new Date().toISOString()}\n\n`
report += `## Summary\n\n`
report += `- **Total files > 150 lines:** ${total}\n`
report += `- **Pending:** ${byStatus.pending || 0}\n`
report += `- **In Progress:** ${byStatus['in-progress'] || 0}\n`
report += `- **Completed:** ${byStatus.completed || 0}\n`
report += `- **Skipped:** ${byStatus.skipped || 0}\n\n`
report += `## By Category\n\n`
for (const [category, count] of Object.entries(byCategory).sort((a, b) => b[1] - a[1])) {
report += `- **${category}:** ${count}\n`
}
report += `\n## Refactoring Queue\n\n`
report += `Files are prioritized by ease of refactoring and impact.\n\n`
// Group by priority
const highPriority = files.filter(f => f.priority >= 8 && f.status === 'pending')
const medPriority = files.filter(f => f.priority >= 4 && f.priority < 8 && f.status === 'pending')
const lowPriority = files.filter(f => f.priority < 4 && f.status === 'pending')
if (highPriority.length > 0) {
report += `### High Priority (${highPriority.length} files)\n\n`
report += `Library and tool files - easiest to refactor\n\n`
for (const file of highPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (highPriority.length > 20) {
report += `- ... and ${highPriority.length - 20} more\n`
}
report += `\n`
}
if (medPriority.length > 0) {
report += `### Medium Priority (${medPriority.length} files)\n\n`
report += `DBAL and component files - moderate complexity\n\n`
for (const file of medPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (medPriority.length > 20) {
report += `- ... and ${medPriority.length - 20} more\n`
}
report += `\n`
}
if (lowPriority.length > 0) {
report += `### Low Priority (${lowPriority.length} files)\n\n`
for (const file of lowPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (lowPriority.length > 20) {
report += `- ... and ${lowPriority.length - 20} more\n`
}
report += `\n`
}
// Skipped files
const skipped = files.filter(f => f.status === 'skipped')
if (skipped.length > 0) {
report += `### Skipped Files (${skipped.length})\n\n`
report += `These files do not need refactoring:\n\n`
for (const file of skipped.slice(0, 10)) {
report += `- \`${file.path}\` (${file.lines} lines) - ${file.reason}\n`
}
if (skipped.length > 10) {
report += `- ... and ${skipped.length - 10} more\n`
}
report += `\n`
}
report += `## Refactoring Patterns\n\n`
report += `### For Library Files\n`
report += `1. Create a \`functions/\` subdirectory\n`
report += `2. Extract each function to its own file\n`
report += `3. Create a class wrapper (like SchemaUtils)\n`
report += `4. Update main file to re-export\n`
report += `5. Verify tests still pass\n\n`
report += `### For Components\n`
report += `1. Extract hooks into separate files\n`
report += `2. Extract sub-components\n`
report += `3. Extract utility functions\n`
report += `4. Keep main component < 150 lines\n\n`
report += `### For DBAL Files\n`
report += `1. Split adapters by operation type\n`
report += `2. Extract provider implementations\n`
report += `3. Keep interfaces separate from implementations\n\n`
report += `## Example: SchemaUtils Pattern\n\n`
report += `The \`frontends/nextjs/src/lib/schema/\` directory demonstrates the lambda-per-file pattern:\n\n`
report += `\`\`\`\n`
report += `schema/\n`
report += `├── functions/\n`
report += `│ ├── field/\n`
report += `│ │ ├── get-field-label.ts\n`
report += `│ │ ├── validate-field.ts\n`
report += `│ │ └── ...\n`
report += `│ ├── model/\n`
report += `│ │ ├── find-model.ts\n`
report += `│ │ └── ...\n`
report += `│ └── index.ts (re-exports all)\n`
report += `├── SchemaUtils.ts (class wrapper)\n`
report += `└── schema-utils.ts (backward compat re-exports)\n`
report += `\`\`\`\n\n`
return report
}
import { findLargeFiles } from './reporting/find-large-files'
import { generateProgressReport } from './reporting/generate-progress-report'
async function main() {
const rootDir = process.cwd()
console.log('Scanning for TypeScript files exceeding 150 lines...')
const files = await findLargeFiles(rootDir, 150)
console.log(`Found ${files.length} files`)
const report = await generateReport(files)
const report = await generateProgressReport(files)
const outputPath = path.join(rootDir, 'docs', 'todo', 'LAMBDA_REFACTOR_PROGRESS.md')
await fs.writeFile(outputPath, report, 'utf-8')
console.log(`Report generated: ${outputPath}`)
console.log(`\nSummary:`)
console.log(`- Total files: ${files.length}`)
@@ -240,4 +31,4 @@ if (require.main === module) {
main().catch(console.error)
}
export { findLargeFiles, generateReport }
export { findLargeFiles, generateProgressReport }

View File

@@ -0,0 +1,11 @@
import { FileCategory } from './types'
/**
 * Bucket a repository path into a refactoring category. The rules are
 * ordered — the first match wins — so e.g. a `.test.` file under `/lib/`
 * is 'test', not 'library'.
 */
export function categorizeFile(filePath: string): FileCategory {
  const orderedRules: Array<[(p: string) => boolean, FileCategory]> = [
    [p => p.includes('.test.'), 'test'],
    [p => p.endsWith('.tsx'), 'component'],
    [p => p.includes('/tools/'), 'tool'],
    [p => p.includes('/dbal/'), 'dbal'],
    [p => p.includes('/types/') || p.endsWith('.d.ts'), 'type'],
    [p => p.includes('/lib/') && p.endsWith('.ts'), 'library'],
  ]
  for (const [matches, category] of orderedRules) {
    if (matches(filePath)) return category
  }
  return 'other'
}

View File

@@ -0,0 +1,60 @@
import { exec } from 'child_process'
import { promisify } from 'util'
import { categorizeFile } from './categorize-file'
import { FileInfo } from './types'
const execAsync = promisify(exec)
/**
 * Rank a file for the refactoring queue; larger numbers are refactored
 * first. Library/tool code scores highest, tests and type declarations
 * score zero, and very large files are demoted because they are harder to
 * refactor in a single pass.
 */
function calculatePriority(file: FileInfo): number {
  const basePriority: Record<string, number> = {
    library: 10,
    tool: 8,
    dbal: 6,
    component: 4,
    test: 0,
    type: 0,
    other: 2,
  }
  const base = basePriority[file.category]
  // Size adjustment: moderately sized files are the sweet spot.
  const penalty = file.lines > 1000 ? 3 : file.lines > 500 ? 1 : file.lines > 300 ? 0 : -1
  return base - penalty
}
/**
 * Find all .ts/.tsx files under `rootDir` longer than `minLines` lines,
 * categorized, prioritized, and sorted (highest priority first, ties broken
 * by descending size). Build artifacts (node_modules, .next, dist, build)
 * are excluded.
 *
 * Shells out to `find` + `wc`, so a POSIX environment is required.
 *
 * @param rootDir - Directory to scan (quoted in the shell command, so paths
 *   with spaces are safe).
 * @param minLines - Files with at most this many lines are ignored.
 */
export async function findLargeFiles(rootDir: string, minLines: number = 150): Promise<FileInfo[]> {
  // Each output line is "<count> <path>". minLines is a number, safe to inline.
  const { stdout } = await execAsync(
    `find "${rootDir}" \\( -name "*.ts" -o -name "*.tsx" \\) ` +
      `-not -path "*/node_modules/*" ` +
      `-not -path "*/.next/*" ` +
      `-not -path "*/dist/*" ` +
      `-not -path "*/build/*" ` +
      `-exec sh -c 'lines=$(wc -l < "$1"); if [ "$lines" -gt ${minLines} ]; then echo "$lines $1"; fi' _ {} \\;`
  )
  const files: FileInfo[] = []
  for (const entry of stdout.trim().split('\n').filter(Boolean)) {
    const trimmed = entry.trim()
    // Split on the FIRST space only: file paths may themselves contain
    // spaces. The previous `split(' ', 2)` truncated such paths — the limit
    // argument caps the number of pieces kept, it does not join the rest.
    const separator = trimmed.indexOf(' ')
    const lines = parseInt(trimmed.slice(0, separator), 10)
    const filePath = trimmed.slice(separator + 1)
    const category = categorizeFile(filePath)
    const fileInfo: FileInfo = {
      path: filePath.replace(rootDir + '/', ''),
      lines,
      category,
      priority: 0,
      status: category === 'test' || category === 'type' ? 'skipped' : 'pending',
      reason:
        category === 'test'
          ? 'Test files can remain large for comprehensive coverage'
          : category === 'type'
            ? 'Type definition files are typically large'
            : undefined,
    }
    fileInfo.priority = calculatePriority(fileInfo)
    files.push(fileInfo)
  }
  return files.sort((a, b) => b.priority - a.priority || b.lines - a.lines)
}

View File

@@ -0,0 +1,121 @@
import { FileInfo } from './types'
/**
 * Render the markdown progress report for the lambda-per-file refactoring
 * effort: summary counts, per-category totals, a prioritized queue of
 * pending files, skipped files, and reference refactoring patterns.
 *
 * Note: declared async for call-site symmetry; the body does no awaiting.
 *
 * @param files - All tracked files (any status), as produced by findLargeFiles.
 * @returns The complete markdown document as a single string.
 */
export async function generateProgressReport(files: FileInfo[]): Promise<string> {
  const total = files.length
  // Tally files per category and per status for the summary sections.
  const byCategory = files.reduce((acc, f) => {
    acc[f.category] = (acc[f.category] || 0) + 1
    return acc
  }, {} as Record<string, number>)
  const byStatus = files.reduce((acc, f) => {
    acc[f.status] = (acc[f.status] || 0) + 1
    return acc
  }, {} as Record<string, number>)
  // --- Header and summary ---
  let report = '# Lambda-per-File Refactoring Progress\n\n'
  report += `**Generated:** ${new Date().toISOString()}\n\n`
  report += `## Summary\n\n`
  report += `- **Total files > 150 lines:** ${total}\n`
  report += `- **Pending:** ${byStatus.pending || 0}\n`
  report += `- **In Progress:** ${byStatus['in-progress'] || 0}\n`
  report += `- **Completed:** ${byStatus.completed || 0}\n`
  report += `- **Skipped:** ${byStatus.skipped || 0}\n\n`
  report += `## By Category\n\n`
  // Categories listed by descending count.
  for (const [category, count] of Object.entries(byCategory).sort((a, b) => b[1] - a[1])) {
    report += `- **${category}:** ${count}\n`
  }
  report += `\n## Refactoring Queue\n\n`
  report += `Files are prioritized by ease of refactoring and impact.\n\n`
  // --- Pending queue, bucketed by the numeric priority (see calculatePriority).
  // Each bucket lists at most 20 entries, with an overflow note. ---
  const highPriority = files.filter(f => f.priority >= 8 && f.status === 'pending')
  const medPriority = files.filter(f => f.priority >= 4 && f.priority < 8 && f.status === 'pending')
  const lowPriority = files.filter(f => f.priority < 4 && f.status === 'pending')
  if (highPriority.length > 0) {
    report += `### High Priority (${highPriority.length} files)\n\n`
    report += `Library and tool files - easiest to refactor\n\n`
    for (const file of highPriority.slice(0, 20)) {
      report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
    }
    if (highPriority.length > 20) {
      report += `- ... and ${highPriority.length - 20} more\n`
    }
    report += `\n`
  }
  if (medPriority.length > 0) {
    report += `### Medium Priority (${medPriority.length} files)\n\n`
    report += `DBAL and component files - moderate complexity\n\n`
    for (const file of medPriority.slice(0, 20)) {
      report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
    }
    if (medPriority.length > 20) {
      report += `- ... and ${medPriority.length - 20} more\n`
    }
    report += `\n`
  }
  if (lowPriority.length > 0) {
    report += `### Low Priority (${lowPriority.length} files)\n\n`
    for (const file of lowPriority.slice(0, 20)) {
      report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
    }
    if (lowPriority.length > 20) {
      report += `- ... and ${lowPriority.length - 20} more\n`
    }
    report += `\n`
  }
  // --- Skipped files (tests and type declarations), capped at 10 entries ---
  const skipped = files.filter(f => f.status === 'skipped')
  if (skipped.length > 0) {
    report += `### Skipped Files (${skipped.length})\n\n`
    report += `These files do not need refactoring:\n\n`
    for (const file of skipped.slice(0, 10)) {
      report += `- \`${file.path}\` (${file.lines} lines) - ${file.reason}\n`
    }
    if (skipped.length > 10) {
      report += `- ... and ${skipped.length - 10} more\n`
    }
    report += `\n`
  }
  // --- Static guidance sections (same for every run) ---
  report += `## Refactoring Patterns\n\n`
  report += `### For Library Files\n`
  report += `1. Create a \`functions/\` subdirectory\n`
  report += `2. Extract each function to its own file\n`
  report += `3. Create a class wrapper (like SchemaUtils)\n`
  report += `4. Update main file to re-export\n`
  report += `5. Verify tests still pass\n\n`
  report += `### For Components\n`
  report += `1. Extract hooks into separate files\n`
  report += `2. Extract sub-components\n`
  report += `3. Extract utility functions\n`
  report += `4. Keep main component < 150 lines\n\n`
  report += `### For DBAL Files\n`
  report += `1. Split adapters by operation type\n`
  report += `2. Extract provider implementations\n`
  report += `3. Keep interfaces separate from implementations\n\n`
  report += `## Example: SchemaUtils Pattern\n\n`
  report += `The \`frontends/nextjs/src/lib/schema/\` directory demonstrates the lambda-per-file pattern:\n\n`
  report += `\`\`\`\n`
  report += `schema/\n`
  report += `├── functions/\n`
  report += `│ ├── field/\n`
  report += `│ │ ├── get-field-label.ts\n`
  report += `│ │ ├── validate-field.ts\n`
  report += `│ │ └── ...\n`
  report += `│ ├── model/\n`
  report += `│ │ ├── find-model.ts\n`
  report += `│ │ └── ...\n`
  report += `│ └── index.ts (re-exports all)\n`
  report += `├── SchemaUtils.ts (class wrapper)\n`
  report += `└── schema-utils.ts (backward compat re-exports)\n`
  report += `\`\`\`\n\n`
  return report
}

View File

@@ -0,0 +1,11 @@
/** Buckets used to pick a refactoring strategy and priority for a file. */
export type FileCategory = 'component' | 'library' | 'test' | 'tool' | 'dbal' | 'type' | 'other'
/** Lifecycle state of a file in the refactoring queue. */
export type FileStatus = 'pending' | 'in-progress' | 'completed' | 'skipped'
/** A source file tracked by the lambda-per-file refactoring report. */
export interface FileInfo {
  // Path relative to the scanned root directory.
  path: string
  // Total line count of the file.
  lines: number
  category: FileCategory
  // Numeric rank; higher values are refactored first.
  priority: number
  status: FileStatus
  // Set for skipped files: why no refactoring is needed.
  reason?: string
}

View File

@@ -0,0 +1,16 @@
import { exec } from 'child_process'
import { promisify } from 'util'

const execAsync = promisify(exec)

/**
 * Run the project's `npm run lint:fix` in `workingDir`, forwarding all
 * command output to `log`. Best-effort: a failing lint run (e.g. errors
 * remaining after autofix) is logged, never thrown.
 *
 * @param workingDir - Directory whose package.json defines `lint:fix`.
 * @param log - Sink for progress messages and command output (e.g. console.log).
 */
export async function runLintFix(workingDir: string, log: (message: string) => void): Promise<void> {
  log('\n🔧 Running ESLint to fix imports and formatting...')
  try {
    const { stdout, stderr } = await execAsync('npm run lint:fix', { cwd: workingDir })
    if (stdout) log(stdout)
    if (stderr) log(stderr)
    log('  ✅ Linting completed')
  } catch (error) {
    // eslint exits non-zero when unfixable problems remain; treat as non-fatal.
    log(`  ⚠️ Linting had issues (may be expected): ${error}`)
  }
}