Merge pull request #141 from johndoe6345789/codex/refactor-tools/refactoring-structure

Refactor multi-language refactor tooling
This commit is contained in:
2025-12-27 17:20:02 +00:00
committed by GitHub
10 changed files with 1,164 additions and 565 deletions

View File

@@ -14,7 +14,7 @@ Automated tools for refactoring large TypeScript and C++ files into modular lamb
### 1. Generate Progress Report
```bash
npx tsx tools/refactoring/refactor-to-lambda.ts
npx tsx tools/refactoring/cli/refactor-to-lambda.ts
```
This scans the codebase and generates `docs/todo/LAMBDA_REFACTOR_PROGRESS.md` with:
@@ -29,10 +29,10 @@ Preview what would happen without modifying files:
```bash
# Preview all high-priority files
npx tsx tools/refactoring/orchestrate-refactor.ts --dry-run high
npx tsx tools/refactoring/cli/orchestrate-refactor.ts --dry-run high
# Preview specific number of files
npx tsx tools/refactoring/orchestrate-refactor.ts --dry-run high --limit=5
npx tsx tools/refactoring/cli/orchestrate-refactor.ts --dry-run high --limit=5
# Preview a single file
npx tsx tools/refactoring/ast-lambda-refactor.ts --dry-run -v frontends/nextjs/src/lib/rendering/page/page-definition-builder.ts
@@ -44,13 +44,13 @@ Refactor files in bulk with automatic linting and import fixing:
```bash
# Refactor all high-priority files (recommended start)
npx tsx tools/refactoring/orchestrate-refactor.ts high
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high
# Refactor first 10 high-priority files
npx tsx tools/refactoring/orchestrate-refactor.ts high --limit=10
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --limit=10
# Refactor all pending files
npx tsx tools/refactoring/orchestrate-refactor.ts all
npx tsx tools/refactoring/cli/orchestrate-refactor.ts all
```
The orchestrator will:
@@ -67,7 +67,7 @@ The orchestrator will:
Scans codebase and generates tracking report.
```bash
npx tsx tools/refactoring/refactor-to-lambda.ts
npx tsx tools/refactoring/cli/refactor-to-lambda.ts
```
**Output:** `docs/todo/LAMBDA_REFACTOR_PROGRESS.md`
@@ -134,7 +134,7 @@ npx tsx tools/refactoring/bulk-lambda-refactor.ts [options] <file>
Refactor both TypeScript and C++ files with automatic language detection.
```bash
npx tsx tools/refactoring/multi-lang-refactor.ts [options] <file>
npx tsx tools/refactoring/cli/cli.ts [options] <file>
# Options:
# -d, --dry-run Preview without writing
@@ -145,13 +145,13 @@ npx tsx tools/refactoring/multi-lang-refactor.ts [options] <file>
**Examples:**
```bash
# Refactor TypeScript file
npx tsx tools/refactoring/multi-lang-refactor.ts --dry-run src/lib/utils.ts
npx tsx tools/refactoring/cli/cli.ts --dry-run src/lib/utils.ts
# Refactor C++ file
npx tsx tools/refactoring/multi-lang-refactor.ts --verbose dbal/src/adapter.cpp
npx tsx tools/refactoring/cli/cli.ts --verbose dbal/src/adapter.cpp
# Multiple files
npx tsx tools/refactoring/multi-lang-refactor.ts file1.ts file2.cpp
npx tsx tools/refactoring/cli/cli.ts file1.ts file2.cpp
```
### 5. `orchestrate-refactor.ts` - Master Orchestrator
@@ -159,7 +159,7 @@ npx tsx tools/refactoring/multi-lang-refactor.ts file1.ts file2.cpp
Complete automated workflow for bulk refactoring (TypeScript only).
```bash
npx tsx tools/refactoring/orchestrate-refactor.ts [priority] [options]
npx tsx tools/refactoring/cli/orchestrate-refactor.ts [priority] [options]
# Priority: high | medium | low | all
# Options:
@@ -172,13 +172,13 @@ npx tsx tools/refactoring/orchestrate-refactor.ts [priority] [options]
**Examples:**
```bash
# Dry run for high-priority files
npx tsx tools/refactoring/orchestrate-refactor.ts high --dry-run
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --dry-run
# Refactor 5 high-priority files
npx tsx tools/refactoring/orchestrate-refactor.ts high --limit=5
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --limit=5
# Refactor all medium-priority files, skip tests
npx tsx tools/refactoring/orchestrate-refactor.ts medium --skip-test
npx tsx tools/refactoring/cli/orchestrate-refactor.ts medium --skip-test
```
## Refactoring Pattern
@@ -275,13 +275,13 @@ import { validateEmail } from '@/lib/utils/functions/validate-email'
### Phase 1: High-Priority Files (Library & Tools - 20 files)
```bash
# 1. Generate report
npx tsx tools/refactoring/refactor-to-lambda.ts
npx tsx tools/refactoring/cli/refactor-to-lambda.ts
# 2. Dry run to preview
npx tsx tools/refactoring/orchestrate-refactor.ts high --dry-run
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --dry-run
# 3. Refactor in small batches
npx tsx tools/refactoring/orchestrate-refactor.ts high --limit=5
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --limit=5
# 4. Review, test, commit
git diff
@@ -289,7 +289,7 @@ npm run test:unit
git add . && git commit -m "refactor: convert 5 library files to lambda-per-file"
# 5. Repeat for next batch
npx tsx tools/refactoring/orchestrate-refactor.ts high --limit=5
npx tsx tools/refactoring/cli/orchestrate-refactor.ts high --limit=5
```
### Phase 2: Medium-Priority Files (DBAL & Components - 68 files)

View File

@@ -0,0 +1,118 @@
#!/usr/bin/env tsx
/**
* Batch Refactor All Large Files
*
* Processes all files from the tracking report in priority order
*/
import { BulkLambdaRefactor } from '../bulk-lambda-refactor'
import * as fs from 'fs/promises'
import * as path from 'path'
// One refactoring candidate parsed out of the progress-report checklist.
interface FileToRefactor {
// Repository-relative path exactly as written in the report's backticks.
path: string
// Line count recorded in the report (not re-measured from disk).
lines: number
// NOTE(review): the parser below stores the same priority string here as in
// `priority` — the two fields are currently duplicates; confirm intent.
category: string
priority: 'high' | 'medium' | 'low'
}
/**
 * Parse the generated progress report into a flat list of refactor candidates.
 *
 * Section headings ("### High/Medium/Low Priority") switch the active bucket;
 * parsing stops as soon as the "### Skipped" section begins. Only unchecked
 * checklist items of the form `- [ ] \`path\` (N lines)` are collected.
 */
async function loadFilesFromReport(): Promise<FileToRefactor[]> {
  const reportPath = path.join(process.cwd(), 'docs/todo/LAMBDA_REFACTOR_PROGRESS.md')
  const content = await fs.readFile(reportPath, 'utf-8')

  const itemPattern = /- \[ \] `([^`]+)` \((\d+) lines\)/
  const collected: FileToRefactor[] = []
  let bucket: 'high' | 'medium' | 'low' = 'high'

  for (const reportLine of content.split('\n')) {
    // The Skipped section marks the end of actionable entries.
    if (reportLine.includes('### Skipped')) break
    if (reportLine.includes('### High Priority')) bucket = 'high'
    else if (reportLine.includes('### Medium Priority')) bucket = 'medium'
    else if (reportLine.includes('### Low Priority')) bucket = 'low'

    const item = itemPattern.exec(reportLine)
    if (item) {
      collected.push({
        path: item[1],
        lines: Number.parseInt(item[2], 10),
        category: bucket,
        priority: bucket,
      })
    }
  }
  return collected
}
/**
 * CLI entry: bulk-refactor files listed in the progress report.
 *
 * Flags: --dry-run/-d (preview only), --verbose/-v, --limit=N, plus a bare
 * priority word (high|medium|low|all; defaults to 'high').
 * Exits 0 early when nothing matches the filter. In live mode a 3-second
 * grace period lets the operator Ctrl+C before files are modified.
 */
async function main() {
// Parse flags positionally-agnostic; unknown args are ignored.
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run') || args.includes('-d')
const verbose = args.includes('--verbose') || args.includes('-v')
const priorityFilter = args.find(a => ['high', 'medium', 'low', 'all'].includes(a)) || 'high'
// 999 acts as "effectively unlimited" when --limit is absent or malformed.
const limit = parseInt(args.find(a => a.startsWith('--limit='))?.split('=')[1] || '999', 10)
console.log('📋 Loading files from tracking report...')
const allFiles = await loadFilesFromReport()
let filesToProcess = allFiles
if (priorityFilter !== 'all') {
filesToProcess = allFiles.filter(f => f.priority === priorityFilter)
}
filesToProcess = filesToProcess.slice(0, limit)
console.log(`\n📊 Plan:`)
console.log(` Priority filter: ${priorityFilter}`)
console.log(` Files to process: ${filesToProcess.length}`)
console.log(` Mode: ${dryRun ? 'DRY RUN (preview only)' : 'LIVE (will modify files)'}`)
if (filesToProcess.length === 0) {
console.log('\n⚠ No files to process')
process.exit(0)
}
// Show what will be processed (first 10 only, to keep output readable)
console.log(`\n📝 Files queued:`)
for (let i = 0; i < Math.min(10, filesToProcess.length); i++) {
console.log(` ${i + 1}. ${filesToProcess[i].path} (${filesToProcess[i].lines} lines)`)
}
if (filesToProcess.length > 10) {
console.log(` ... and ${filesToProcess.length - 10} more`)
}
// Confirmation for live mode: give the operator a chance to abort.
if (!dryRun) {
console.log(`\n⚠ WARNING: This will modify ${filesToProcess.length} files!`)
console.log(` Press Ctrl+C to cancel, or wait 3 seconds to continue...`)
await new Promise(resolve => setTimeout(resolve, 3000))
}
console.log('\n🚀 Starting refactoring...\n')
const refactor = new BulkLambdaRefactor({ dryRun, verbose })
const filePaths = filesToProcess.map(f => f.path)
const results = await refactor.bulkRefactor(filePaths)
// Save results as JSON so a later pass can reconcile the progress report.
const resultsPath = path.join(process.cwd(), 'docs/todo/REFACTOR_RESULTS.json')
await fs.writeFile(resultsPath, JSON.stringify(results, null, 2), 'utf-8')
console.log(`\n💾 Results saved to: ${resultsPath}`)
// Update progress report
console.log('\n📝 Updating progress report...')
// TODO: Mark completed files in the report
console.log('\n✅ Batch refactoring complete!')
console.log('\nNext steps:')
console.log(' 1. Run: npm run lint:fix')
console.log(' 2. Run: npm run typecheck')
console.log(' 3. Run: npm run test:unit')
console.log(' 4. Review changes and commit')
}
// Run only when invoked directly (not when imported).
// BUGFIX: `.catch(console.error)` logged the failure but left the exit code
// at 0, so CI and shell scripts treated crashed runs as successes.
if (require.main === module) {
  main().catch(error => {
    console.error(error)
    process.exit(1)
  })
}

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env tsx
import { MultiLanguageLambdaRefactor } from '../multi-lang-refactor'
/** Print CLI usage/help text for the multi-language refactor tool to stdout. */
function printHelp() {
  const helpLines: string[] = [
    'Multi-Language Lambda Refactoring Tool\n',
    'Supports: TypeScript (.ts, .tsx) and C++ (.cpp, .hpp, .cc, .h)\n',
    'Usage: tsx tools/refactoring/cli/cli.ts [options] <file>',
    '\nOptions:',
    ' -d, --dry-run Preview without writing',
    ' -v, --verbose Verbose output',
    ' -h, --help Show help',
    '\nExamples:',
    ' tsx tools/refactoring/cli/cli.ts --dry-run src/utils.ts',
    ' tsx tools/refactoring/cli/cli.ts --verbose dbal/src/adapter.cpp',
  ]
  helpLines.forEach(helpLine => console.log(helpLine))
}
/**
 * CLI entry for the multi-language lambda refactor tool.
 *
 * With no args or -h/--help, prints usage and returns { status: 'help' }.
 * Otherwise treats every non-flag argument as a file to refactor and runs
 * MultiLanguageLambdaRefactor over them, honoring -d/--dry-run and
 * -v/--verbose. Throws when only flags (no files) were supplied.
 */
export async function handler(argv: string[] = process.argv.slice(2)) {
  const wantsHelp = argv.length === 0 || argv.includes('--help') || argv.includes('-h')
  if (wantsHelp) {
    printHelp()
    return { status: 'help' }
  }

  const flags = new Set(argv.filter(a => a.startsWith('-')))
  const files = argv.filter(a => !a.startsWith('-'))
  if (files.length === 0) {
    throw new Error('Error: Please provide file(s) to refactor')
  }

  const refactor = new MultiLanguageLambdaRefactor({
    dryRun: flags.has('--dry-run') || flags.has('-d'),
    verbose: flags.has('--verbose') || flags.has('-v'),
  })
  await refactor.bulkRefactor(files)
  console.log('\n✨ Done!')
  return { status: 'ok' }
}
// Run only when executed directly (not when `handler` is imported elsewhere);
// failures are logged and surfaced as a non-zero exit code for CI.
if (require.main === module) {
handler().catch(error => {
console.error(error)
process.exit(1)
})
}

View File

@@ -0,0 +1,249 @@
#!/usr/bin/env tsx
/**
* Master Refactoring Orchestrator
*
* Orchestrates the complete lambda-per-file refactoring process:
* 1. Loads files from tracking report
* 2. Refactors in priority order
* 3. Runs linter and fixes imports
* 4. Runs type checking
* 5. Updates progress report
*/
import { ASTLambdaRefactor } from '../ast-lambda-refactor'
import * as fs from 'fs/promises'
import * as path from 'path'
import { exec } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
// One work item for the orchestrator, parsed from the progress report and
// mutated in place as each phase runs.
interface FileToProcess {
// Repository-relative path from the report checklist.
path: string
// Line count recorded in the report.
lines: number
priority: 'high' | 'medium' | 'low'
// Starts as 'pending'; the refactor loop flips it to completed/failed/skipped.
status: 'pending' | 'completed' | 'failed' | 'skipped'
// Populated only for failed/skipped files (the thrown error's message).
error?: string
}
/**
 * Parse docs/todo/LAMBDA_REFACTOR_PROGRESS.md into pending work items.
 *
 * The priority of each entry comes from the most recent "### <X> Priority"
 * heading seen; parsing halts at the "### Skipped" section. Only unchecked
 * checklist rows (`- [ ] \`path\` (N lines)`) are returned, all 'pending'.
 */
async function loadFilesFromReport(): Promise<FileToProcess[]> {
  const reportPath = path.join(process.cwd(), 'docs/todo/LAMBDA_REFACTOR_PROGRESS.md')
  const markdown = await fs.readFile(reportPath, 'utf-8')

  const checklistItem = /- \[ \] `([^`]+)` \((\d+) lines\)/
  const headingBuckets: Array<['high' | 'medium' | 'low', string]> = [
    ['high', '### High Priority'],
    ['medium', '### Medium Priority'],
    ['low', '### Low Priority'],
  ]

  const queue: FileToProcess[] = []
  let active: 'high' | 'medium' | 'low' = 'high'

  for (const row of markdown.split('\n')) {
    if (row.includes('### Skipped')) break

    const heading = headingBuckets.find(([, marker]) => row.includes(marker))
    if (heading) active = heading[0]

    const item = checklistItem.exec(row)
    if (item) {
      queue.push({
        path: item[1],
        lines: Number.parseInt(item[2], 10),
        priority: active,
        status: 'pending',
      })
    }
  }
  return queue
}
/**
 * Run a shell command and capture its output without ever throwing.
 *
 * Non-zero exits are swallowed by design: the lint/typecheck/test phases
 * inspect the captured text instead of aborting the whole orchestrator.
 *
 * @param cmd shell command line to execute
 * @param cwd working directory (defaults to the current process cwd)
 * @returns captured stdout/stderr; on failure, stderr falls back to the
 *          error message (covers spawn failures with empty streams)
 */
async function runCommand(cmd: string, cwd: string = process.cwd()): Promise<{ stdout: string; stderr: string }> {
  // Promisified locally so the helper is self-contained; 10 MiB buffer
  // because lint/test runners can be very chatty.
  const run = promisify(exec)
  try {
    return await run(cmd, { cwd, maxBuffer: 10 * 1024 * 1024 })
  } catch (error: unknown) {
    // exec() rejections carry the captured streams on the error object;
    // narrow from `unknown` instead of the previous unsafe `any`.
    const e = error as { stdout?: string; stderr?: string; message?: string }
    return { stdout: e.stdout || '', stderr: e.stderr || e.message || '' }
  }
}
/**
 * Orchestrate the full lambda-per-file refactor pipeline:
 * load candidates from the report, refactor them (Phase 1), then — in live
 * mode with at least one success — lint (Phase 2), typecheck (Phase 3) and
 * test (Phase 4), finally dumping per-file results to REFACTOR_RESULTS.json.
 *
 * Flags: --dry-run/-d, --limit=N, --skip-lint, --skip-test, plus a bare
 * priority word (high|medium|low|all; defaults to 'all').
 * Lint/typecheck/test failures are reported but do not abort the run.
 */
async function main() {
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run') || args.includes('-d')
const priorityFilter = args.find(a => ['high', 'medium', 'low', 'all'].includes(a)) || 'all'
const limitArg = args.find(a => a.startsWith('--limit='))
// 999 acts as "effectively unlimited" when no --limit is given.
const limit = limitArg ? parseInt(limitArg.split('=')[1], 10) : 999
const skipLint = args.includes('--skip-lint')
const skipTest = args.includes('--skip-test')
console.log('🚀 Lambda-per-File Refactoring Orchestrator\n')
// Load files
console.log('📋 Loading files from tracking report...')
let files = await loadFilesFromReport()
if (priorityFilter !== 'all') {
files = files.filter(f => f.priority === priorityFilter)
}
files = files.slice(0, limit)
console.log(`\n📊 Configuration:`)
console.log(` Priority: ${priorityFilter}`)
console.log(` Limit: ${limit}`)
console.log(` Files to process: ${files.length}`)
console.log(` Mode: ${dryRun ? '🔍 DRY RUN (preview only)' : '⚡ LIVE (will modify files)'}`)
console.log(` Skip lint: ${skipLint}`)
console.log(` Skip tests: ${skipTest}`)
if (files.length === 0) {
console.log('\n⚠ No files to process')
return
}
// Show preview (first 10 only, to keep output readable)
console.log(`\n📝 Files queued:`)
const preview = files.slice(0, 10)
preview.forEach((f, i) => {
console.log(` ${i + 1}. [${f.priority.toUpperCase()}] ${f.path} (${f.lines} lines)`)
})
if (files.length > 10) {
console.log(` ... and ${files.length - 10} more`)
}
// Safety confirmation for live mode: give the operator a chance to abort.
if (!dryRun) {
console.log(`\n⚠ WARNING: This will refactor ${files.length} files!`)
console.log(' Press Ctrl+C to cancel, or wait 5 seconds to continue...')
await new Promise(resolve => setTimeout(resolve, 5000))
}
console.log('\n' + '='.repeat(60))
console.log('PHASE 1: REFACTORING')
console.log('='.repeat(60) + '\n')
// Refactor files one by one; per-file errors are recorded, not fatal.
const refactor = new ASTLambdaRefactor({ dryRun, verbose: true })
for (let i = 0; i < files.length; i++) {
const file = files[i]
console.log(`\n[${i + 1}/${files.length}] Processing: ${file.path}`)
try {
await refactor.refactorFile(file.path)
file.status = 'completed'
} catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error)
// "skipping"/"No functions" messages are expected outcomes, not failures.
if (errorMsg.includes('skipping') || errorMsg.includes('No functions')) {
file.status = 'skipped'
file.error = errorMsg
} else {
file.status = 'failed'
file.error = errorMsg
console.error(` ❌ Error: ${errorMsg}`)
}
}
// Small delay to avoid overwhelming system
await new Promise(resolve => setTimeout(resolve, 100))
}
// Summary
const summary = {
total: files.length,
completed: files.filter(f => f.status === 'completed').length,
skipped: files.filter(f => f.status === 'skipped').length,
failed: files.filter(f => f.status === 'failed').length,
}
console.log('\n' + '='.repeat(60))
console.log('REFACTORING SUMMARY')
console.log('='.repeat(60))
console.log(` ✅ Completed: ${summary.completed}`)
console.log(` ⏭️ Skipped: ${summary.skipped}`)
console.log(` ❌ Failed: ${summary.failed}`)
console.log(` 📊 Total: ${summary.total}`)
// Verification phases only make sense when something actually changed.
if (!dryRun && summary.completed > 0) {
// Phase 2: Linting
if (!skipLint) {
console.log('\n' + '='.repeat(60))
console.log('PHASE 2: LINTING & IMPORT FIXING')
console.log('='.repeat(60) + '\n')
console.log('🔧 Running ESLint with --fix...')
const lintResult = await runCommand('npm run lint:fix')
console.log(lintResult.stdout)
if (lintResult.stderr && !lintResult.stderr.includes('warning')) {
console.log('⚠️ Lint stderr:', lintResult.stderr)
}
console.log(' ✅ Linting complete')
}
// Phase 3: Type checking (never skipped — cheap and high-signal)
console.log('\n' + '='.repeat(60))
console.log('PHASE 3: TYPE CHECKING')
console.log('='.repeat(60) + '\n')
console.log('🔍 Running TypeScript compiler check...')
const typecheckResult = await runCommand('npm run typecheck')
if (typecheckResult.stderr.includes('error TS')) {
console.log('❌ Type errors detected:')
// Show only the first 20 lines to keep the console usable.
console.log(typecheckResult.stderr.split('\n').slice(0, 20).join('\n'))
console.log('\n⚠ Please fix type errors before committing')
} else {
console.log(' ✅ No type errors')
}
// Phase 4: Testing
if (!skipTest) {
console.log('\n' + '='.repeat(60))
console.log('PHASE 4: TESTING')
console.log('='.repeat(60) + '\n')
console.log('🧪 Running unit tests...')
const testResult = await runCommand('npm run test:unit -- --run')
if (testResult.stderr.includes('FAIL') || testResult.stdout.includes('FAIL')) {
console.log('❌ Some tests failed')
// Tail of the output usually contains the failure summary.
console.log(testResult.stdout.split('\n').slice(-30).join('\n'))
} else {
console.log(' ✅ All tests passed')
}
}
}
// Save detailed results (full per-file status list) for later reconciliation.
const resultsPath = path.join(process.cwd(), 'docs/todo/REFACTOR_RESULTS.json')
await fs.writeFile(resultsPath, JSON.stringify(files, null, 2), 'utf-8')
console.log(`\n💾 Detailed results saved: ${resultsPath}`)
// Final instructions
console.log('\n' + '='.repeat(60))
console.log('✨ REFACTORING COMPLETE!')
console.log('='.repeat(60))
if (dryRun) {
console.log('\n📌 This was a DRY RUN. No files were modified.')
console.log(' Run without --dry-run to apply changes.')
} else {
console.log('\n📌 Next Steps:')
console.log(' 1. Review the changes: git diff')
console.log(' 2. Fix any type errors if needed')
console.log(' 3. Run tests: npm run test:unit')
console.log(' 4. Commit: git add . && git commit -m "Refactor to lambda-per-file structure"')
}
console.log(`\n📊 Final Stats:`)
console.log(` Files refactored: ${summary.completed}`)
console.log(` Files skipped: ${summary.skipped}`)
console.log(` Files failed: ${summary.failed}`)
if (summary.failed > 0) {
console.log(`\n❌ Failed files:`)
files.filter(f => f.status === 'failed').forEach(f => {
console.log(` - ${f.path}: ${f.error}`)
})
}
}
// Run only when invoked directly (not when imported).
// BUGFIX: `.catch(console.error)` logged the failure but exited 0, so a
// crashed orchestrator run looked like success to CI and shell scripts.
if (require.main === module) {
  main().catch(error => {
    console.error(error)
    process.exit(1)
  })
}

View File

@@ -0,0 +1,243 @@
#!/usr/bin/env ts-node
/**
* Refactor large TypeScript files into lambda-per-file structure
*
* This tool helps identify files exceeding 150 lines and tracks refactoring progress.
*/
import { exec } from 'child_process'
import { promisify } from 'util'
import * as fs from 'fs/promises'
import * as path from 'path'
const execAsync = promisify(exec)
// Metadata for one over-threshold file discovered by the scanner.
interface FileInfo {
// Path relative to the scanned root (the root prefix is stripped).
path: string
// Line count as measured by `wc -l` in the find pipeline.
lines: number
category: 'component' | 'library' | 'test' | 'tool' | 'dbal' | 'type' | 'other'
// Computed by calculatePriority(); higher means refactor sooner.
priority: number
// 'test' and 'type' files start as 'skipped'; everything else 'pending'.
status: 'pending' | 'in-progress' | 'completed' | 'skipped'
// Human-readable justification, populated only for skipped files.
reason?: string
}
/**
 * Count newline-delimited lines in a file.
 *
 * Matches the semantics of `content.split('\n').length`: an empty file is
 * 1 line and a trailing newline adds one (one more than `wc -l` reports).
 * Files that cannot be read count as 0 lines.
 */
async function countLines(filePath: string): Promise<number> {
  let text: string
  try {
    text = await fs.readFile(filePath, 'utf-8')
  } catch {
    return 0
  }
  // Number of lines = number of '\n' separators + 1.
  let total = 1
  for (let i = 0; i < text.length; i++) {
    if (text.charCodeAt(i) === 10) total++
  }
  return total
}
/**
 * Bucket a file path into a refactoring category.
 *
 * Rule order matters and mirrors the original precedence: tests beat
 * components, and `.d.ts` files are claimed as 'type' before the generic
 * `/lib/` + `.ts` library rule can match them.
 */
function categorizeFile(filePath: string): FileInfo['category'] {
  const rules: Array<[(p: string) => boolean, 'test' | 'component' | 'tool' | 'dbal' | 'type' | 'library']> = [
    [p => p.includes('.test.'), 'test'],
    [p => p.endsWith('.tsx'), 'component'],
    [p => p.includes('/tools/'), 'tool'],
    [p => p.includes('/dbal/'), 'dbal'],
    [p => p.includes('/types/') || p.endsWith('.d.ts'), 'type'],
    [p => p.includes('/lib/') && p.endsWith('.ts'), 'library'],
  ]
  for (const [applies, category] of rules) {
    if (applies(filePath)) return category
  }
  return 'other'
}
/**
 * Score a file for refactoring order; higher scores are refactored first.
 *
 * Category dominates: library and tool files are the easiest wins, DBAL and
 * components need more care, and test/type files score from a base of 0
 * (they are skipped upstream). Within a category, moderately sized files
 * outrank very large ones because they can be converted incrementally.
 */
function calculatePriority(file: FileInfo): number {
  const baseByCategory = {
    library: 10,
    tool: 8,
    dbal: 6,
    component: 4,
    test: 0, // Skipped upstream
    type: 0, // Skipped upstream
    other: 2,
  }
  const base = baseByCategory[file.category]

  // Size adjustment: prefer mid-sized files over monsters.
  let adjustment: number
  if (file.lines > 1000) adjustment = -3
  else if (file.lines > 500) adjustment = -1
  else if (file.lines > 300) adjustment = 0
  else adjustment = 1

  return base + adjustment
}
/**
 * Find TypeScript files with more than `minLines` lines under `rootDir`,
 * categorized, prioritized, and sorted (priority desc, then size desc).
 *
 * Shells out to POSIX `find` + `wc -l`, excluding node_modules and build
 * artefact directories.
 *
 * @param rootDir directory to scan (interpolated into a shell command)
 * @param minLines threshold; files with strictly more lines are reported
 */
async function findLargeFiles(rootDir: string, minLines: number = 150): Promise<FileInfo[]> {
  // BUGFIX: quote the root path so directories containing spaces do not
  // split the shell command.
  const { stdout } = await execAsync(
    `find "${rootDir}" \\( -name "*.ts" -o -name "*.tsx" \\) ` +
    `-not -path "*/node_modules/*" ` +
    `-not -path "*/.next/*" ` +
    `-not -path "*/dist/*" ` +
    `-not -path "*/build/*" ` +
    `-exec sh -c 'lines=$(wc -l < "$1"); if [ "$lines" -gt ${minLines} ]; then echo "$lines $1"; fi' _ {} \\;`
  )
  const files: FileInfo[] = []
  for (const line of stdout.trim().split('\n').filter(Boolean)) {
    // BUGFIX: the previous `line.trim().split(' ', 2)` TRUNCATED paths that
    // contain spaces — in JavaScript the `limit` argument discards the
    // remainder (unlike Python's maxsplit). Parse "<count> <path>" explicitly.
    const parsed = line.trim().match(/^(\d+)\s+(.*)$/)
    if (!parsed) continue
    const lines = parseInt(parsed[1], 10)
    const filePath = parsed[2]
    const category = categorizeFile(filePath)
    const fileInfo: FileInfo = {
      path: filePath.replace(rootDir + '/', ''),
      lines,
      category,
      priority: 0,
      status: category === 'test' || category === 'type' ? 'skipped' : 'pending',
      reason: category === 'test' ? 'Test files can remain large for comprehensive coverage' :
        category === 'type' ? 'Type definition files are typically large' : undefined
    }
    fileInfo.priority = calculatePriority(fileInfo)
    files.push(fileInfo)
  }
  return files.sort((a, b) => b.priority - a.priority || b.lines - a.lines)
}
/**
 * Render the scan results as the LAMBDA_REFACTOR_PROGRESS.md markdown report:
 * summary counts, per-category totals, priority-grouped checklists (first 20
 * entries per group), skipped files, and a static how-to section.
 *
 * Declared async only so callers can treat it uniformly with the I/O
 * helpers — it performs no awaits itself. Output embeds the current
 * timestamp, so it is not byte-stable between runs.
 */
async function generateReport(files: FileInfo[]): Promise<string> {
const total = files.length
// Tally files per category and per status for the summary tables.
const byCategory = files.reduce((acc, f) => {
acc[f.category] = (acc[f.category] || 0) + 1
return acc
}, {} as Record<string, number>)
const byStatus = files.reduce((acc, f) => {
acc[f.status] = (acc[f.status] || 0) + 1
return acc
}, {} as Record<string, number>)
let report = '# Lambda-per-File Refactoring Progress\n\n'
report += `**Generated:** ${new Date().toISOString()}\n\n`
report += `## Summary\n\n`
report += `- **Total files > 150 lines:** ${total}\n`
report += `- **Pending:** ${byStatus.pending || 0}\n`
report += `- **In Progress:** ${byStatus['in-progress'] || 0}\n`
report += `- **Completed:** ${byStatus.completed || 0}\n`
report += `- **Skipped:** ${byStatus.skipped || 0}\n\n`
report += `## By Category\n\n`
// Categories sorted by descending file count.
for (const [category, count] of Object.entries(byCategory).sort((a, b) => b[1] - a[1])) {
report += `- **${category}:** ${count}\n`
}
report += `\n## Refactoring Queue\n\n`
report += `Files are prioritized by ease of refactoring and impact.\n\n`
// Group pending files by priority score; thresholds mirror calculatePriority's
// category bases (library/tool >= 8, dbal/component 4-7, rest < 4).
const highPriority = files.filter(f => f.priority >= 8 && f.status === 'pending')
const medPriority = files.filter(f => f.priority >= 4 && f.priority < 8 && f.status === 'pending')
const lowPriority = files.filter(f => f.priority < 4 && f.status === 'pending')
if (highPriority.length > 0) {
report += `### High Priority (${highPriority.length} files)\n\n`
report += `Library and tool files - easiest to refactor\n\n`
// Only the first 20 entries are listed as actionable checkboxes; the
// report parsers elsewhere match exactly this `- [ ] \`path\` (N lines)` shape.
for (const file of highPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (highPriority.length > 20) {
report += `- ... and ${highPriority.length - 20} more\n`
}
report += `\n`
}
if (medPriority.length > 0) {
report += `### Medium Priority (${medPriority.length} files)\n\n`
report += `DBAL and component files - moderate complexity\n\n`
for (const file of medPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (medPriority.length > 20) {
report += `- ... and ${medPriority.length - 20} more\n`
}
report += `\n`
}
if (lowPriority.length > 0) {
report += `### Low Priority (${lowPriority.length} files)\n\n`
for (const file of lowPriority.slice(0, 20)) {
report += `- [ ] \`${file.path}\` (${file.lines} lines)\n`
}
if (lowPriority.length > 20) {
report += `- ... and ${lowPriority.length - 20} more\n`
}
report += `\n`
}
// Skipped files (tests, type definitions) are listed without checkboxes so
// the downstream parsers ignore them.
const skipped = files.filter(f => f.status === 'skipped')
if (skipped.length > 0) {
report += `### Skipped Files (${skipped.length})\n\n`
report += `These files do not need refactoring:\n\n`
for (const file of skipped.slice(0, 10)) {
report += `- \`${file.path}\` (${file.lines} lines) - ${file.reason}\n`
}
if (skipped.length > 10) {
report += `- ... and ${skipped.length - 10} more\n`
}
report += `\n`
}
// Static documentation tail: refactoring recipes and a worked example.
report += `## Refactoring Patterns\n\n`
report += `### For Library Files\n`
report += `1. Create a \`functions/\` subdirectory\n`
report += `2. Extract each function to its own file\n`
report += `3. Create a class wrapper (like SchemaUtils)\n`
report += `4. Update main file to re-export\n`
report += `5. Verify tests still pass\n\n`
report += `### For Components\n`
report += `1. Extract hooks into separate files\n`
report += `2. Extract sub-components\n`
report += `3. Extract utility functions\n`
report += `4. Keep main component < 150 lines\n\n`
report += `### For DBAL Files\n`
report += `1. Split adapters by operation type\n`
report += `2. Extract provider implementations\n`
report += `3. Keep interfaces separate from implementations\n\n`
report += `## Example: SchemaUtils Pattern\n\n`
report += `The \`frontends/nextjs/src/lib/schema/\` directory demonstrates the lambda-per-file pattern:\n\n`
report += `\`\`\`\n`
report += `schema/\n`
report += `├── functions/\n`
report += `│ ├── field/\n`
report += `│ │ ├── get-field-label.ts\n`
report += `│ │ ├── validate-field.ts\n`
report += `│ │ └── ...\n`
report += `│ ├── model/\n`
report += `│ │ ├── find-model.ts\n`
report += `│ │ └── ...\n`
report += `│ └── index.ts (re-exports all)\n`
report += `├── SchemaUtils.ts (class wrapper)\n`
report += `└── schema-utils.ts (backward compat re-exports)\n`
report += `\`\`\`\n\n`
return report
}
/**
 * CLI entry: scan the current working directory for TypeScript files over
 * 150 lines, write the markdown progress report, and print a short summary.
 */
async function main() {
  const rootDir = process.cwd()
  console.log('Scanning for TypeScript files exceeding 150 lines...')

  const files = await findLargeFiles(rootDir, 150)
  console.log(`Found ${files.length} files`)

  const report = await generateReport(files)
  const outputPath = path.join(rootDir, 'docs', 'todo', 'LAMBDA_REFACTOR_PROGRESS.md')
  await fs.writeFile(outputPath, report, 'utf-8')
  console.log(`Report generated: ${outputPath}`)

  const countWithStatus = (status: string) => files.filter(f => f.status === status).length
  console.log(`\nSummary:`)
  console.log(`- Total files: ${files.length}`)
  console.log(`- Pending refactor: ${countWithStatus('pending')}`)
  console.log(`- Skipped: ${countWithStatus('skipped')}`)
}
// Run only when invoked directly; exported helpers stay importable.
// BUGFIX: `.catch(console.error)` exited 0 on failure, hiding crashes from CI.
if (require.main === module) {
  main().catch(error => {
    console.error(error)
    process.exit(1)
  })
}

export { findLargeFiles, generateReport }

View File

@@ -79,7 +79,7 @@ class ErrorAsTodoRefactor {
category: 'parse_error',
severity: 'high',
message: 'Could not load progress report - run refactor-to-lambda.ts first',
suggestion: 'npx tsx tools/refactoring/refactor-to-lambda.ts'
suggestion: 'npx tsx tools/refactoring/cli/refactor-to-lambda.ts'
})
return []
}

View File

@@ -0,0 +1,209 @@
import * as fs from 'fs/promises'
import * as path from 'path'
import { DependencyInfo, FunctionInfo, RefactorResult } from './types'
// Shared state for one module-splitting operation, threaded through the
// generate* methods. NOTE(review): `functionsDir` is not read by any method
// visible in this file — presumably consumed by the shared orchestration
// layer; confirm before removing.
type ModuleContext = {
// Directory containing the original source file; outputs are written under it.
dir: string
// Original file name without extension; used for the header name and guard.
basename: string
// Functions previously extracted from the original file.
functions: FunctionInfo[]
functionsDir: string
// #include lines and type declarations harvested from the original file.
dependencies: DependencyInfo
// Accumulates created file paths (and errors) for reporting.
result: RefactorResult
}
/**
 * Regex-based splitter for C++ source files.
 *
 * Extracts free functions and out-of-line method definitions by pattern
 * matching and brace counting (no real parser), then regenerates the module
 * as a declarations header plus a thin .cpp that includes it.
 *
 * Known limitations (visible in the implementation below):
 * - signatures must fit on one line for the regex to match;
 * - braces inside string literals or comments confuse the body scanner;
 * - namespace tracking never pops on `}`, so code after a closed namespace
 *   is still attributed to it.
 */
export class CppLambdaRefactor {
// dryRun suppresses all file writes; log receives progress messages.
constructor(private readonly options: { dryRun: boolean; log: (message: string) => void }) {}
// Extension used for generated per-function files.
getFunctionExtension() {
return '.cpp'
}
/**
 * Scan a C++ file and return one FunctionInfo per definition found.
 * Captures the contiguous comment block above each definition and the
 * brace-balanced body. `ClassName::method` definitions are flagged as
 * methods with the class name split out.
 */
async extractFunctions(filePath: string): Promise<FunctionInfo[]> {
const content = await fs.readFile(filePath, 'utf-8')
const lines = content.split('\n')
const functions: FunctionInfo[] = []
// Matches "<returnType> <qualified::name>(<params>) [const] [noexcept] {" on one line.
const functionRegex = /^([a-zA-Z_][a-zA-Z0-9_:<>*&\s]*?)\s+([a-zA-Z_][a-zA-Z0-9_:]*)\s*(\([^)]*\))\s*(const)?\s*(noexcept)?\s*\{/
let i = 0
let currentNamespace = ''
while (i < lines.length) {
const line = lines[i]
// Remember the most recent `namespace X` seen; never reset (see class doc).
const namespaceMatch = line.match(/^namespace\s+([a-zA-Z0-9_]+)/)
if (namespaceMatch) {
currentNamespace = namespaceMatch[1]
}
const funcMatch = line.match(functionRegex)
if (funcMatch) {
const returnType = funcMatch[1].trim()
const fullName = funcMatch[2]
const params = funcMatch[3]
const isConst = !!funcMatch[4]
// "Class::method" -> className "Class", name "method".
const nameParts = fullName.split('::')
const name = nameParts[nameParts.length - 1]
const className = nameParts.length > 1 ? nameParts[0] : undefined
const isMethod = !!className
// Collect the contiguous comment block immediately above the signature.
const comments: string[] = []
let commentLine = i - 1
while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
lines[commentLine].trim().startsWith('/*') ||
lines[commentLine].trim().startsWith('*'))) {
comments.unshift(lines[commentLine])
commentLine--
}
// Consume the body by balancing braces, starting after the opening '{'.
let braceCount = 1
let bodyLines: string[] = [line]
let j = i + 1
while (j < lines.length && braceCount > 0) {
bodyLines.push(lines[j])
for (const char of lines[j]) {
if (char === '{') braceCount++
if (char === '}') braceCount--
if (braceCount === 0) break
}
j++
}
functions.push({
name,
// C++ has no async; these TypeScript-oriented fields are fixed here.
isAsync: false,
isExported: true,
params,
returnType,
body: bodyLines.join('\n'),
startLine: i,
endLine: j - 1,
comments,
isMethod,
isStatic: false,
isConst,
namespace: currentNamespace || undefined,
className,
})
// Resume scanning after the consumed body.
i = j
} else {
i++
}
}
return functions
}
/**
 * Harvest #include lines and top-level type-ish declarations
 * (struct/class/using/typedef line starts) from a C++ file.
 */
async extractDependencies(filePath: string): Promise<DependencyInfo> {
const content = await fs.readFile(filePath, 'utf-8')
const lines = content.split('\n')
const imports: string[] = []
const types: string[] = []
for (const line of lines) {
const trimmed = line.trim()
if (trimmed.startsWith('#include')) {
imports.push(line)
}
if (trimmed.startsWith('struct ') || trimmed.startsWith('class ') ||
trimmed.startsWith('using ') || trimmed.startsWith('typedef ')) {
types.push(line)
}
}
return { imports, types }
}
/**
 * Render a standalone .cpp for one extracted function: includes, optional
 * namespace wrapper, leading comments, then the definition.
 * Assumes the captured body has the signature line first and the closing
 * '}' on its own last line — slice(1, -1) drops exactly those two.
 */
generateFunctionFile(func: FunctionInfo, includes: string[]): string {
let content = ''
if (includes.length > 0) {
content += includes.join('\n') + '\n\n'
}
if (func.namespace) {
content += `namespace ${func.namespace} {\n\n`
}
if (func.comments.length > 0) {
content += func.comments.join('\n') + '\n'
}
const constKeyword = func.isConst ? ' const' : ''
content += `${func.returnType} ${func.name}${func.params}${constKeyword} {\n`
const bodyLines = func.body.split('\n')
const actualBody = bodyLines.slice(1, -1).join('\n')
content += actualBody + '\n'
content += '}\n'
if (func.namespace) {
content += `\n} // namespace ${func.namespace}\n`
}
return content
}
/**
 * Write the module outputs: a declarations header at <dir>/<basename>/<basename>.hpp
 * and a rewritten <basename>.cpp that just includes it. Honors dryRun by
 * skipping the writes while still recording/logging what would happen.
 */
async generateModule(context: ModuleContext) {
const { dir, basename, functions, dependencies, result } = context
const headerFilePath = path.join(dir, basename, `${basename}.hpp`)
const headerContent = this.generateHeaderFile(functions, dependencies.imports, basename)
if (!this.options.dryRun) {
await fs.writeFile(headerFilePath, headerContent, 'utf-8')
}
result.newFiles.push(headerFilePath)
this.options.log(`${basename}.hpp (header)`)
// Replace the original .cpp with a stub that includes the new header.
const includeContent = `// This file has been refactored into modular functions\n` +
`#include "${basename}/${basename}.hpp"\n`
if (!this.options.dryRun) {
await fs.writeFile(path.join(dir, `${basename}.cpp`), includeContent, 'utf-8')
}
this.options.log(` ✓ Updated ${basename}.cpp to include header`)
}
/**
 * Build the header text: include guard, harvested includes, then one
 * declaration per function. Only the FIRST function's namespace (if any)
 * wraps the declarations — mixed-namespace inputs are not handled.
 */
private generateHeaderFile(functions: FunctionInfo[], includes: string[], basename: string): string {
const guard = `${basename.toUpperCase()}_HPP_INCLUDED`
let content = ''
content += `#ifndef ${guard}\n`
content += `#define ${guard}\n\n`
if (includes.length > 0) {
content += includes.join('\n') + '\n\n'
}
const namespace = functions[0]?.namespace
if (namespace) {
content += `namespace ${namespace} {\n\n`
}
for (const func of functions) {
if (func.comments.length > 0) {
content += func.comments.join('\n') + '\n'
}
const constKeyword = func.isConst ? ' const' : ''
content += `${func.returnType} ${func.name}${func.params}${constKeyword};\n\n`
}
if (namespace) {
content += `} // namespace ${namespace}\n\n`
}
content += `#endif // ${guard}\n`
return content
}
}

View File

@@ -0,0 +1,30 @@
/** Languages supported by the multi-language refactor tooling. */
export type Language = 'typescript' | 'cpp'
/**
 * Metadata for one extracted function or method, produced by the
 * language-specific extractors. Some fields only carry meaning for one
 * language (e.g. the C++ extractor fixes isAsync=false, isExported=true;
 * the TypeScript extractor fixes isConst=false).
 */
export interface FunctionInfo {
name: string
isAsync: boolean
isExported: boolean
// Parameter list including the surrounding parentheses, verbatim.
params: string
// Return type text (TS: includes the leading ':' clause or ''; C++: the type).
returnType: string
// Full source text of the definition, signature line through closing brace.
body: string
// Zero-based line indices of the definition within the scanned file.
startLine: number
endLine: number
// Contiguous comment lines found immediately above the definition.
comments: string[]
isMethod: boolean
isStatic: boolean
// C++ trailing `const` qualifier; always false for TypeScript.
isConst: boolean
// C++ only: enclosing namespace, if one was seen before the definition.
namespace?: string
// C++ only: class name from `Class::method` style definitions.
className?: string
}
/** Import/include lines and type declarations harvested from a source file. */
export interface DependencyInfo {
imports: string[]
types: string[]
}
/** Outcome of refactoring one file. */
export interface RefactorResult {
success: boolean
// Path of the file that was split.
originalFile: string
// Paths of every file created during the split.
newFiles: string[]
errors: string[]
}

View File

@@ -0,0 +1,219 @@
import * as fs from 'fs/promises'
import * as path from 'path'
import { DependencyInfo, FunctionInfo, RefactorResult } from './types'
// Shared state for one module-splitting operation, threaded through the
// refactor methods of this class.
type ModuleContext = {
// Directory containing the original source file; outputs go under it.
dir: string
// Original file name without extension.
basename: string
// Functions previously extracted from the original file.
functions: FunctionInfo[]
// Subdirectory intended for the extracted per-function files.
functionsDir: string
// Import lines and type declarations harvested from the original file.
dependencies: DependencyInfo
// Accumulates created file paths (and errors) for reporting.
result: RefactorResult
}
/**
 * Regex-based splitter for TypeScript source files: extracts functions and
 * class methods by pattern matching and brace counting (no AST), mirroring
 * the CppLambdaRefactor counterpart's interface.
 */
export class TypeScriptLambdaRefactor {
// dryRun suppresses all file writes; log receives progress messages.
constructor(private readonly options: { dryRun: boolean; log: (message: string) => void }) {}
// Extension used for generated per-function files.
getFunctionExtension() {
return '.ts'
}
/**
 * Extract top-level functions and class methods from a TypeScript file.
 *
 * Regex + brace-count based (not a real parser): signatures must fit on one
 * line, and braces inside string literals or comments will confuse the body
 * scanner. Captures the contiguous comment block above each definition.
 *
 * @param filePath file to read and scan
 * @returns one FunctionInfo per detected function/method, in source order
 */
async extractFunctions(filePath: string): Promise<FunctionInfo[]> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const functions: FunctionInfo[] = []
  const functionRegex = /^(export\s+)?(async\s+)?function\s+([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  const methodRegex = /^\s*(public|private|protected)?\s*(static\s+)?(async\s+)?([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  // BUGFIX: methodRegex is loose enough to also match control-flow statements
  // such as `if (cond) {`, `for (let i = 0; ...) {` or `while (x) {` anywhere
  // in a body, which previously got extracted as bogus "methods". Filter those
  // pseudo-matches out by keyword.
  const controlKeywords = new Set(['if', 'for', 'while', 'switch', 'catch', 'do', 'else', 'return', 'with'])
  let i = 0
  while (i < lines.length) {
    const line = lines[i]
    const funcMatch = line.match(functionRegex)
    let methodMatch = line.match(methodRegex)
    if (!funcMatch && methodMatch && controlKeywords.has(methodMatch[4] ?? '')) {
      methodMatch = null
    }
    if (funcMatch || methodMatch) {
      const isMethod = !!methodMatch
      const match = funcMatch || methodMatch!
      // Methods are considered exported because their class typically is.
      const isExported = funcMatch ? !!match[1] : true
      const isStatic = methodMatch ? !!match[2] : false
      const isAsync = funcMatch ? !!match[2] : !!match[3]
      const name = funcMatch ? match[3] : match[4]
      const params = funcMatch ? match[4] : match[5]
      const returnType = (funcMatch ? match[5] : match[6]) || ''
      // Collect the contiguous comment block immediately above the signature.
      const comments: string[] = []
      let commentLine = i - 1
      while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
        lines[commentLine].trim().startsWith('*') ||
        lines[commentLine].trim().startsWith('/*'))) {
        comments.unshift(lines[commentLine])
        commentLine--
      }
      // Consume the body by balancing braces, starting after the opening '{'.
      let braceCount = 1
      let bodyLines: string[] = [line]
      let j = i + 1
      while (j < lines.length && braceCount > 0) {
        bodyLines.push(lines[j])
        for (const char of lines[j]) {
          if (char === '{') braceCount++
          if (char === '}') braceCount--
          if (braceCount === 0) break
        }
        j++
      }
      functions.push({
        name,
        isAsync,
        isExported,
        params,
        returnType,
        body: bodyLines.join('\n'),
        startLine: i,
        endLine: j - 1,
        comments,
        isMethod,
        isStatic,
        isConst: false,
      })
      // Resume scanning after the consumed body.
      i = j
    } else {
      i++
    }
  }
  return functions
}
async extractDependencies(filePath: string): Promise<DependencyInfo> {
const content = await fs.readFile(filePath, 'utf-8')
const lines = content.split('\n')
const imports: string[] = []
const types: string[] = []
let inImport = false
let currentImport = ''
for (const line of lines) {
const trimmed = line.trim()
if (trimmed.startsWith('import ') || inImport) {
currentImport += line + '\n'
if (trimmed.includes('}') || (!trimmed.includes('{') && trimmed.endsWith("'"))) {
imports.push(currentImport.trim())
currentImport = ''
inImport = false
} else {
inImport = true
}
}
if (trimmed.startsWith('export type ') || trimmed.startsWith('export interface ') ||
trimmed.startsWith('type ') || trimmed.startsWith('interface ')) {
types.push(line)
}
}
return { imports, types }
}
generateFunctionFile(func: FunctionInfo, imports: string[]): string {
let content = ''
if (imports.length > 0) {
content += imports.join('\n') + '\n\n'
}
if (func.comments.length > 0) {
content += func.comments.join('\n') + '\n'
}
const asyncKeyword = func.isAsync ? 'async ' : ''
const exportKeyword = 'export '
content += `${exportKeyword}${asyncKeyword}function ${func.name}${func.params}${func.returnType} {\n`
const bodyLines = func.body.split('\n')
const actualBody = bodyLines.slice(1, -1).join('\n')
content += actualBody + '\n'
content += '}\n'
return content
}
async generateModule(context: ModuleContext) {
const { dir, basename, functions, result } = context
const className = basename.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join('') + 'Utils'
const classFilePath = path.join(dir, basename, `${className}.ts`)
const classContent = this.generateClassWrapper(className, functions)
if (!this.options.dryRun) {
await fs.writeFile(classFilePath, classContent, 'utf-8')
}
result.newFiles.push(classFilePath)
this.options.log(`${className}.ts (class wrapper)`)
const indexFilePath = path.join(dir, basename, 'index.ts')
const indexContent = this.generateIndexFile(functions, className)
if (!this.options.dryRun) {
await fs.writeFile(indexFilePath, indexContent, 'utf-8')
}
result.newFiles.push(indexFilePath)
this.options.log(` ✓ index.ts (re-exports)`)
const reexportContent = `// This file has been refactored into modular functions\n` +
`export * from './${basename}'\n`
if (!this.options.dryRun) {
await fs.writeFile(path.join(dir, `${basename}.ts`), reexportContent, 'utf-8')
}
this.options.log(` ✓ Updated ${basename}.ts to re-export`)
}
private generateClassWrapper(className: string, functions: FunctionInfo[]): string {
let content = '// Auto-generated class wrapper\n\n'
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
content += `import { ${func.name} } from './functions/${kebabName}'\n`
}
content += `\nexport class ${className} {\n`
for (const func of functions) {
const asyncKeyword = func.isAsync ? 'async ' : ''
content += ` static ${asyncKeyword}${func.name}(...args: any[]) {\n`
content += ` return ${func.isAsync ? 'await ' : ''}${func.name}(...args)\n`
content += ` }\n\n`
}
content += '}\n'
return content
}
private generateIndexFile(functions: FunctionInfo[], className: string): string {
let content = '// Auto-generated re-exports\n\n'
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
content += `export { ${func.name} } from './functions/${kebabName}'\n`
}
content += `\nexport { ${className} } from './${className}'\n`
return content
}
}

View File

@@ -1,50 +1,28 @@
#!/usr/bin/env tsx
/**
* Multi-Language Lambda Refactoring Tool
*
*
* Supports both TypeScript and C++ refactoring into lambda-per-file structure
*/
import * as fs from 'fs/promises'
import * as path from 'path'
import { exec } from 'child_process'
import { promisify } from 'util'
const execAsync = promisify(exec)
/**
 * Metadata for one function or method extracted from a source file.
 * Shared by the TypeScript and C++ extraction paths; line numbers are
 * 0-based indexes into the file's `split('\n')` array.
 */
interface FunctionInfo {
  /** Unqualified function/method name. */
  name: string
  /** TypeScript `async` modifier; the C++ extractor always sets false. */
  isAsync: boolean
  /** Whether the declaration was exported (methods are treated as exported). */
  isExported: boolean
  /** Raw parameter list including parentheses. */
  params: string
  /** TS: raw return annotation (may be ''). C++: return type. */
  returnType: string
  /** Full source text of the declaration, signature through closing brace. */
  body: string
  /** 0-based line of the signature. */
  startLine: number
  /** 0-based line of the closing brace. */
  endLine: number
  /** Contiguous comment lines found immediately above the declaration. */
  comments: string[]
  isMethod: boolean
  isStatic: boolean
  /** C++ `const` member functions; false for TypeScript. */
  isConst: boolean
  /** Enclosing C++ namespace, when one was seen before the definition. */
  namespace?: string
  /** C++ class name when the definition is `Class::method`. */
  className?: string
}
/** Outcome of refactoring a single file. */
interface RefactorResult {
  success: boolean
  originalFile: string
  /** Paths of all generated files (recorded even in dry-run mode). */
  newFiles: string[]
  errors: string[]
}
/** Source languages the tool knows how to parse and emit. */
type Language = 'typescript' | 'cpp'
import { CppLambdaRefactor } from './languages/cpp-refactor'
import { TypeScriptLambdaRefactor } from './languages/typescript-refactor'
import { DependencyInfo, FunctionInfo, Language, RefactorResult } from './languages/types'
class MultiLanguageLambdaRefactor {
private dryRun: boolean = false
private verbose: boolean = false
private readonly services: Record<Language, { getFunctionExtension: () => string; extractFunctions(filePath: string): Promise<FunctionInfo[]>; extractDependencies(filePath: string): Promise<DependencyInfo>; generateFunctionFile(func: FunctionInfo, imports: string[]): string; generateModule(context: { dir: string; basename: string; functions: FunctionInfo[]; functionsDir: string; dependencies: DependencyInfo; result: RefactorResult }): Promise<void> }>
/**
 * @param options.dryRun  When true, no files are written (paths still logged).
 * @param options.verbose When true, per-step log messages are printed.
 */
constructor(options: { dryRun?: boolean; verbose?: boolean } = {}) {
  this.dryRun = options.dryRun || false
  this.verbose = options.verbose || false
  // Build the per-language services only after the flags are assigned:
  // both services capture `this.dryRun` and the bound logger at
  // construction time.
  this.services = {
    typescript: new TypeScriptLambdaRefactor({ dryRun: this.dryRun, log: this.log.bind(this) }),
    cpp: new CppLambdaRefactor({ dryRun: this.dryRun, log: this.log.bind(this) }),
  }
}
private log(message: string) {
@@ -53,9 +31,6 @@ class MultiLanguageLambdaRefactor {
}
}
/**
* Detect language from file extension
*/
detectLanguage(filePath: string): Language {
const ext = path.extname(filePath).toLowerCase()
if (ext === '.cpp' || ext === '.cc' || ext === '.cxx' || ext === '.hpp' || ext === '.h') {
@@ -64,336 +39,6 @@ class MultiLanguageLambdaRefactor {
return 'typescript'
}
/**
 * Extract top-level functions and class methods from a TypeScript file.
 *
 * Regex/line based rather than AST based: a declaration must open its `{`
 * on the same line, and bodies are delimited by naive brace counting, so
 * braces inside string literals or comments can confuse it. Line numbers
 * in the result are 0-based.
 */
async extractTypeScriptFunctions(filePath: string): Promise<FunctionInfo[]> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const functions: FunctionInfo[] = []
  const functionRegex = /^(export\s+)?(async\s+)?function\s+([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  const methodRegex = /^\s*(public|private|protected)?\s*(static\s+)?(async\s+)?([a-zA-Z0-9_]+)\s*(\([^)]*\))(\s*:\s*[^{]+)?\s*\{/
  // The method regex is loose enough to also match control-flow statements
  // like `if (cond) {` or a class constructor; such matches are skipped.
  const nonMethodNames = new Set(['if', 'for', 'while', 'switch', 'catch', 'constructor'])
  let i = 0
  while (i < lines.length) {
    const line = lines[i]
    const funcMatch = line.match(functionRegex)
    const methodMatch = line.match(methodRegex)
    if (funcMatch || methodMatch) {
      const isMethod = !!methodMatch
      const match = funcMatch || methodMatch!
      const name = funcMatch ? match[3] : match[4]
      if (!funcMatch && nonMethodNames.has(name)) {
        i++
        continue
      }
      const isExported = funcMatch ? !!match[1] : true
      const isStatic = methodMatch ? !!match[2] : false
      const isAsync = funcMatch ? !!match[2] : !!match[3]
      const params = funcMatch ? match[4] : match[5]
      const returnType = (funcMatch ? match[5] : match[6]) || ''
      // Pull in any contiguous comment lines directly above the declaration.
      const comments: string[] = []
      let commentLine = i - 1
      while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
             lines[commentLine].trim().startsWith('*') ||
             lines[commentLine].trim().startsWith('/*'))) {
        comments.unshift(lines[commentLine])
        commentLine--
      }
      // Walk forward until the opening brace is balanced again.
      let braceCount = 1
      const bodyLines: string[] = [line]
      let j = i + 1
      while (j < lines.length && braceCount > 0) {
        bodyLines.push(lines[j])
        for (const char of lines[j]) {
          if (char === '{') braceCount++
          if (char === '}') braceCount--
          if (braceCount === 0) break
        }
        j++
      }
      functions.push({
        name,
        isAsync,
        isExported,
        params,
        returnType: returnType.trim(),
        body: bodyLines.join('\n'),
        startLine: i,
        endLine: j - 1,
        comments,
        isMethod,
        isStatic,
        isConst: false,
      })
      i = j
    } else {
      i++
    }
  }
  return functions
}
/**
 * Extract function definitions from a C++ source file.
 *
 * Line/regex based, not a real parser:
 * - the function regex is anchored at column 0 (no leading `\s*`), so
 *   indented definitions (e.g. inline class members) are not detected;
 * - bodies are delimited by naive brace counting, so braces inside string
 *   literals or comments can throw the count off;
 * - `currentNamespace` is set when a `namespace X` line is seen but never
 *   cleared, so functions appearing after a namespace block closes are
 *   still tagged with it. NOTE(review): confirm this is acceptable for the
 *   single-namespace files this tool targets.
 */
async extractCppFunctions(filePath: string): Promise<FunctionInfo[]> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const functions: FunctionInfo[] = []
  // Match C++ function definitions opening their brace on the same line:
  // ReturnType functionName(params) { or ReturnType ClassName::functionName(params) {
  const functionRegex = /^([a-zA-Z_][a-zA-Z0-9_:<>*&\s]*?)\s+([a-zA-Z_][a-zA-Z0-9_:]*)\s*(\([^)]*\))\s*(const)?\s*(noexcept)?\s*\{/
  let i = 0
  let currentNamespace = ''
  while (i < lines.length) {
    const line = lines[i]
    // Track the most recently opened namespace (never reset; see above).
    const namespaceMatch = line.match(/^namespace\s+([a-zA-Z0-9_]+)/)
    if (namespaceMatch) {
      currentNamespace = namespaceMatch[1]
    }
    const funcMatch = line.match(functionRegex)
    if (funcMatch) {
      const returnType = funcMatch[1].trim()
      const fullName = funcMatch[2]
      const params = funcMatch[3]
      const isConst = !!funcMatch[4]
      // Split off the class qualifier if present (ClassName::methodName).
      const nameParts = fullName.split('::')
      const name = nameParts[nameParts.length - 1]
      const className = nameParts.length > 1 ? nameParts[0] : undefined
      const isMethod = !!className
      // Pull in any contiguous comment lines directly above the definition.
      const comments: string[] = []
      let commentLine = i - 1
      while (commentLine >= 0 && (lines[commentLine].trim().startsWith('//') ||
             lines[commentLine].trim().startsWith('/*') ||
             lines[commentLine].trim().startsWith('*'))) {
        comments.unshift(lines[commentLine])
        commentLine--
      }
      // Walk forward until the opening brace is balanced again.
      let braceCount = 1
      let bodyLines: string[] = [line]
      let j = i + 1
      while (j < lines.length && braceCount > 0) {
        bodyLines.push(lines[j])
        for (const char of lines[j]) {
          if (char === '{') braceCount++
          if (char === '}') braceCount--
          if (braceCount === 0) break
        }
        j++
      }
      functions.push({
        name,
        isAsync: false, // C++ doesn't have async keyword like TS
        isExported: true, // In C++, visibility is different
        params,
        returnType,
        body: bodyLines.join('\n'),
        startLine: i,
        endLine: j - 1,
        comments,
        isMethod,
        isStatic: false,
        isConst,
        namespace: currentNamespace || undefined,
        className,
      })
      i = j
    } else {
      i++
    }
  }
  return functions
}
/**
 * Harvest dependency lines from a file: TS `import` statements (including
 * multi-line ones) or C++ `#include`s, plus type-ish declaration lines.
 */
async extractDependencies(filePath: string, language: Language): Promise<{
  imports: string[]
  types: string[]
}> {
  const content = await fs.readFile(filePath, 'utf-8')
  const lines = content.split('\n')
  const imports: string[] = []
  const types: string[] = []
  if (language === 'typescript') {
    // Multi-line `import { ... }` statements are accumulated until the line
    // containing `}` (or a brace-less line ending in a quote) is seen.
    let pending = ''
    let continuing = false
    for (const line of lines) {
      const trimmed = line.trim()
      if (continuing || trimmed.startsWith('import ')) {
        pending += line + '\n'
        const terminated = trimmed.includes('}') || (!trimmed.includes('{') && trimmed.endsWith("'"))
        if (terminated) {
          imports.push(pending.trim())
          pending = ''
          continuing = false
        } else {
          continuing = true
        }
      }
      const looksLikeType =
        trimmed.startsWith('export type ') ||
        trimmed.startsWith('export interface ') ||
        trimmed.startsWith('type ') ||
        trimmed.startsWith('interface ')
      if (looksLikeType) {
        types.push(line)
      }
    }
  } else {
    // C++: collect include directives and type-introducing declarations.
    for (const line of lines) {
      const trimmed = line.trim()
      if (trimmed.startsWith('#include')) {
        imports.push(line)
      }
      const isTypeDecl =
        trimmed.startsWith('struct ') ||
        trimmed.startsWith('class ') ||
        trimmed.startsWith('using ') ||
        trimmed.startsWith('typedef ')
      if (isTypeDecl) {
        types.push(line)
      }
    }
  }
  return { imports, types }
}
/**
 * Emit a standalone module exporting one extracted function: the original
 * file's imports, the function's leading comments, then the body wrapped in
 * an `export [async] function` declaration.
 */
generateTypeScriptFunctionFile(func: FunctionInfo, imports: string[]): string {
  const pieces: string[] = []
  if (imports.length > 0) {
    pieces.push(imports.join('\n') + '\n\n')
  }
  if (func.comments.length > 0) {
    pieces.push(func.comments.join('\n') + '\n')
  }
  const asyncPrefix = func.isAsync ? 'async ' : ''
  pieces.push(`export ${asyncPrefix}function ${func.name}${func.params}${func.returnType} {\n`)
  // Drop the original signature line and closing brace; keep the inner body.
  const inner = func.body.split('\n').slice(1, -1).join('\n')
  pieces.push(inner + '\n')
  pieces.push('}\n')
  return pieces.join('')
}
/**
 * Emit a standalone .cpp translation unit for one extracted function:
 * includes, optional namespace wrapper, leading comments, then the body.
 */
generateCppFunctionFile(func: FunctionInfo, includes: string[]): string {
  const parts: string[] = []
  if (includes.length > 0) {
    parts.push(includes.join('\n') + '\n\n')
  }
  if (func.namespace) {
    parts.push(`namespace ${func.namespace} {\n\n`)
  }
  if (func.comments.length > 0) {
    parts.push(func.comments.join('\n') + '\n')
  }
  const constSuffix = func.isConst ? ' const' : ''
  parts.push(`${func.returnType} ${func.name}${func.params}${constSuffix} {\n`)
  // Drop the original signature line and closing brace; keep the inner body.
  const inner = func.body.split('\n').slice(1, -1).join('\n')
  parts.push(inner + '\n')
  parts.push('}\n')
  if (func.namespace) {
    parts.push(`\n} // namespace ${func.namespace}\n`)
  }
  return parts.join('')
}
/**
 * Emit an include-guarded C++ header (.hpp) declaring every extracted
 * function. The namespace is taken from the first function; all functions
 * are assumed to live in the same namespace.
 */
generateCppHeaderFile(functions: FunctionInfo[], includes: string[], basename: string): string {
  const guard = `${basename.toUpperCase()}_HPP_INCLUDED`
  let out = `#ifndef ${guard}\n`
  out += `#define ${guard}\n\n`
  if (includes.length > 0) {
    out += includes.join('\n') + '\n\n'
  }
  const ns = functions[0]?.namespace
  if (ns) {
    out += `namespace ${ns} {\n\n`
  }
  // One declaration (with its leading comments) per extracted function.
  for (const fn of functions) {
    if (fn.comments.length > 0) {
      out += fn.comments.join('\n') + '\n'
    }
    out += `${fn.returnType} ${fn.name}${fn.params}${fn.isConst ? ' const' : ''};\n\n`
  }
  if (ns) {
    out += `} // namespace ${ns}\n\n`
  }
  out += `#endif // ${guard}\n`
  return out
}
/**
* Refactor a file (auto-detects language)
*/
async refactorFile(filePath: string): Promise<RefactorResult> {
const result: RefactorResult = {
success: false,
@@ -404,13 +49,11 @@ class MultiLanguageLambdaRefactor {
try {
const language = this.detectLanguage(filePath)
const service = this.services[language]
this.log(`\n🔍 Analyzing ${filePath} (${language})...`)
// Extract functions based on language
const functions = language === 'typescript'
? await this.extractTypeScriptFunctions(filePath)
: await this.extractCppFunctions(filePath)
const functions = await service.extractFunctions(filePath)
if (functions.length === 0) {
result.errors.push('No functions found to extract')
return result
@@ -422,51 +65,39 @@ class MultiLanguageLambdaRefactor {
}
this.log(` Found ${functions.length} functions: ${functions.map(f => f.name).join(', ')}`)
// Extract dependencies
const { imports, types } = await this.extractDependencies(filePath, language)
// Create directories
const dependencies = await service.extractDependencies(filePath)
const dir = path.dirname(filePath)
const ext = path.extname(filePath)
const basename = path.basename(filePath, ext)
const functionsDir = path.join(dir, basename, 'functions')
if (!this.dryRun) {
await fs.mkdir(functionsDir, { recursive: true })
}
this.log(` Creating functions directory: ${functionsDir}`)
// Generate function files
for (const func of functions) {
const kebabName = func.name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
const funcExt = language === 'typescript' ? '.ts' : '.cpp'
const funcExt = service.getFunctionExtension()
const funcFilePath = path.join(functionsDir, `${kebabName}${funcExt}`)
const content = language === 'typescript'
? this.generateTypeScriptFunctionFile(func, imports)
: this.generateCppFunctionFile(func, imports)
const content = service.generateFunctionFile(func, dependencies.imports)
if (!this.dryRun) {
await fs.writeFile(funcFilePath, content, 'utf-8')
}
result.newFiles.push(funcFilePath)
this.log(`${kebabName}${funcExt}`)
}
if (language === 'typescript') {
// Generate TypeScript index and class wrapper
await this.generateTypeScriptModule(dir, basename, functions, functionsDir, result)
} else {
// Generate C++ header and module files
await this.generateCppModule(dir, basename, functions, imports, functionsDir, result)
}
await service.generateModule({ dir, basename, functions, functionsDir, dependencies, result })
result.success = true
this.log(` ✅ Successfully refactored into ${result.newFiles.length} files`)
} catch (error) {
result.errors.push(`Error: ${error instanceof Error ? error.message : String(error)}`)
this.log(` ❌ Failed: ${result.errors[0]}`)
@@ -475,112 +106,6 @@ class MultiLanguageLambdaRefactor {
return result
}
/**
 * Emit the TypeScript companion files for one refactored module, in a fixed
 * order: the static class wrapper, the `index.ts` barrel, and a shim that
 * overwrites the original `<basename>.ts` with a re-export.
 *
 * Generated paths are pushed onto `result.newFiles` even in dry-run mode;
 * only the actual writes are skipped. `functionsDir` is unused here — the
 * per-function files are written by the caller before this runs.
 */
private async generateTypeScriptModule(
  dir: string,
  basename: string,
  functions: FunctionInfo[],
  functionsDir: string,
  result: RefactorResult
) {
  // Wrapper class named after the file, e.g. `page-builder` -> `PageBuilderUtils`.
  const className = basename.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join('') + 'Utils'
  const classFilePath = path.join(dir, basename, `${className}.ts`)
  const classContent = this.generateTypeScriptClassWrapper(className, functions)
  if (!this.dryRun) {
    await fs.writeFile(classFilePath, classContent, 'utf-8')
  }
  result.newFiles.push(classFilePath)
  this.log(`${className}.ts (class wrapper)`)
  // Barrel file re-exporting each function plus the wrapper class.
  const indexFilePath = path.join(dir, basename, 'index.ts')
  const indexContent = this.generateTypeScriptIndexFile(functions, className)
  if (!this.dryRun) {
    await fs.writeFile(indexFilePath, indexContent, 'utf-8')
  }
  result.newFiles.push(indexFilePath)
  this.log(` ✓ index.ts (re-exports)`)
  // Replace the original file's contents so existing imports keep resolving.
  const reexportContent = `// This file has been refactored into modular functions\n` +
    `export * from './${basename}'\n`
  if (!this.dryRun) {
    await fs.writeFile(path.join(dir, `${basename}.ts`), reexportContent, 'utf-8')
  }
  this.log(` ✓ Updated ${basename}.ts to re-export`)
}
/**
 * Emit the C++ companion files: a header declaring every extracted function,
 * then a shim that overwrites the original source file with an include of
 * that header. Generated paths are recorded even in dry-run mode; only the
 * writes are skipped. `functionsDir` is unused here — the per-function .cpp
 * files are written by the caller before this runs.
 *
 * NOTE(review): the shim is always written as `<basename>.cpp`, even when the
 * input had a .cc/.cxx/.hpp extension — confirm this is intended.
 */
private async generateCppModule(
  dir: string,
  basename: string,
  functions: FunctionInfo[],
  includes: string[],
  functionsDir: string,
  result: RefactorResult
) {
  // Header with one declaration per extracted function.
  const headerFilePath = path.join(dir, basename, `${basename}.hpp`)
  const headerContent = this.generateCppHeaderFile(functions, includes, basename)
  if (!this.dryRun) {
    await fs.writeFile(headerFilePath, headerContent, 'utf-8')
  }
  result.newFiles.push(headerFilePath)
  this.log(`${basename}.hpp (header)`)
  // Replace the original file's contents with an include of the new header.
  const includeContent = `// This file has been refactored into modular functions\n` +
    `#include "${basename}/${basename}.hpp"\n`
  if (!this.dryRun) {
    await fs.writeFile(path.join(dir, `${basename}.cpp`), includeContent, 'utf-8')
  }
  this.log(` ✓ Updated ${basename}.cpp to include header`)
}
/**
 * Emit a class of static pass-through methods so call sites of the shape
 * `XUtils.fn(...)` keep working after the split into per-function files.
 */
private generateTypeScriptClassWrapper(className: string, functions: FunctionInfo[]): string {
  const toKebab = (name: string) => name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
  let out = '// Auto-generated class wrapper\n\n'
  for (const fn of functions) {
    out += `import { ${fn.name} } from './functions/${toKebab(fn.name)}'\n`
  }
  out += `\nexport class ${className} {\n`
  for (const fn of functions) {
    const asyncPrefix = fn.isAsync ? 'async ' : ''
    const awaitPrefix = fn.isAsync ? 'await ' : ''
    out += ` static ${asyncPrefix}${fn.name}(...args: any[]) {\n`
    out += ` return ${awaitPrefix}${fn.name}(...args)\n`
    out += ` }\n\n`
  }
  out += '}\n'
  return out
}
/** Emit the barrel file re-exporting each function plus the class wrapper. */
private generateTypeScriptIndexFile(functions: FunctionInfo[], className: string): string {
  const toKebab = (name: string) => name.replace(/([A-Z])/g, '-$1').toLowerCase().replace(/^-/, '')
  let out = '// Auto-generated re-exports\n\n'
  for (const fn of functions) {
    out += `export { ${fn.name} } from './functions/${toKebab(fn.name)}'\n`
  }
  out += `\nexport { ${className} } from './${className}'\n`
  return out
}
async bulkRefactor(files: string[]): Promise<RefactorResult[]> {
console.log(`\n📦 Multi-Language Lambda Refactoring`)
console.log(` Mode: ${this.dryRun ? 'DRY RUN' : 'LIVE'}`)
@@ -590,14 +115,14 @@ class MultiLanguageLambdaRefactor {
let successCount = 0
let skipCount = 0
let errorCount = 0
for (let i = 0; i < files.length; i++) {
const file = files[i]
console.log(`[${i + 1}/${files.length}] Processing: ${file}`)
const result = await this.refactorFile(file)
results.push(result)
if (result.success) {
successCount++
} else if (result.errors.some(e => e.includes('skipping'))) {
@@ -605,54 +130,17 @@ class MultiLanguageLambdaRefactor {
} else {
errorCount++
}
await new Promise(resolve => setTimeout(resolve, 100))
}
console.log(`\n📊 Summary:`)
console.log(` ✅ Success: ${successCount}`)
console.log(` ⏭️ Skipped: ${skipCount}`)
console.log(` ❌ Errors: ${errorCount}`)
return results
}
}
/**
 * CLI entry point: prints usage when asked (or when invoked with no
 * arguments), otherwise parses flags and bulk-refactors every non-flag
 * argument as a file path.
 */
async function main() {
  const argv = process.argv.slice(2)
  const wantsHelp = argv.includes('--help') || argv.includes('-h') || argv.length === 0
  if (wantsHelp) {
    console.log('Multi-Language Lambda Refactoring Tool\n')
    console.log('Supports: TypeScript (.ts, .tsx) and C++ (.cpp, .hpp, .cc, .h)\n')
    console.log('Usage: tsx multi-lang-refactor.ts [options] <file>')
    console.log('\nOptions:')
    console.log(' -d, --dry-run Preview without writing')
    console.log(' -v, --verbose Verbose output')
    console.log(' -h, --help Show help')
    console.log('\nExamples:')
    console.log(' tsx multi-lang-refactor.ts --dry-run src/utils.ts')
    console.log(' tsx multi-lang-refactor.ts --verbose dbal/src/adapter.cpp')
    process.exit(0)
  }
  // Anything starting with '-' is a flag; everything else is a file path.
  const flags = new Set(argv.filter(a => a.startsWith('-')))
  const files = argv.filter(a => !a.startsWith('-'))
  if (files.length === 0) {
    console.error('Error: Please provide file(s) to refactor')
    process.exit(1)
  }
  const refactor = new MultiLanguageLambdaRefactor({
    dryRun: flags.has('--dry-run') || flags.has('-d'),
    verbose: flags.has('--verbose') || flags.has('-v'),
  })
  await refactor.bulkRefactor(files)
  console.log('\n✨ Done!')
}
// Run the CLI only when this module is executed directly; importing it
// elsewhere just exposes the class below. Errors from main() are reported
// rather than crashing with an unhandled rejection.
if (require.main === module) {
  main().catch(console.error)
}
export { MultiLanguageLambdaRefactor }