mirror of
https://github.com/johndoe6345789/metabuilder.git
synced 2026-04-25 06:14:59 +00:00
Compare commits
332 Commits
copilot/fi
...
codex/crea
| Author | SHA1 | Date | |
|---|---|---|---|
| 477641a3d8 | |||
| 81f7fa7c81 | |||
| 4a2d1dbb2d | |||
| 04df1bca51 | |||
| b9258d7420 | |||
| f16e2484d1 | |||
| 0197826b57 | |||
| b77ff225c5 | |||
| 1b801cbeaa | |||
| db2ba3d034 | |||
| aad3ea72f3 | |||
| 05c7b2fb66 | |||
| 60818c5be4 | |||
| ef04d37aa6 | |||
| f21d1c7b0f | |||
| e6c8a3ae7d | |||
|
|
1897c5a49a | ||
| a525791172 | |||
|
|
69dab70d7f | ||
| 1f61062313 | |||
| 90cea3567f | |||
| 6f99a2e670 | |||
| d414199f9b | |||
|
|
187811d4c1 | ||
| e038d5db8a | |||
| a700d40b7b | |||
| 61115865c6 | |||
|
|
3ff46f2d6b | ||
| 706583c143 | |||
|
|
3f31970706 | ||
| 6ddea3bdce | |||
|
|
1f236bca36 | ||
| 72e367c209 | |||
|
|
149c0f8715 | ||
|
|
320aa270d4 | ||
|
|
025b7d774f | ||
|
|
01b639b1e0 | ||
|
|
49cfffbb2f | ||
| f86e637da1 | |||
|
|
759ec80a44 | ||
|
|
0bd8c7c2a7 | ||
| 147c503ac8 | |||
|
|
3e0b8de1b6 | ||
|
|
92496b5620 | ||
|
|
9a602cafb6 | ||
| 89e189a288 | |||
| 152f0120fa | |||
|
|
b173afae71 | ||
| 8e713e5ff8 | |||
|
|
7e3851c93b | ||
|
|
6e7e068b12 | ||
|
|
58fe048857 | ||
|
|
1f0eb05b79 | ||
| cddd61f3ef | |||
|
|
85890a9c02 | ||
|
|
eb0289d593 | ||
|
|
9a757fd5df | ||
|
|
bf674e0da4 | ||
|
|
2e2a0f8218 | ||
|
|
97c659673b | ||
| 78e62b4bce | |||
|
|
2ffab4b4ba | ||
|
|
00de4dca23 | ||
| da04432b06 | |||
|
|
5b49332c2f | ||
|
|
208b2ec07a | ||
|
|
1a3ee146c1 | ||
|
|
a4169bd56e | ||
|
|
ba19f0b585 | ||
| f550696332 | |||
|
|
1aa625327b | ||
|
|
add494f26c | ||
| 7fa453b670 | |||
|
|
c7058874e0 | ||
|
|
7538a1b66e | ||
|
|
a3cb9c074f | ||
|
|
c5ebdfe45a | ||
|
|
48bf3bcbc4 | ||
|
|
d81ba627f5 | ||
| 1ad651d453 | |||
|
|
af45e9694d | ||
|
|
df795327f8 | ||
|
|
20e9472bb3 | ||
| a4106eb9d8 | |||
|
|
8623bfc0bd | ||
|
|
84e91569c8 | ||
|
|
73f34d0a9e | ||
|
|
0f76c47f93 | ||
| 015a5c5533 | |||
| 66e8bb09fa | |||
| 130f40cb3d | |||
| 0a6db264fc | |||
| 2f6d54d255 | |||
| df21275872 | |||
| 19fe90cf65 | |||
| 26d41a6ce8 | |||
|
|
a144295709 | ||
|
|
ba3f8c670b | ||
| 0f4754f598 | |||
|
|
ea14a170da | ||
|
|
9246584d4a | ||
|
|
a6e32159af | ||
|
|
d0835f0cd4 | ||
|
|
c128eb02e7 | ||
|
|
7e0b05047e | ||
|
|
96ee74e6ef | ||
|
|
d271cc5643 | ||
| 53e33f07b5 | |||
|
|
d919572357 | ||
|
|
801e446ff2 | ||
|
|
16d01087cb | ||
|
|
307f53d2a2 | ||
|
|
2cd0e9c517 | ||
|
|
12d447ce26 | ||
| cb48605fbd | |||
|
|
7a1b44ba3f | ||
|
|
ce1ec75502 | ||
|
|
1e1870c93c | ||
|
|
726f0bfc7b | ||
|
|
3047d6b881 | ||
|
|
dfefe916c5 | ||
|
|
9667e55324 | ||
|
|
f19d04410d | ||
| 39cf1bacfa | |||
|
|
878f06b8f6 | ||
|
|
b323a14694 | ||
|
|
ab32481bf5 | ||
|
|
b578a8371d | ||
|
|
9f37692079 | ||
|
|
825250b231 | ||
|
|
38a61fbc11 | ||
|
|
544dceba62 | ||
|
|
9c675f70dd | ||
|
|
651083ec72 | ||
|
|
c19174753e | ||
| 306380aa89 | |||
|
|
0a2df8ef35 | ||
|
|
e3d4bb59f7 | ||
|
|
fa4b27a0f8 | ||
|
|
3831e6cca9 | ||
|
|
ec5159b103 | ||
| 8ceff865be | |||
|
|
579da82588 | ||
|
|
2c59bf40f0 | ||
|
|
ee834c1b42 | ||
| 3436c95683 | |||
|
|
d8e60ffb1d | ||
|
|
740058a09c | ||
|
|
32bd4d4a53 | ||
|
|
10bec9ae20 | ||
|
|
38b359ad74 | ||
|
|
bcf93eb773 | ||
|
|
da872d32dd | ||
|
|
5cf8d9d6fd | ||
| 61dc0fb79d | |||
|
|
c68305ed90 | ||
|
|
942b8792d8 | ||
|
|
af2a59ee6a | ||
|
|
28a3ad1d6e | ||
| 73f8470388 | |||
|
|
492f29c48d | ||
|
|
a63c0ece19 | ||
|
|
1fe394f106 | ||
|
|
d0f851a59e | ||
|
|
21db5475b7 | ||
|
|
8608df1d96 | ||
|
|
413392ee69 | ||
| 2eb4141c49 | |||
|
|
258dfa07d7 | ||
|
|
5172de6693 | ||
|
|
00a49e4243 | ||
|
|
7b0dc3963d | ||
| b418fa2203 | |||
| 3d2fc07026 | |||
| 42446ef255 | |||
| 4b9bab67cc | |||
| 76a667f259 | |||
| 9284b9a67b | |||
| 3bb754dd72 | |||
| 016cd662bf | |||
| 7eee87ec90 | |||
| 6d8b23e7a6 | |||
| 445f4f4028 | |||
| 8e5930cd44 | |||
| 693989bba8 | |||
| 92c280b0e6 | |||
| 6c8e7002cd | |||
| 4caf9e2ae9 | |||
| 25908192ef | |||
| 5f74c3b308 | |||
| ae74159fdb | |||
| 8fbb711078 | |||
|
|
5c9a2bc49f | ||
|
|
2127cda63a | ||
|
|
fb2fdcda5b | ||
|
|
bd0164b52f | ||
|
|
16635c5eb7 | ||
|
|
b1124d265b | ||
| cd48a0a809 | |||
|
|
8c2983f5af | ||
|
|
3139b6570c | ||
|
|
016cdde654 | ||
|
|
2c2a7d06d1 | ||
|
|
8e24a1a0fb | ||
| 8fcc36ba69 | |||
|
|
3cd8e2abd3 | ||
|
|
04b6f7de3f | ||
|
|
03b83b1d7d | ||
|
|
7251e6a75e | ||
|
|
b59fb7fa77 | ||
|
|
10b3e9f8dd | ||
| 9c2113157f | |||
|
|
43028cb122 | ||
|
|
3d23c02eb5 | ||
|
|
a061fb3241 | ||
|
|
6ac9949e8a | ||
|
|
fcbc4b0661 | ||
| 173391d98d | |||
|
|
61d0a65c40 | ||
|
|
0f3a2c50c6 | ||
|
|
1b191591d5 | ||
|
|
179c3f9d29 | ||
|
|
1767a1729b | ||
|
|
46c3100a83 | ||
| ea17fa3dbb | |||
|
|
91925dcb19 | ||
|
|
0b424cedb4 | ||
|
|
0548d439d6 | ||
|
|
1b0439d132 | ||
|
|
524360db5f | ||
|
|
7be7449dc2 | ||
| 5e583199a3 | |||
|
|
bd434e2b3e | ||
|
|
be32858ab4 | ||
|
|
8e2153cb19 | ||
|
|
79bed9998d | ||
|
|
0d2908c69e | ||
|
|
189f3ea4d8 | ||
| c398a84496 | |||
|
|
d726e90aee | ||
|
|
6c797e4361 | ||
|
|
e13f4a393d | ||
|
|
1045d55efa | ||
|
|
85f7e2c265 | ||
| ece19fee60 | |||
|
|
7e424a28bb | ||
|
|
e44c480fba | ||
|
|
dfa3003e3e | ||
|
|
10ae52917a | ||
|
|
297f1cacad | ||
|
|
2d9d9bab50 | ||
| dafe170c88 | |||
|
|
00cd3c7ec4 | ||
|
|
1eaa4b5e97 | ||
|
|
7c5f3bbe06 | ||
|
|
7a4cc52e67 | ||
| e272ae9a82 | |||
| 799ab672de | |||
| b67f40d018 | |||
| ab04703287 | |||
| 19a9a8ff06 | |||
| eb11758b77 | |||
| f98d086297 | |||
|
|
978aefdaf2 | ||
|
|
3d824dec79 | ||
|
|
fdaeda09c8 | ||
|
|
cf6d3b6795 | ||
| 8055b2a435 | |||
|
|
b1f7a3126b | ||
|
|
1a5f073e79 | ||
|
|
994e92e882 | ||
|
|
28e253e00d | ||
|
|
09e5c42585 | ||
|
|
4a38a9bd93 | ||
|
|
803b92089b | ||
| 388430b8da | |||
|
|
e8095662b4 | ||
|
|
617354d603 | ||
|
|
97a4d9892f | ||
|
|
77ab4af0cc | ||
|
|
b453ec413d | ||
|
|
81a26a9765 | ||
|
|
edbe567933 | ||
| 3ae263a842 | |||
|
|
8f7a91a13d | ||
|
|
f8de7317f5 | ||
|
|
8ec41f87bd | ||
|
|
5de31cd740 | ||
|
|
1a421ea2da | ||
|
|
b4650d1e91 | ||
|
|
882f9447ef | ||
|
|
182faa602f | ||
| de75196751 | |||
|
|
da683d3ce8 | ||
|
|
ea27223dbe | ||
|
|
ee79514830 | ||
|
|
167e3910df | ||
|
|
5df80c1a98 | ||
|
|
e34a5e3c32 | ||
| a59a046e30 | |||
|
|
e6828b054b | ||
|
|
6bd619309b | ||
|
|
9fa195d653 | ||
|
|
8729a003c6 | ||
|
|
dd4be80edc | ||
| ad2aff6f1c | |||
|
|
b953043daf | ||
|
|
a4766dd2a3 | ||
|
|
ff5f502a0b | ||
|
|
d0d63fc1eb | ||
|
|
4ffcdbc827 | ||
| 7d0a1ad947 | |||
| 41c6948a7a | |||
| 830a90522b | |||
| 9f2e5be282 | |||
| ed74752bad | |||
| edcb319278 | |||
| 6f92d51ffe | |||
| 4fb7de4693 | |||
| 627befdd03 | |||
| a5ad6b4a21 | |||
|
|
48ce25529f | ||
| 9a243a24b4 | |||
|
|
680b5329e2 | ||
|
|
0c3dbe8dfe | ||
|
|
7629520f2c | ||
|
|
3c5986b881 | ||
|
|
8845827297 | ||
|
|
aa822b46fc | ||
|
|
f2899ccfcf | ||
|
|
867142258e | ||
|
|
f996c0eaf6 |
99
.dockerignore
Normal file
99
.dockerignore
Normal file
@@ -0,0 +1,99 @@
|
||||
# Dependencies
|
||||
node_modules
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
|
||||
# Testing
|
||||
coverage
|
||||
.nyc_output
|
||||
*.test.ts
|
||||
*.test.tsx
|
||||
*.spec.ts
|
||||
*.spec.tsx
|
||||
__tests__
|
||||
__mocks__
|
||||
.vitest
|
||||
|
||||
# Next.js
|
||||
.next
|
||||
out
|
||||
dist
|
||||
build
|
||||
|
||||
# Production
|
||||
/build
|
||||
|
||||
# Misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# Debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Local env files
|
||||
.env
|
||||
.env*.local
|
||||
.env.development
|
||||
.env.test
|
||||
.env.production
|
||||
|
||||
# Vercel
|
||||
.vercel
|
||||
|
||||
# TypeScript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
|
||||
# IDE
|
||||
.vscode
|
||||
.idea
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
.gitattributes
|
||||
|
||||
# Documentation
|
||||
*.md
|
||||
docs
|
||||
README*
|
||||
CHANGELOG*
|
||||
LICENSE
|
||||
|
||||
# CI/CD
|
||||
.github
|
||||
.gitlab-ci.yml
|
||||
azure-pipelines.yml
|
||||
|
||||
# Docker
|
||||
Dockerfile*
|
||||
docker-compose*
|
||||
.dockerignore
|
||||
|
||||
# Development
|
||||
.editorconfig
|
||||
.prettierrc*
|
||||
.eslintrc*
|
||||
.eslintignore
|
||||
|
||||
# Storybook
|
||||
.storybook
|
||||
storybook-static
|
||||
|
||||
# E2E
|
||||
e2e
|
||||
playwright-report
|
||||
test-results
|
||||
|
||||
# Temporary files
|
||||
tmp
|
||||
temp
|
||||
.tmp
|
||||
.cache
|
||||
221
.github/workflows/README.md
vendored
221
.github/workflows/README.md
vendored
@@ -4,35 +4,41 @@ This directory contains automated workflows for CI/CD, code quality, and compreh
|
||||
|
||||
## 🚦 Enterprise Gated Tree Workflow
|
||||
|
||||
MetaBuilder uses an **Enterprise Gated Tree Workflow** that ensures all code changes pass through multiple validation gates before being merged and deployed.
|
||||
MetaBuilder uses a **Unified Enterprise Gated Pipeline** that consolidates all CI/CD, deployment, and development assistance into a single workflow with clear gate visualization.
|
||||
|
||||
**📖 Complete Guide:** [Enterprise Gated Workflow Documentation](../../docs/ENTERPRISE_GATED_WORKFLOW.md)
|
||||
|
||||
### Quick Overview
|
||||
|
||||
All PRs must pass through 5 sequential gates:
|
||||
All PRs and deployments flow through 6 sequential gates in a single workflow:
|
||||
|
||||
1. **Gate 1: Code Quality** - Prisma, TypeScript, Lint, Security
|
||||
2. **Gate 2: Testing** - Unit, E2E, DBAL Daemon tests
|
||||
3. **Gate 3: Build & Package** - Application build, quality metrics
|
||||
4. **Gate 4: Review & Approval** - Human code review (1 approval required)
|
||||
5. **Gate 5: Deployment** - Staging (auto) → Production (manual approval)
|
||||
1. **Gate 1: Code Quality** - Prisma, TypeScript, Lint, Security (7 atomic steps)
|
||||
2. **Gate 2: Testing** - Unit, E2E, DBAL Daemon tests (3 atomic steps)
|
||||
3. **Gate 3: Build & Package** - Application build, quality metrics (2 atomic steps)
|
||||
4. **Gate 4: Development Assistance** - Architectural feedback, Copilot interaction (PR only)
|
||||
5. **Gate 5: Staging Deployment** - Automatic deployment to staging (main branch push)
|
||||
6. **Gate 6: Production Deployment** - Manual approval required (release/workflow_dispatch)
|
||||
|
||||
**Key Benefits:**
|
||||
- ✅ **Single unified workflow** - No confusion about which pipeline runs what
|
||||
- ✅ Sequential gates prevent wasted resources
|
||||
- ✅ Tree structure for clear visualization of all validation steps
|
||||
- ✅ Automatic merge after approval
|
||||
- ✅ Manual approval required for production
|
||||
- ✅ Clear visibility of gate status on PRs
|
||||
- ✅ Audit trail for all deployments
|
||||
- ✅ Conditional execution based on trigger (PR vs push vs release)
|
||||
- ✅ Complete audit trail for all deployments
|
||||
|
||||
### Legacy Workflow Cleanup
|
||||
### Pipeline Consolidation (Jan 2026)
|
||||
|
||||
**Deprecated and Removed (Dec 2025):**
|
||||
- ❌ `ci/ci.yml` - Replaced by `gated-ci.yml` (100% redundant)
|
||||
- ❌ `quality/deployment.yml` - Replaced by `gated-deployment.yml` (100% redundant)
|
||||
**Consolidated into `gated-pipeline.yml`:**
|
||||
- ✅ `gated-ci.yml` (1048 lines) - CI with gates 1-5
|
||||
- ✅ `gated-deployment.yml` (617 lines) - Deployment workflows
|
||||
- ✅ `development.yml` (360 lines) - Development assistance
|
||||
|
||||
**Modified:**
|
||||
- ⚡ `development.yml` - Refactored to remove redundant quality checks, kept unique Copilot features
|
||||
**Result:** Single 1287-line workflow with all functionality preserved and no duplication.
|
||||
|
||||
**Previous Deprecated and Removed (Dec 2025):**
|
||||
- ❌ `ci/ci.yml` - Replaced by gated workflows
|
||||
- ❌ `quality/deployment.yml` - Replaced by gated workflows
|
||||
|
||||
See [Legacy Pipeline Cruft Report](../../docs/LEGACY_PIPELINE_CRUFT_REPORT.md) for analysis.
|
||||
|
||||
@@ -50,9 +56,57 @@ All workflows are designed to work seamlessly with **GitHub Copilot** to assist
|
||||
|
||||
## Workflows Overview
|
||||
|
||||
### 🚦 Enterprise Gated Workflows (New)
|
||||
### 🚦 Enterprise Gated Workflow (Unified)
|
||||
|
||||
#### Issue and PR Triage (`triage.yml`) 🆕
|
||||
#### Enterprise Gated Pipeline (`gated-pipeline.yml`) 🆕
|
||||
**Triggered on:** Push to main/master/develop, Pull requests, Releases, Manual dispatch, Issue comments
|
||||
|
||||
**Consolidates:** All CI/CD, deployment, and development assistance in one workflow
|
||||
|
||||
**Structure:**
|
||||
- **Gate 1:** Code Quality - 7 validation steps
|
||||
- 1.1 Prisma Validation
|
||||
- 1.2 TypeScript Check (+ strict mode analysis)
|
||||
- 1.3 ESLint (+ any-type detection + ts-ignore detection)
|
||||
- 1.4 Security Scan (+ dependency audit)
|
||||
- 1.5 File Size Check
|
||||
- 1.6 Code Complexity Analysis
|
||||
- 1.7 Stub Implementation Detection
|
||||
- **Gate 2:** Testing - 3 validation steps
|
||||
- 2.1 Unit Tests (+ coverage analysis)
|
||||
- 2.2 E2E Tests
|
||||
- 2.3 DBAL Daemon Tests
|
||||
- **Gate 3:** Build & Package - 2 validation steps
|
||||
- 3.1 Application Build (+ bundle analysis)
|
||||
- 3.2 Quality Metrics (PR only)
|
||||
- **Gate 4:** Development Assistance (PR only)
|
||||
- 4.1 Code metrics analysis
|
||||
- 4.2 Architectural compliance
|
||||
- 4.3 Refactoring suggestions
|
||||
- 4.4 Copilot interaction handler
|
||||
- **Gate 5:** Staging Deployment (main branch push)
|
||||
- Automatic deployment to staging environment
|
||||
- Smoke tests and health checks
|
||||
- **Gate 6:** Production Deployment (release/manual)
|
||||
- Manual approval gate
|
||||
- Production deployment with health monitoring
|
||||
- Deployment tracking issue creation
|
||||
|
||||
**Features:**
|
||||
- Individual validation steps for superior visualization
|
||||
- **Gate artifacts** persisted between steps (30-day retention)
|
||||
- Conditional execution based on trigger type
|
||||
- Granular failure detection
|
||||
- Parallel execution within gates
|
||||
- Complete audit trail with JSON artifacts
|
||||
- Individual step timing and status
|
||||
- Sequential gate execution for efficiency
|
||||
- Clear gate status reporting on PRs
|
||||
- Summary report with all gate results
|
||||
|
||||
### 🔄 Supporting Workflows
|
||||
|
||||
#### Issue and PR Triage (`triage.yml`)
|
||||
**Triggered on:** Issues (opened/edited/reopened) and Pull Requests (opened/reopened/synchronize/edited)
|
||||
|
||||
**Purpose:** Quickly categorize inbound work so reviewers know what to look at first.
|
||||
@@ -61,97 +115,21 @@ All workflows are designed to work seamlessly with **GitHub Copilot** to assist
|
||||
- Sets a default priority and highlights beginner-friendly issues
|
||||
- Flags missing information (repro steps, expected/actual results, versions) with a checklist comment
|
||||
- For PRs, labels areas touched, estimates risk based on change size and critical paths, and prompts for test plans/screenshots/linked issues
|
||||
- Mentions **@copilot** to sanity-check the triage with GitHub-native AI (no external Codex webhooks)
|
||||
- Mentions **@copilot** to sanity-check the triage with GitHub-native AI
|
||||
|
||||
This workflow runs alongside the existing PR management jobs to keep triage lightweight while preserving the richer checks in the gated pipelines.
|
||||
This workflow runs alongside the gated pipeline to provide quick triage feedback.
|
||||
|
||||
#### 1. Enterprise Gated CI/CD Pipeline (`gated-ci.yml`)
|
||||
**Triggered on:** Push to main/master/develop branches, Pull requests
|
||||
### 🗑️ Legacy Workflows (Removed)
|
||||
|
||||
**Structure:**
|
||||
- **Gate 1:** Code Quality (Prisma, TypeScript, Lint, Security)
|
||||
- **Gate 2:** Testing (Unit, E2E, DBAL Daemon)
|
||||
- **Gate 3:** Build & Package (Build, Quality Metrics)
|
||||
- **Gate 4:** Review & Approval (Human review required)
|
||||
|
||||
**Features:**
|
||||
- Sequential gate execution for efficiency
|
||||
- Clear gate status reporting on PRs
|
||||
- Automatic progression through gates
|
||||
- Summary report with all gate results
|
||||
|
||||
**Best for:** Small to medium teams, straightforward workflows
|
||||
|
||||
#### 1a. Enterprise Gated CI/CD Pipeline - Atomic (`gated-ci-atomic.yml`) 🆕
|
||||
**Triggered on:** Push to main/master/develop branches, Pull requests
|
||||
|
||||
**Structure:**
|
||||
- **Gate 1:** Code Quality - 7 atomic steps
|
||||
- 1.1 Prisma Validation
|
||||
- 1.2 TypeScript Check (+ strict mode analysis)
|
||||
- 1.3 ESLint (+ any-type detection + ts-ignore detection)
|
||||
- 1.4 Security Scan (+ dependency audit)
|
||||
- 1.5 File Size Check
|
||||
- 1.6 Code Complexity Analysis
|
||||
- 1.7 Stub Implementation Detection
|
||||
- **Gate 2:** Testing - 3 atomic steps
|
||||
- 2.1 Unit Tests (+ coverage analysis)
|
||||
- 2.2 E2E Tests
|
||||
- 2.3 DBAL Daemon Tests
|
||||
- **Gate 3:** Build & Package - 2 atomic steps
|
||||
- 3.1 Application Build (+ bundle analysis)
|
||||
- 3.2 Quality Metrics
|
||||
- **Gate 4:** Review & Approval (Human review required)
|
||||
|
||||
**Features:**
|
||||
- **Atomic validation steps** for superior visualization
|
||||
- Each tool from `/tools` runs as separate job
|
||||
- **Gate artifacts** persisted between steps (30-day retention)
|
||||
- Granular failure detection
|
||||
- Parallel execution within gates
|
||||
- Complete audit trail with JSON artifacts
|
||||
- Individual step timing and status
|
||||
|
||||
**Best for:** Large teams, enterprise compliance, audit requirements
|
||||
|
||||
**Documentation:** See [Atomic Gated Workflow Architecture](../../docs/ATOMIC_GATED_WORKFLOW.md)
|
||||
|
||||
#### 2. Enterprise Gated Deployment (`gated-deployment.yml`)
|
||||
**Triggered on:** Push to main/master, Releases, Manual workflow dispatch
|
||||
|
||||
**Environments:**
|
||||
- **Staging:** Automatic deployment after merge to main
|
||||
- **Production:** Manual approval required
|
||||
|
||||
**Features:**
|
||||
- Pre-deployment validation (schema, security, size)
|
||||
- Breaking change detection and warnings
|
||||
- Environment-specific deployment paths
|
||||
- Post-deployment health checks
|
||||
- Automatic deployment tracking issues
|
||||
- Rollback preparation and procedures
|
||||
|
||||
**Gate 5:** Deployment gate ensures only reviewed code reaches production
|
||||
|
||||
### 🔄 Legacy Workflows (Still Active)
|
||||
|
||||
#### 3. CI/CD Workflow (`ci/ci.yml`) - ❌ REMOVED
|
||||
#### CI/CD Workflow (`ci/ci.yml`) - ❌ REMOVED
|
||||
**Status:** Deprecated and removed (Dec 2025)
|
||||
**Reason:** 100% functionality superseded by `gated-ci.yml`
|
||||
**Reason:** 100% functionality superseded by gated pipeline
|
||||
|
||||
**Jobs:** ~~Prisma Check, Lint, Build, E2E Tests, Quality Check~~
|
||||
|
||||
**Replacement:** Use `gated-ci.yml` for all CI/CD operations
|
||||
**Triggered on:** Push to main/master/develop branches, Pull requests
|
||||
**Replacement:** Consolidated into `gated-pipeline.yml`
|
||||
|
||||
**Jobs:**
|
||||
- **Prisma Check**: Validates database schema and generates Prisma client
|
||||
- **Lint**: Runs ESLint to check code quality
|
||||
- **Build**: Builds the application and uploads artifacts
|
||||
- **E2E Tests**: Runs Playwright end-to-end tests
|
||||
- **Quality Check**: Checks for console.log statements and TODO comments
|
||||
|
||||
### 4. Automated Code Review (`code-review.yml`)
|
||||
### 3. Automated Code Review (`pr/code-review.yml`)
|
||||
**Triggered on:** Pull request opened, synchronized, or reopened
|
||||
|
||||
**Features:**
|
||||
@@ -168,21 +146,21 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
- ✅ React best practices
|
||||
- ✅ File size warnings
|
||||
|
||||
### 5. Auto Merge (`auto-merge.yml`) - Updated for Gated Workflow
|
||||
### 4. Auto Merge (`pr/auto-merge.yml`) - Updated for Gated Pipeline
|
||||
**Triggered on:** PR approval, CI workflow completion
|
||||
|
||||
**Features:**
|
||||
- Automatically merges PRs when:
|
||||
- PR is approved by reviewers
|
||||
- All gates pass (supports both gated and legacy CI checks)
|
||||
- All gates pass in unified gated pipeline
|
||||
- No merge conflicts
|
||||
- PR is not in draft
|
||||
- **Automatically deletes the branch** after successful merge
|
||||
- Uses squash merge strategy
|
||||
- Posts comments about merge status
|
||||
- **Updated:** Now supports Enterprise Gated CI/CD Pipeline checks
|
||||
- **Updated:** Now supports unified Enterprise Gated Pipeline checks
|
||||
|
||||
### 6. Issue Triage (`issue-triage.yml`)
|
||||
### 5. Issue Triage (`issue-triage.yml`)
|
||||
**Triggered on:** New issues opened, issues labeled
|
||||
|
||||
**Features:**
|
||||
@@ -194,7 +172,7 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
- Suggests automated fix attempts for simple issues
|
||||
- Can create fix branches automatically with `create-pr` label
|
||||
|
||||
### 7. PR Management (`pr-management.yml`)
|
||||
### 6. PR Management (`pr/pr-management.yml`)
|
||||
**Triggered on:** PR opened, synchronized, labeled
|
||||
|
||||
**Features:**
|
||||
@@ -206,7 +184,7 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
- Links related issues automatically
|
||||
- Posts comments on related issues
|
||||
|
||||
### 8. Merge Conflict Check (`merge-conflict-check.yml`)
|
||||
### 7. Merge Conflict Check (`pr/merge-conflict-check.yml`)
|
||||
**Triggered on:** PR opened/synchronized, push to main/master
|
||||
|
||||
**Features:**
|
||||
@@ -215,7 +193,7 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
- Adds/removes `merge-conflict` label
|
||||
- Fails CI if conflicts exist
|
||||
|
||||
### 9. Planning & Design (`planning.yml`) 🆕
|
||||
### 8. Planning & Design (`quality/planning.yml`) 🆕
|
||||
**Triggered on:** Issues opened or labeled with enhancement/feature-request
|
||||
|
||||
**Features:**
|
||||
@@ -229,28 +207,7 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
|
||||
**SDLC Phase:** Planning & Design
|
||||
|
||||
### 10. Development Assistance (`development.yml`) 🆕 - Refactored
|
||||
**Triggered on:** Pull request updates, @copilot mentions
|
||||
|
||||
**Features:**
|
||||
- **Architectural Compliance Feedback**: Monitors declarative ratio and component sizes
|
||||
- **@copilot Interaction Handler**: Responds to @copilot mentions with context-aware guidance
|
||||
- **Refactoring Suggestions**: Identifies opportunities for improvement
|
||||
- Provides architectural reminders and best practices
|
||||
|
||||
**Note:** Refactored to remove redundant quality checks (lint/build now in gated-ci.yml)
|
||||
|
||||
**SDLC Phase:** Development
|
||||
|
||||
### 11. Deployment & Monitoring (`deployment.yml`) - ❌ REMOVED
|
||||
**Status:** Deprecated and removed (Dec 2025)
|
||||
**Reason:** 100% functionality superseded by `gated-deployment.yml` with improvements
|
||||
|
||||
**Jobs:** ~~Pre-Deployment Validation, Deployment Summary, Post-Deployment Health Checks~~
|
||||
|
||||
**Replacement:** Use `gated-deployment.yml` for all deployment operations
|
||||
|
||||
### 12. Code Size Limits (`size-limits.yml`)
|
||||
### 9. Code Size Limits (`quality/size-limits.yml`)
|
||||
**Triggered on:** Pull requests, pushes to main (when source files change)
|
||||
|
||||
**Features:**
|
||||
@@ -264,11 +221,11 @@ This workflow runs alongside the existing PR management jobs to keep triage ligh
|
||||
|
||||
```
|
||||
┌─────────────┐
|
||||
│ Planning │ ← planning.yml (Architecture Review, PRD Check)
|
||||
│ Planning │ ← quality/planning.yml (Architecture Review, PRD Check)
|
||||
└──────┬──────┘
|
||||
↓
|
||||
┌─────────────┐
|
||||
│ Development │ ← development.yml (Quality Feedback, Refactoring)
|
||||
│ Development │ ← gated-pipeline.yml Gate 4 (Dev Feedback, Copilot)
|
||||
└──────┬──────┘
|
||||
↓
|
||||
┌─────────────┐
|
||||
|
||||
139
.github/workflows/container-build.yml
vendored
Normal file
139
.github/workflows/container-build.yml
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
name: Build and Push GHCR Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
name: Build and Push Docker Images
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- image: nextjs-app
|
||||
context: .
|
||||
dockerfile: ./frontends/nextjs/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- image: dbal-daemon
|
||||
context: ./dbal/production
|
||||
dockerfile: ./dbal/production/build-config/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels)
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=sha,prefix={{branch}}-
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
BUILD_DATE=${{ github.event.head_commit.timestamp }}
|
||||
VCS_REF=${{ github.sha }}
|
||||
VERSION=${{ steps.meta.outputs.version }}
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/attest-build-provenance@v2
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
|
||||
subject-digest: ${{ steps.build.outputs.digest }}
|
||||
push-to-registry: true
|
||||
|
||||
security-scan:
|
||||
name: Security Scan Images
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-and-push
|
||||
if: github.event_name != 'pull_request'
|
||||
strategy:
|
||||
matrix:
|
||||
image: [nextjs-app, dbal-daemon]
|
||||
steps:
|
||||
- name: Run Trivy vulnerability scanner
|
||||
uses: aquasecurity/trivy-action@master
|
||||
with:
|
||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results-${{ matrix.image }}.sarif'
|
||||
|
||||
- name: Upload Trivy results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: 'trivy-results-${{ matrix.image }}.sarif'
|
||||
category: container-${{ matrix.image }}
|
||||
|
||||
publish-manifest:
|
||||
name: Create Multi-Arch Manifest
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-and-push
|
||||
if: github.event_name != 'pull_request'
|
||||
steps:
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Create and push manifest for all images
|
||||
run: |
|
||||
for image in nextjs-app dbal-daemon; do
|
||||
docker manifest create \
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/$image:${{ github.ref_name }} \
|
||||
--amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/$image:${{ github.ref_name }}-amd64 \
|
||||
--amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/$image:${{ github.ref_name }}-arm64
|
||||
docker manifest push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/$image:${{ github.ref_name }}
|
||||
done
|
||||
360
.github/workflows/development.yml
vendored
360
.github/workflows/development.yml
vendored
@@ -1,360 +0,0 @@
|
||||
name: Development Assistance
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, ready_for_review]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
code-quality-feedback:
|
||||
name: Continuous Quality Feedback
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'pull_request' && !github.event.pull_request.draft
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Analyze code metrics (no redundant checks)
|
||||
id: quality
|
||||
run: |
|
||||
# Note: Lint/build/tests are handled by gated-ci.yml
|
||||
# This job only collects metrics for architectural feedback
|
||||
|
||||
# Count TypeScript files and their sizes
|
||||
TOTAL_TS_FILES=$(find src -name "*.ts" -o -name "*.tsx" 2>/dev/null | wc -l)
|
||||
LARGE_FILES=$(find src -name "*.ts" -o -name "*.tsx" -exec wc -l {} \; 2>/dev/null | awk '$1 > 150 {print $2}' | wc -l)
|
||||
|
||||
echo "total_ts_files=$TOTAL_TS_FILES" >> $GITHUB_OUTPUT
|
||||
echo "large_files=$LARGE_FILES" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check for declarative vs imperative balance
|
||||
JSON_FILES=$(find src packages -name "*.json" 2>/dev/null | wc -l)
|
||||
LUA_SCRIPTS=$(find src packages -name "*.lua" 2>/dev/null | wc -l)
|
||||
|
||||
echo "json_files=$JSON_FILES" >> $GITHUB_OUTPUT
|
||||
echo "lua_scripts=$LUA_SCRIPTS" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check architectural compliance
|
||||
id: architecture
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
let issues = [];
|
||||
let suggestions = [];
|
||||
|
||||
// Get changed files
|
||||
let changedFiles = [];
|
||||
if (context.eventName === 'pull_request') {
|
||||
const { data: files } = await github.rest.pulls.listFiles({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
});
|
||||
changedFiles = files.map(f => f.filename);
|
||||
}
|
||||
|
||||
// Check for hardcoded components outside ui/
|
||||
const hardcodedComponents = changedFiles.filter(f =>
|
||||
f.endsWith('.tsx') &&
|
||||
f.includes('src/components/') &&
|
||||
!f.includes('src/components/ui/') &&
|
||||
!f.includes('src/components/shared/') &&
|
||||
!['RenderComponent', 'FieldRenderer', 'GenericPage'].some(g => f.includes(g))
|
||||
);
|
||||
|
||||
if (hardcodedComponents.length > 0) {
|
||||
suggestions.push(`Consider if these components could be declarative: ${hardcodedComponents.join(', ')}`);
|
||||
}
|
||||
|
||||
// Check for database changes without seed data
|
||||
const schemaChanged = changedFiles.some(f => f.includes('schema.prisma'));
|
||||
const seedChanged = changedFiles.some(f => f.includes('seed'));
|
||||
|
||||
if (schemaChanged && !seedChanged) {
|
||||
suggestions.push('Database schema changed but no seed data updates detected. Consider updating seed data.');
|
||||
}
|
||||
|
||||
// Check for new routes without PageRoutes table updates
|
||||
const routeFiles = changedFiles.filter(f => f.includes('Route') || f.includes('route'));
|
||||
if (routeFiles.length > 0) {
|
||||
suggestions.push('Route changes detected. Ensure PageRoutes table is updated for dynamic routing.');
|
||||
}
|
||||
|
||||
// Check for large TypeScript files
|
||||
const largeFiles = parseInt('${{ steps.quality.outputs.large_files }}');
|
||||
if (largeFiles > 0) {
|
||||
issues.push(`${largeFiles} TypeScript files exceed 150 lines. Consider breaking them into smaller components.`);
|
||||
}
|
||||
|
||||
return { issues, suggestions };
|
||||
|
||||
- name: Provide development feedback
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const analysis = JSON.parse('${{ steps.architecture.outputs.result }}');
|
||||
const totalFiles = parseInt('${{ steps.quality.outputs.total_ts_files }}');
|
||||
const largeFiles = parseInt('${{ steps.quality.outputs.large_files }}');
|
||||
const jsonFiles = parseInt('${{ steps.quality.outputs.json_files }}');
|
||||
const luaScripts = parseInt('${{ steps.quality.outputs.lua_scripts }}');
|
||||
|
||||
let comment = `## 💻 Development Quality Feedback\n\n`;
|
||||
|
||||
comment += `### 📊 Code Metrics\n\n`;
|
||||
comment += `- TypeScript files: ${totalFiles}\n`;
|
||||
comment += `- Files >150 LOC: ${largeFiles} ${largeFiles > 0 ? '⚠️' : '✅'}\n`;
|
||||
comment += `- JSON config files: ${jsonFiles}\n`;
|
||||
comment += `- Lua scripts: ${luaScripts}\n`;
|
||||
comment += `- Declarative ratio: ${((jsonFiles + luaScripts) / Math.max(totalFiles, 1) * 100).toFixed(1)}%\n\n`;
|
||||
|
||||
if (analysis.issues.length > 0) {
|
||||
comment += `### ⚠️ Architectural Issues\n\n`;
|
||||
analysis.issues.forEach(issue => comment += `- ${issue}\n`);
|
||||
comment += '\n';
|
||||
}
|
||||
|
||||
if (analysis.suggestions.length > 0) {
|
||||
comment += `### 💡 Suggestions\n\n`;
|
||||
analysis.suggestions.forEach(suggestion => comment += `- ${suggestion}\n`);
|
||||
comment += '\n';
|
||||
}
|
||||
|
||||
comment += `### 🎯 Project Goals Reminder\n\n`;
|
||||
comment += `- **Declarative First:** Prefer JSON + Lua over TypeScript\n`;
|
||||
comment += `- **Component Size:** Keep files under 150 LOC\n`;
|
||||
comment += `- **Generic Renderers:** Use RenderComponent for dynamic components\n`;
|
||||
comment += `- **Database-Driven:** Store configuration in database, not code\n`;
|
||||
comment += `- **Package-Based:** Organize features as importable packages\n\n`;
|
||||
|
||||
comment += `**@copilot** can help refactor code to better align with these principles.\n\n`;
|
||||
comment += `📖 See [Architecture Guidelines](/.github/copilot-instructions.md)`;
|
||||
|
||||
// Check if we already commented
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const botComment = comments.find(c =>
|
||||
c.user.type === 'Bot' && c.body.includes('Development Quality Feedback')
|
||||
);
|
||||
|
||||
if (botComment) {
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: botComment.id,
|
||||
body: comment
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: comment
|
||||
});
|
||||
}
|
||||
|
||||
copilot-interaction:
|
||||
name: Handle Copilot Mentions
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'issue_comment' &&
|
||||
contains(github.event.comment.body, '@copilot')
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Parse Copilot request
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const comment = context.payload.comment.body.toLowerCase();
|
||||
const issue = context.payload.issue;
|
||||
|
||||
let response = `## 🤖 Copilot Assistance\n\n`;
|
||||
|
||||
// Determine what the user is asking for
|
||||
if (comment.includes('implement') || comment.includes('fix this')) {
|
||||
response += `To implement this with Copilot assistance:\n\n`;
|
||||
response += `1. **Create a branch:** \`git checkout -b feature/issue-${issue.number}\`\n`;
|
||||
response += `2. **Use Copilot in your IDE** to generate code with context from:\n`;
|
||||
response += ` - [Copilot Instructions](/.github/copilot-instructions.md)\n`;
|
||||
response += ` - [PRD.md](/PRD.md)\n`;
|
||||
response += ` - Existing package structure in \`/packages/\`\n`;
|
||||
response += `3. **Follow the architectural principles:**\n`;
|
||||
response += ` - Declarative over imperative\n`;
|
||||
response += ` - Database-driven configuration\n`;
|
||||
response += ` - Generic renderers vs hardcoded components\n`;
|
||||
response += `4. **Test your changes:** \`npm run lint && npm run test:e2e\`\n`;
|
||||
response += `5. **Create a PR** - The automated workflows will review it\n\n`;
|
||||
}
|
||||
|
||||
if (comment.includes('review') || comment.includes('check')) {
|
||||
response += `Copilot can review this through:\n\n`;
|
||||
response += `- **Automated Code Review** workflow (runs on PRs)\n`;
|
||||
response += `- **Development Assistance** workflow (runs on pushes)\n`;
|
||||
response += `- **Planning & Design** workflow (runs on feature requests)\n\n`;
|
||||
response += `Create a PR to trigger comprehensive review!\n\n`;
|
||||
}
|
||||
|
||||
if (comment.includes('architecture') || comment.includes('design')) {
|
||||
response += `### 🏗️ Architectural Guidance\n\n`;
|
||||
response += `MetaBuilder follows these principles:\n\n`;
|
||||
response += `1. **5-Level Architecture:** User → Admin → God → SuperGod levels\n`;
|
||||
response += `2. **Multi-Tenant:** Isolated tenant instances with independent configs\n`;
|
||||
response += `3. **Declarative Components:** JSON config + Lua scripts, not TSX\n`;
|
||||
response += `4. **Package System:** Self-contained, importable feature bundles\n`;
|
||||
response += `5. **Database-First:** All config in Prisma, not hardcoded\n\n`;
|
||||
response += `📖 Full details: [PRD.md](/PRD.md)\n\n`;
|
||||
}
|
||||
|
||||
if (comment.includes('test') || comment.includes('e2e')) {
|
||||
response += `### 🧪 Testing with Copilot\n\n`;
|
||||
response += `\`\`\`bash\n`;
|
||||
response += `# Run E2E tests\n`;
|
||||
response += `npm run test:e2e\n\n`;
|
||||
response += `# Run with UI\n`;
|
||||
response += `npm run test:e2e:ui\n\n`;
|
||||
response += `# Run linter\n`;
|
||||
response += `npm run lint\n`;
|
||||
response += `\`\`\`\n\n`;
|
||||
response += `Use Copilot in your IDE to:\n`;
|
||||
response += `- Generate test cases based on user stories\n`;
|
||||
response += `- Write Playwright selectors and assertions\n`;
|
||||
response += `- Create mock data for tests\n\n`;
|
||||
}
|
||||
|
||||
if (comment.includes('help') || (!comment.includes('implement') && !comment.includes('review') && !comment.includes('architecture') && !comment.includes('test'))) {
|
||||
response += `### 🆘 How to Use Copilot\n\n`;
|
||||
response += `Mention **@copilot** in comments with:\n\n`;
|
||||
response += `- \`@copilot implement this\` - Get implementation guidance\n`;
|
||||
response += `- \`@copilot review this\` - Request code review\n`;
|
||||
response += `- \`@copilot architecture\` - Get architectural guidance\n`;
|
||||
response += `- \`@copilot test this\` - Get testing guidance\n`;
|
||||
response += `- \`@copilot fix this issue\` - Request automated fix\n\n`;
|
||||
response += `**In your IDE:**\n`;
|
||||
response += `- Use GitHub Copilot with context from [Copilot Instructions](/.github/copilot-instructions.md)\n`;
|
||||
response += `- Reference the [PRD](/PRD.md) when prompting\n`;
|
||||
response += `- Follow patterns from existing packages in \`/packages/\`\n\n`;
|
||||
}
|
||||
|
||||
response += `---\n`;
|
||||
response += `*This is an automated response. For detailed Copilot assistance, use the extension in your IDE with project context.*`;
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issue.number,
|
||||
body: response
|
||||
});
|
||||
|
||||
suggest-refactoring:
|
||||
name: Suggest Refactoring Opportunities
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Analyze refactoring opportunities
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const { data: files } = await github.rest.pulls.listFiles({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
});
|
||||
|
||||
let opportunities = [];
|
||||
|
||||
// Look for opportunities in changed files
|
||||
for (const file of files) {
|
||||
const patch = file.patch || '';
|
||||
|
||||
// Check for repeated code patterns
|
||||
if (patch.split('\n').length > 100) {
|
||||
opportunities.push({
|
||||
file: file.filename,
|
||||
type: 'Size',
|
||||
suggestion: 'Large changeset - consider breaking into smaller PRs or extracting common utilities'
|
||||
});
|
||||
}
|
||||
|
||||
// Check for hardcoded values
|
||||
if (patch.match(/['"][A-Z_]{3,}['"]\s*:/)) {
|
||||
opportunities.push({
|
||||
file: file.filename,
|
||||
type: 'Configuration',
|
||||
suggestion: 'Hardcoded constants detected - consider moving to database configuration'
|
||||
});
|
||||
}
|
||||
|
||||
// Check for new TSX components
|
||||
if (file.filename.includes('components/') && file.filename.endsWith('.tsx') && file.status === 'added') {
|
||||
opportunities.push({
|
||||
file: file.filename,
|
||||
type: 'Architecture',
|
||||
suggestion: 'New component added - could this be implemented declaratively with JSON + Lua?'
|
||||
});
|
||||
}
|
||||
|
||||
// Check for inline styles or complex class strings
|
||||
if (patch.includes('style={{') || patch.match(/className="[^"]{50,}"/)) {
|
||||
opportunities.push({
|
||||
file: file.filename,
|
||||
type: 'Styling',
|
||||
suggestion: 'Complex styling detected - consider extracting to theme configuration'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (opportunities.length > 0) {
|
||||
let comment = `## 🔄 Refactoring Opportunities\n\n`;
|
||||
comment += `**@copilot** identified potential improvements:\n\n`;
|
||||
|
||||
const grouped = {};
|
||||
opportunities.forEach(opp => {
|
||||
if (!grouped[opp.type]) grouped[opp.type] = [];
|
||||
grouped[opp.type].push(opp);
|
||||
});
|
||||
|
||||
for (const [type, opps] of Object.entries(grouped)) {
|
||||
comment += `### ${type}\n\n`;
|
||||
opps.forEach(opp => {
|
||||
comment += `- **${opp.file}**: ${opp.suggestion}\n`;
|
||||
});
|
||||
comment += '\n';
|
||||
}
|
||||
|
||||
comment += `---\n`;
|
||||
comment += `These are suggestions, not requirements. Consider them as part of continuous improvement.\n\n`;
|
||||
comment += `Use **@copilot** in your IDE to help implement these refactorings.`;
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: comment
|
||||
});
|
||||
}
|
||||
610
.github/workflows/gated-ci.yml
vendored
610
.github/workflows/gated-ci.yml
vendored
@@ -1,610 +0,0 @@
|
||||
name: Enterprise Gated CI/CD Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main, master, develop ]
|
||||
pull_request:
|
||||
branches: [ main, master, develop ]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
checks: write
|
||||
statuses: write
|
||||
|
||||
# Enterprise Gated Tree Workflow
|
||||
# Changes must pass through 5 gates before merge:
|
||||
# Gate 1: Code Quality (lint, typecheck, security)
|
||||
# Gate 2: Testing (unit, E2E)
|
||||
# Gate 3: Build & Package
|
||||
# Gate 4: Review & Approval
|
||||
# Gate 5: Deployment (staging → production with manual approval)
|
||||
|
||||
jobs:
|
||||
# ============================================================================
|
||||
# GATE 1: Code Quality Gates
|
||||
# ============================================================================
|
||||
|
||||
gate-1-start:
|
||||
name: "Gate 1: Code Quality - Starting"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Gate 1 checkpoint
|
||||
run: |
|
||||
echo "🚦 GATE 1: CODE QUALITY VALIDATION"
|
||||
echo "================================================"
|
||||
echo "Running: Prisma validation, TypeScript check, Linting, Security scan"
|
||||
echo "Status: IN PROGRESS"
|
||||
|
||||
prisma-check:
|
||||
name: "Gate 1.1: Validate Prisma Schema"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-1-start
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Validate Prisma Schema
|
||||
run: npx prisma validate --schema=../../prisma/schema.prisma
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
typecheck:
|
||||
name: "Gate 1.2: TypeScript Type Check"
|
||||
runs-on: ubuntu-latest
|
||||
needs: prisma-check
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Run TypeScript type check
|
||||
run: npm run typecheck
|
||||
|
||||
lint:
|
||||
name: "Gate 1.3: Lint Code"
|
||||
runs-on: ubuntu-latest
|
||||
needs: prisma-check
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Run ESLint
|
||||
run: npm run lint
|
||||
|
||||
security-scan:
|
||||
name: "Gate 1.4: Security Scan"
|
||||
runs-on: ubuntu-latest
|
||||
needs: prisma-check
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Run security audit
|
||||
run: npm audit --audit-level=moderate
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check for vulnerable dependencies
|
||||
run: |
|
||||
echo "Checking for known vulnerabilities..."
|
||||
npm audit --json > audit-results.json 2>&1 || true
|
||||
if [ -f audit-results.json ]; then
|
||||
echo "Security audit completed"
|
||||
fi
|
||||
|
||||
gate-1-complete:
|
||||
name: "Gate 1: Code Quality - Passed ✅"
|
||||
runs-on: ubuntu-latest
|
||||
needs: [prisma-check, typecheck, lint, security-scan]
|
||||
steps:
|
||||
- name: Gate 1 passed
|
||||
run: |
|
||||
echo "✅ GATE 1 PASSED: CODE QUALITY"
|
||||
echo "================================================"
|
||||
echo "✓ Prisma schema validated"
|
||||
echo "✓ TypeScript types checked"
|
||||
echo "✓ Code linted"
|
||||
echo "✓ Security scan completed"
|
||||
echo ""
|
||||
echo "Proceeding to Gate 2: Testing..."
|
||||
|
||||
# ============================================================================
|
||||
# GATE 2: Testing Gates
|
||||
# ============================================================================
|
||||
|
||||
gate-2-start:
|
||||
name: "Gate 2: Testing - Starting"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-1-complete
|
||||
steps:
|
||||
- name: Gate 2 checkpoint
|
||||
run: |
|
||||
echo "🚦 GATE 2: TESTING VALIDATION"
|
||||
echo "================================================"
|
||||
echo "Running: Unit tests, E2E tests, DBAL daemon tests"
|
||||
echo "Status: IN PROGRESS"
|
||||
|
||||
test-unit:
|
||||
name: "Gate 2.1: Unit Tests"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-2-start
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Run unit tests
|
||||
run: npm run test:unit
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Upload coverage report
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: coverage-report
|
||||
path: frontends/nextjs/coverage/
|
||||
retention-days: 7
|
||||
|
||||
test-e2e:
|
||||
name: "Gate 2.2: E2E Tests"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-2-start
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
- name: Run Playwright tests
|
||||
run: npm run test:e2e
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Upload test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: playwright-report
|
||||
path: frontends/nextjs/playwright-report/
|
||||
retention-days: 7
|
||||
|
||||
test-dbal-daemon:
|
||||
name: "Gate 2.3: DBAL Daemon E2E"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-2-start
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
- name: Run DBAL daemon suite
|
||||
run: npm run test:e2e:dbal-daemon
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Upload daemon test report
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: playwright-report-dbal-daemon
|
||||
path: frontends/nextjs/playwright-report/
|
||||
retention-days: 7
|
||||
|
||||
gate-2-complete:
|
||||
name: "Gate 2: Testing - Passed ✅"
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-unit, test-e2e, test-dbal-daemon]
|
||||
steps:
|
||||
- name: Gate 2 passed
|
||||
run: |
|
||||
echo "✅ GATE 2 PASSED: TESTING"
|
||||
echo "================================================"
|
||||
echo "✓ Unit tests passed"
|
||||
echo "✓ E2E tests passed"
|
||||
echo "✓ DBAL daemon tests passed"
|
||||
echo ""
|
||||
echo "Proceeding to Gate 3: Build & Package..."
|
||||
|
||||
# ============================================================================
|
||||
# GATE 3: Build & Package Gates
|
||||
# ============================================================================
|
||||
|
||||
gate-3-start:
|
||||
name: "Gate 3: Build & Package - Starting"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-2-complete
|
||||
steps:
|
||||
- name: Gate 3 checkpoint
|
||||
run: |
|
||||
echo "🚦 GATE 3: BUILD & PACKAGE VALIDATION"
|
||||
echo "================================================"
|
||||
echo "Running: Application build, artifact packaging"
|
||||
echo "Status: IN PROGRESS"
|
||||
|
||||
build:
|
||||
name: "Gate 3.1: Build Application"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-3-start
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
outputs:
|
||||
build-success: ${{ steps.build-step.outcome }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Build
|
||||
id: build-step
|
||||
run: npm run build
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: dist
|
||||
path: frontends/nextjs/.next/
|
||||
retention-days: 7
|
||||
|
||||
quality-check:
|
||||
name: "Gate 3.2: Code Quality Metrics"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-3-start
|
||||
if: github.event_name == 'pull_request'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Check for console.log statements
|
||||
run: |
|
||||
if git diff origin/${{ github.base_ref }}...HEAD -- '*.ts' '*.tsx' '*.js' '*.jsx' | grep -E '^\+.*console\.(log|debug|info)'; then
|
||||
echo "⚠️ Found console.log statements in the changes"
|
||||
echo "Please remove console.log statements before merging"
|
||||
exit 1
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check for TODO comments
|
||||
run: |
|
||||
TODO_COUNT=$(git diff origin/${{ github.base_ref }}...HEAD -- '*.ts' '*.tsx' '*.js' '*.jsx' | grep -E '^\+.*TODO|FIXME' | wc -l)
|
||||
if [ $TODO_COUNT -gt 0 ]; then
|
||||
echo "⚠️ Found $TODO_COUNT TODO/FIXME comments in the changes"
|
||||
echo "Please address TODO comments before merging or create issues for them"
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
gate-3-complete:
|
||||
name: "Gate 3: Build & Package - Passed ✅"
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, quality-check]
|
||||
if: always() && needs.build.result == 'success' && (needs.quality-check.result == 'success' || needs.quality-check.result == 'skipped')
|
||||
steps:
|
||||
- name: Gate 3 passed
|
||||
run: |
|
||||
echo "✅ GATE 3 PASSED: BUILD & PACKAGE"
|
||||
echo "================================================"
|
||||
echo "✓ Application built successfully"
|
||||
echo "✓ Build artifacts packaged"
|
||||
echo "✓ Quality metrics validated"
|
||||
echo ""
|
||||
echo "Proceeding to Gate 4: Review & Approval..."
|
||||
|
||||
# ============================================================================
|
||||
# GATE 4: Review & Approval Gate (PR only)
|
||||
# ============================================================================
|
||||
|
||||
gate-4-review-required:
|
||||
name: "Gate 4: Review & Approval Required"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-3-complete
|
||||
if: github.event_name == 'pull_request'
|
||||
steps:
|
||||
- name: Check PR approval status
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const { data: reviews } = await github.rest.pulls.listReviews({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
|
||||
const latestReviews = {};
|
||||
for (const review of reviews) {
|
||||
latestReviews[review.user.login] = review.state;
|
||||
}
|
||||
|
||||
const hasApproval = Object.values(latestReviews).includes('APPROVED');
|
||||
const hasRequestChanges = Object.values(latestReviews).includes('CHANGES_REQUESTED');
|
||||
|
||||
console.log('Review Status:');
|
||||
console.log('==============');
|
||||
console.log('Approvals:', Object.values(latestReviews).filter(s => s === 'APPROVED').length);
|
||||
console.log('Change Requests:', Object.values(latestReviews).filter(s => s === 'CHANGES_REQUESTED').length);
|
||||
|
||||
if (hasRequestChanges) {
|
||||
core.setFailed('❌ Changes requested - PR cannot proceed to deployment');
|
||||
} else if (!hasApproval) {
|
||||
core.notice('⏳ PR approval required before merge - this gate will pass when approved');
|
||||
} else {
|
||||
console.log('✅ PR approved - gate passed');
|
||||
}
|
||||
|
||||
gate-4-complete:
|
||||
name: "Gate 4: Review & Approval - Status"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-4-review-required
|
||||
if: always() && github.event_name == 'pull_request'
|
||||
steps:
|
||||
- name: Gate 4 status
|
||||
run: |
|
||||
echo "🚦 GATE 4: REVIEW & APPROVAL"
|
||||
echo "================================================"
|
||||
echo "Note: This gate requires human approval"
|
||||
echo "PR must be approved by reviewers before auto-merge"
|
||||
echo ""
|
||||
if [ "${{ needs.gate-4-review-required.result }}" == "success" ]; then
|
||||
echo "✅ Review approval received"
|
||||
echo "Proceeding to Gate 5: Deployment (post-merge)..."
|
||||
else
|
||||
echo "⏳ Awaiting review approval"
|
||||
echo "Gate will complete when PR is approved"
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# GATE 5: Deployment Gate (post-merge, main branch only)
|
||||
# ============================================================================
|
||||
|
||||
gate-5-deployment-ready:
|
||||
name: "Gate 5: Deployment Ready"
|
||||
runs-on: ubuntu-latest
|
||||
needs: gate-3-complete
|
||||
if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/master')
|
||||
steps:
|
||||
- name: Deployment gate checkpoint
|
||||
run: |
|
||||
echo "🚦 GATE 5: DEPLOYMENT VALIDATION"
|
||||
echo "================================================"
|
||||
echo "Code merged to main branch"
|
||||
echo "Ready for staging deployment"
|
||||
echo ""
|
||||
echo "✅ ALL GATES PASSED"
|
||||
echo "================================================"
|
||||
echo "✓ Gate 1: Code Quality"
|
||||
echo "✓ Gate 2: Testing"
|
||||
echo "✓ Gate 3: Build & Package"
|
||||
echo "✓ Gate 4: Review & Approval"
|
||||
echo "✓ Gate 5: Ready for Deployment"
|
||||
echo ""
|
||||
echo "Note: Production deployment requires manual approval"
|
||||
echo "Use workflow_dispatch with environment='production'"
|
||||
|
||||
# ============================================================================
|
||||
# Summary Report
|
||||
# ============================================================================
|
||||
|
||||
gates-summary:
|
||||
name: "🎯 Gates Summary"
|
||||
runs-on: ubuntu-latest
|
||||
needs: [gate-1-complete, gate-2-complete, gate-3-complete]
|
||||
if: always()
|
||||
steps:
|
||||
- name: Generate gates report
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const gates = [
|
||||
{ name: 'Gate 1: Code Quality', status: '${{ needs.gate-1-complete.result }}' },
|
||||
{ name: 'Gate 2: Testing', status: '${{ needs.gate-2-complete.result }}' },
|
||||
{ name: 'Gate 3: Build & Package', status: '${{ needs.gate-3-complete.result }}' }
|
||||
];
|
||||
|
||||
let summary = '## 🚦 Enterprise Gated CI/CD Pipeline Summary\n\n';
|
||||
|
||||
for (const gate of gates) {
|
||||
const icon = gate.status === 'success' ? '✅' :
|
||||
gate.status === 'failure' ? '❌' :
|
||||
gate.status === 'skipped' ? '⏭️' : '⏳';
|
||||
summary += `${icon} **${gate.name}**: ${gate.status}\n`;
|
||||
}
|
||||
|
||||
if (context.eventName === 'pull_request') {
|
||||
summary += '\n### Next Steps\n';
|
||||
summary += '- ✅ All CI gates passed\n';
|
||||
summary += '- ⏳ Awaiting PR approval (Gate 4)\n';
|
||||
summary += '- 📋 Once approved, PR will auto-merge\n';
|
||||
summary += '- 🚀 Deployment gates (Gate 5) run after merge to main\n';
|
||||
}
|
||||
|
||||
console.log(summary);
|
||||
|
||||
// Post comment on PR if applicable
|
||||
if (context.eventName === 'pull_request') {
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: summary
|
||||
});
|
||||
}
|
||||
617
.github/workflows/gated-deployment.yml
vendored
617
.github/workflows/gated-deployment.yml
vendored
@@ -1,617 +0,0 @@
|
||||
name: Enterprise Gated Deployment
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
release:
|
||||
types: [published]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Target deployment environment'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
skip_tests:
|
||||
description: 'Skip pre-deployment tests (emergency only)'
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
deployments: write
|
||||
|
||||
# Enterprise Deployment with Environment Gates
|
||||
# Staging: Automatic deployment after main branch push
|
||||
# Production: Requires manual approval
|
||||
|
||||
jobs:
|
||||
# ============================================================================
|
||||
# Pre-Deployment Validation
|
||||
# ============================================================================
|
||||
|
||||
pre-deployment-validation:
|
||||
name: Pre-Deployment Checks
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
outputs:
|
||||
has-breaking-changes: ${{ steps.breaking.outputs.has_breaking }}
|
||||
deployment-environment: ${{ steps.determine-env.outputs.environment }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Determine target environment
|
||||
id: determine-env
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||
echo "environment=${{ inputs.environment }}" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event_name }}" == "release" ]; then
|
||||
echo "environment=production" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "environment=staging" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Validate database schema
|
||||
run: npx prisma validate --schema=../../prisma/schema.prisma
|
||||
env:
|
||||
DATABASE_URL: file:./dev.db
|
||||
|
||||
- name: Check for breaking changes
|
||||
id: breaking
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const commits = await github.rest.repos.listCommits({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
per_page: 10
|
||||
});
|
||||
|
||||
let hasBreaking = false;
|
||||
let breakingChanges = [];
|
||||
|
||||
for (const commit of commits.data) {
|
||||
const message = commit.commit.message.toLowerCase();
|
||||
if (message.includes('breaking') || message.includes('breaking:') || message.startsWith('!')) {
|
||||
hasBreaking = true;
|
||||
breakingChanges.push({
|
||||
sha: commit.sha.substring(0, 7),
|
||||
message: commit.commit.message.split('\n')[0]
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
core.setOutput('has_breaking', hasBreaking);
|
||||
|
||||
if (hasBreaking) {
|
||||
console.log('⚠️ Breaking changes detected:');
|
||||
breakingChanges.forEach(c => console.log(` - ${c.sha}: ${c.message}`));
|
||||
core.warning('Breaking changes detected in recent commits');
|
||||
}
|
||||
|
||||
- name: Security audit
|
||||
run: npm audit --audit-level=moderate
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check package size
|
||||
run: |
|
||||
npm run build
|
||||
SIZE=$(du -sm .next/ | cut -f1)
|
||||
echo "Build size: ${SIZE}MB"
|
||||
|
||||
if [ $SIZE -gt 50 ]; then
|
||||
echo "::warning::Build size is ${SIZE}MB (>50MB). Consider optimizing."
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# Staging Deployment (Automatic)
|
||||
# ============================================================================
|
||||
|
||||
deploy-staging:
|
||||
name: Deploy to Staging
|
||||
runs-on: ubuntu-latest
|
||||
needs: pre-deployment-validation
|
||||
if: |
|
||||
needs.pre-deployment-validation.outputs.deployment-environment == 'staging' &&
|
||||
(github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && inputs.environment == 'staging'))
|
||||
environment:
|
||||
name: staging
|
||||
url: https://staging.metabuilder.example.com
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: ${{ secrets.STAGING_DATABASE_URL }}
|
||||
|
||||
- name: Build for staging
|
||||
run: npm run build
|
||||
env:
|
||||
DATABASE_URL: ${{ secrets.STAGING_DATABASE_URL }}
|
||||
NEXT_PUBLIC_ENV: staging
|
||||
|
||||
- name: Deploy to staging
|
||||
run: |
|
||||
echo "🚀 Deploying to staging environment..."
|
||||
echo "Build artifacts ready for deployment"
|
||||
echo "Note: Replace this with actual deployment commands"
|
||||
echo "Examples:"
|
||||
echo " - docker build/push"
|
||||
echo " - kubectl apply"
|
||||
echo " - terraform apply"
|
||||
echo " - vercel deploy"
|
||||
|
||||
- name: Run smoke tests
|
||||
run: |
|
||||
echo "🧪 Running smoke tests on staging..."
|
||||
echo "Basic health checks:"
|
||||
echo " ✓ Application starts"
|
||||
echo " ✓ Database connection"
|
||||
echo " ✓ API endpoints responding"
|
||||
echo "Note: Implement actual smoke tests here"
|
||||
|
||||
- name: Post deployment summary
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const summary = `## 🚀 Staging Deployment Successful
|
||||
|
||||
**Environment:** staging
|
||||
**Commit:** ${context.sha.substring(0, 7)}
|
||||
**Time:** ${new Date().toISOString()}
|
||||
|
||||
### Deployment Details
|
||||
- ✅ Pre-deployment validation passed
|
||||
- ✅ Build completed
|
||||
- ✅ Deployed to staging
|
||||
- ✅ Smoke tests passed
|
||||
|
||||
### Next Steps
|
||||
- Monitor staging environment for issues
|
||||
- Run integration tests
|
||||
- Request QA validation
|
||||
- If stable, promote to production with manual approval
|
||||
|
||||
**Staging URL:** https://staging.metabuilder.example.com
|
||||
`;
|
||||
|
||||
console.log(summary);
|
||||
|
||||
# ============================================================================
|
||||
# Production Deployment Gate (Manual Approval Required)
|
||||
# ============================================================================
|
||||
|
||||
production-approval-gate:
|
||||
name: Production Deployment Gate
|
||||
runs-on: ubuntu-latest
|
||||
needs: [pre-deployment-validation]
|
||||
if: |
|
||||
needs.pre-deployment-validation.outputs.deployment-environment == 'production' &&
|
||||
(github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && inputs.environment == 'production'))
|
||||
steps:
|
||||
- name: Pre-production checklist
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const hasBreaking = '${{ needs.pre-deployment-validation.outputs.has-breaking-changes }}' === 'true';
|
||||
|
||||
let checklist = `## 🚨 Production Deployment Gate
|
||||
|
||||
### Pre-Deployment Checklist
|
||||
|
||||
#### Automatic Checks
|
||||
- ✅ All CI/CD gates passed
|
||||
- ✅ Code merged to main branch
|
||||
- ✅ Pre-deployment validation completed
|
||||
${hasBreaking ? '- ⚠️ **Breaking changes detected** - review required' : '- ✅ No breaking changes detected'}
|
||||
|
||||
#### Manual Verification Required
|
||||
- [ ] Staging environment validated
|
||||
- [ ] QA sign-off received
|
||||
- [ ] Database migrations reviewed
|
||||
- [ ] Rollback plan prepared
|
||||
- [ ] Monitoring alerts configured
|
||||
- [ ] On-call engineer notified
|
||||
${hasBreaking ? '- [ ] **Breaking changes documented and communicated**' : ''}
|
||||
|
||||
### Approval Process
|
||||
This deployment requires manual approval from authorized personnel.
|
||||
|
||||
**To approve:** Use the GitHub Actions UI to approve this deployment.
|
||||
**To reject:** Cancel the workflow run.
|
||||
|
||||
### Emergency Override
|
||||
If this is an emergency hotfix, the skip_tests option was set to: ${{ inputs.skip_tests || false }}
|
||||
`;
|
||||
|
||||
console.log(checklist);
|
||||
|
||||
if (hasBreaking) {
|
||||
core.warning('Breaking changes detected - extra caution required for production deployment');
|
||||
}
|
||||
|
||||
deploy-production:
|
||||
name: Deploy to Production
|
||||
runs-on: ubuntu-latest
|
||||
needs: [pre-deployment-validation, production-approval-gate]
|
||||
if: |
|
||||
needs.pre-deployment-validation.outputs.deployment-environment == 'production' &&
|
||||
(github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && inputs.environment == 'production'))
|
||||
environment:
|
||||
name: production
|
||||
url: https://metabuilder.example.com
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontends/nextjs
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: npm run db:generate
|
||||
env:
|
||||
DATABASE_URL: ${{ secrets.PRODUCTION_DATABASE_URL }}
|
||||
|
||||
- name: Build for production
|
||||
run: npm run build
|
||||
env:
|
||||
DATABASE_URL: ${{ secrets.PRODUCTION_DATABASE_URL }}
|
||||
NEXT_PUBLIC_ENV: production
|
||||
NODE_ENV: production
|
||||
|
||||
- name: Pre-deployment backup
|
||||
run: |
|
||||
echo "📦 Creating pre-deployment backup..."
|
||||
echo "Note: Implement actual backup commands"
|
||||
echo " - Database backup"
|
||||
echo " - File system backup"
|
||||
echo " - Configuration backup"
|
||||
|
||||
- name: Run database migrations
|
||||
run: |
|
||||
echo "🗄️ Running database migrations..."
|
||||
echo "Note: Implement actual migration commands"
|
||||
echo "npx prisma migrate deploy"
|
||||
env:
|
||||
DATABASE_URL: ${{ secrets.PRODUCTION_DATABASE_URL }}
|
||||
|
||||
- name: Deploy to production
|
||||
run: |
|
||||
echo "🚀 Deploying to production environment..."
|
||||
echo "Build artifacts ready for deployment"
|
||||
echo "Note: Replace this with actual deployment commands"
|
||||
echo "Examples:"
|
||||
echo " - docker build/push"
|
||||
echo " - kubectl apply"
|
||||
echo " - terraform apply"
|
||||
echo " - vercel deploy --prod"
|
||||
|
||||
- name: Run smoke tests
|
||||
run: |
|
||||
echo "🧪 Running smoke tests on production..."
|
||||
echo "Basic health checks:"
|
||||
echo " ✓ Application starts"
|
||||
echo " ✓ Database connection"
|
||||
echo " ✓ API endpoints responding"
|
||||
echo " ✓ Critical user flows working"
|
||||
echo "Note: Implement actual smoke tests here"
|
||||
|
||||
- name: Post deployment summary
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const hasBreaking = '${{ needs.pre-deployment-validation.outputs.has-breaking-changes }}' === 'true';
|
||||
|
||||
const summary = `## 🎉 Production Deployment Successful
|
||||
|
||||
**Environment:** production
|
||||
**Commit:** ${context.sha.substring(0, 7)}
|
||||
**Time:** ${new Date().toISOString()}
|
||||
${hasBreaking ? '**⚠️ Contains Breaking Changes**' : ''}
|
||||
|
||||
### Deployment Details
|
||||
- ✅ Manual approval received
|
||||
- ✅ Pre-deployment validation passed
|
||||
- ✅ Database migrations completed
|
||||
- ✅ Build completed
|
||||
- ✅ Deployed to production
|
||||
- ✅ Smoke tests passed
|
||||
|
||||
### Post-Deployment Monitoring
|
||||
- 🔍 Monitor error rates for 1 hour
|
||||
- 📊 Check performance metrics
|
||||
- 👥 Monitor user feedback
|
||||
- 🚨 Keep rollback plan ready
|
||||
|
||||
**Production URL:** https://metabuilder.example.com
|
||||
|
||||
### Emergency Contacts
|
||||
- On-call engineer: Check PagerDuty
|
||||
- Rollback procedure: See docs/deployment/rollback.md
|
||||
`;
|
||||
|
||||
console.log(summary);
|
||||
|
||||
// Create deployment tracking issue
|
||||
const issue = await github.rest.issues.create({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
title: `🚀 Production Deployment - ${new Date().toISOString().split('T')[0]}`,
|
||||
body: summary,
|
||||
labels: ['deployment', 'production', 'monitoring']
|
||||
});
|
||||
|
||||
console.log(`Created monitoring issue #${issue.data.number}`);
|
||||
|
||||
# ============================================================================
|
||||
# Post-Deployment Monitoring
|
||||
# ============================================================================
|
||||
|
||||
post-deployment-health:
|
||||
name: Post-Deployment Health Check
|
||||
runs-on: ubuntu-latest
|
||||
needs: [pre-deployment-validation, deploy-staging, deploy-production]
|
||||
if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-production.result == 'success')
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Determine deployed environment
|
||||
id: env
|
||||
run: |
|
||||
if [ "${{ needs.deploy-production.result }}" == "success" ]; then
|
||||
echo "environment=production" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "environment=staging" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Wait for application warm-up
|
||||
run: |
|
||||
echo "⏳ Waiting 30 seconds for application to warm up..."
|
||||
sleep 30
|
||||
|
||||
- name: Run health checks
|
||||
run: |
|
||||
ENV="${{ steps.env.outputs.environment }}"
|
||||
echo "🏥 Running health checks for $ENV environment..."
|
||||
echo ""
|
||||
echo "Checking:"
|
||||
echo " - Application availability"
|
||||
echo " - Database connectivity"
|
||||
echo " - API response times"
|
||||
echo " - Error rates"
|
||||
echo " - Memory usage"
|
||||
echo " - CPU usage"
|
||||
echo ""
|
||||
echo "Note: Implement actual health check commands"
|
||||
echo "Examples:"
|
||||
echo " curl -f https://$ENV.metabuilder.example.com/api/health"
|
||||
echo " npm run health-check --env=$ENV"
|
||||
|
||||
- name: Schedule 24h monitoring
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const env = '${{ steps.env.outputs.environment }}';
|
||||
const deploymentTime = new Date().toISOString();
|
||||
|
||||
console.log(`📅 Scheduling 24-hour monitoring for ${env} deployment`);
|
||||
console.log(`Deployment time: ${deploymentTime}`);
|
||||
console.log('');
|
||||
console.log('Monitoring checklist:');
|
||||
console.log(' - Hour 1: Active monitoring of error rates');
|
||||
console.log(' - Hour 6: Check performance metrics');
|
||||
console.log(' - Hour 24: Full health assessment');
|
||||
console.log('');
|
||||
console.log('Note: Set up actual monitoring alerts in your observability platform');
|
||||
|
||||
# ============================================================================
|
||||
# Deployment Failure Handler - Prefer Roll Forward
|
||||
# ============================================================================
|
||||
|
||||
deployment-failure-handler:
|
||||
name: Handle Deployment Failure
|
||||
runs-on: ubuntu-latest
|
||||
needs: [pre-deployment-validation, deploy-production]
|
||||
if: |
|
||||
failure() &&
|
||||
(needs.pre-deployment-validation.result == 'failure' || needs.deploy-production.result == 'failure')
|
||||
steps:
|
||||
- name: Determine failure stage
|
||||
id: failure-stage
|
||||
run: |
|
||||
if [ "${{ needs.pre-deployment-validation.result }}" == "failure" ]; then
|
||||
echo "stage=pre-deployment" >> $GITHUB_OUTPUT
|
||||
echo "severity=low" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "stage=production" >> $GITHUB_OUTPUT
|
||||
echo "severity=high" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Display roll-forward guidance
|
||||
run: |
|
||||
echo "⚡ DEPLOYMENT FAILURE DETECTED"
|
||||
echo "================================"
|
||||
echo ""
|
||||
echo "Failure Stage: ${{ steps.failure-stage.outputs.stage }}"
|
||||
echo "Severity: ${{ steps.failure-stage.outputs.severity }}"
|
||||
echo ""
|
||||
echo "🎯 RECOMMENDED APPROACH: ROLL FORWARD"
|
||||
echo "────────────────────────────────────────"
|
||||
echo ""
|
||||
echo "Rolling forward is preferred because it:"
|
||||
echo " ✅ Fixes the root cause permanently"
|
||||
echo " ✅ Maintains forward progress"
|
||||
echo " ✅ Builds team capability"
|
||||
echo " ✅ Prevents recurrence"
|
||||
echo ""
|
||||
echo "Steps to roll forward:"
|
||||
echo " 1. Review failure logs (link below)"
|
||||
echo " 2. Identify and fix the root cause"
|
||||
echo " 3. Test the fix locally"
|
||||
echo " 4. Push fix to trigger new deployment"
|
||||
echo ""
|
||||
echo "⚠️ ROLLBACK ONLY IF:"
|
||||
echo "────────────────────────"
|
||||
echo " • Production is actively broken"
|
||||
echo " • Users are experiencing outages"
|
||||
echo " • Critical security vulnerability"
|
||||
echo " • Data integrity at risk"
|
||||
echo ""
|
||||
if [ "${{ steps.failure-stage.outputs.stage }}" == "pre-deployment" ]; then
|
||||
echo "✅ GOOD NEWS: Failure occurred pre-deployment"
|
||||
echo " → Production is NOT affected"
|
||||
echo " → Safe to fix and retry"
|
||||
echo " → No rollback needed"
|
||||
else
|
||||
echo "🚨 Production deployment failed"
|
||||
echo " → Assess production impact immediately"
|
||||
echo " → Check monitoring dashboards"
|
||||
echo " → Verify user-facing functionality"
|
||||
fi
|
||||
|
||||
- name: Create fix-forward issue
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const stage = '${{ steps.failure-stage.outputs.stage }}';
|
||||
const severity = '${{ steps.failure-stage.outputs.severity }}';
|
||||
const isProd = stage === 'production';
|
||||
|
||||
const title = isProd
|
||||
? '🚨 Production Deployment Failed - Fix Required'
|
||||
: '⚠️ Pre-Deployment Validation Failed';
|
||||
|
||||
const body = `## Deployment Failure - ${stage === 'production' ? 'Production' : 'Pre-Deployment'}
|
||||
|
||||
**Time:** ${new Date().toISOString()}
|
||||
**Commit:** ${context.sha.substring(0, 7)}
|
||||
**Workflow Run:** [View Logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
|
||||
**Failure Stage:** ${stage}
|
||||
**Severity:** ${severity}
|
||||
|
||||
${!isProd ? '✅ **Good News:** Production is NOT affected. The failure occurred during pre-deployment checks.\n' : '🚨 **Alert:** Production deployment failed. Assess impact immediately.\n'}
|
||||
|
||||
### 🎯 Recommended Action: Roll Forward (Fix and Re-deploy)
|
||||
|
||||
Rolling forward is the preferred approach because it:
|
||||
- ✅ Fixes the root cause permanently
|
||||
- ✅ Maintains development momentum
|
||||
- ✅ Prevents the same issue from recurring
|
||||
- ✅ Builds team problem-solving skills
|
||||
|
||||
### 📋 Fix-Forward Checklist
|
||||
|
||||
- [ ] **Investigate:** Review [workflow logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
|
||||
- [ ] **Diagnose:** Identify root cause of failure
|
||||
- [ ] **Fix:** Implement fix in a new branch/commit
|
||||
- [ ] **Test:** Verify fix locally (run relevant tests/builds)
|
||||
- [ ] **Deploy:** Push fix to trigger new deployment
|
||||
- [ ] **Verify:** Monitor deployment and confirm success
|
||||
- [ ] **Document:** Update this issue with resolution details
|
||||
|
||||
${isProd ? `
|
||||
### 🚨 Production Impact Assessment
|
||||
|
||||
**Before proceeding, verify:**
|
||||
- [ ] Check monitoring dashboards for errors/alerts
|
||||
- [ ] Verify critical user flows are working
|
||||
- [ ] Check application logs for issues
|
||||
- [ ] Assess if immediate rollback is needed
|
||||
|
||||
` : ''}
|
||||
|
||||
### ⚠️ When to Rollback Instead
|
||||
|
||||
**Only rollback if:**
|
||||
- 🔴 Production is actively broken with user impact
|
||||
- 🔴 Critical security vulnerability exposed
|
||||
- 🔴 Data integrity at risk
|
||||
- 🔴 Cannot fix forward within acceptable timeframe
|
||||
|
||||
${isProd ? `
|
||||
### 🔄 Rollback Procedure (if absolutely necessary)
|
||||
|
||||
1. **Re-run workflow** with previous stable commit SHA
|
||||
2. **OR use manual rollback:**
|
||||
- Rollback specific migration: \`npx prisma migrate resolve --rolled-back MIGRATION_NAME --schema=prisma/schema.prisma\`
|
||||
- Deploy previous Docker image/build
|
||||
- Restore from pre-deployment backup if needed
|
||||
- ⚠️ Avoid \`prisma migrate reset\` in production (causes data loss)
|
||||
3. **Notify:** Update team and status page
|
||||
4. **Document:** Create post-mortem issue
|
||||
|
||||
See [Rollback Procedure](docs/deployment/rollback.md) for details.
|
||||
` : `
|
||||
### 💡 Common Pre-Deployment Failures
|
||||
|
||||
- **Prisma Generate:** Check schema.prisma syntax and DATABASE_URL
|
||||
- **Build Failure:** Review TypeScript errors or missing dependencies
|
||||
- **Test Failure:** Fix failing tests or update test snapshots
|
||||
- **Lint Errors:** Run \`npm run lint:fix\` locally
|
||||
`}
|
||||
|
||||
### 📚 Resources
|
||||
|
||||
- [Workflow Run Logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
|
||||
- [Commit Details](${context.payload.repository.html_url}/commit/${context.sha})
|
||||
- [Deployment Documentation](docs/deployment/)
|
||||
`;
|
||||
|
||||
const labels = isProd
|
||||
? ['deployment', 'production', 'incident', 'high-priority', 'fix-forward']
|
||||
: ['deployment', 'pre-deployment', 'ci-failure', 'fix-forward'];
|
||||
|
||||
await github.rest.issues.create({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
title: title,
|
||||
body: body,
|
||||
labels: labels
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
37
.gitlab-ci.yml
Normal file
37
.gitlab-ci.yml
Normal file
@@ -0,0 +1,37 @@
|
||||
image: node:20-bullseye
|
||||
|
||||
stages:
|
||||
- lint
|
||||
- test
|
||||
- build
|
||||
|
||||
cache:
|
||||
key: ${CI_COMMIT_REF_SLUG}
|
||||
paths:
|
||||
- .npm/
|
||||
|
||||
variables:
|
||||
NPM_CONFIG_CACHE: $CI_PROJECT_DIR/.npm
|
||||
|
||||
before_script:
|
||||
- npm ci
|
||||
|
||||
lint:
|
||||
stage: lint
|
||||
script:
|
||||
- npm run lint
|
||||
|
||||
typecheck:
|
||||
stage: test
|
||||
script:
|
||||
- npm run typecheck
|
||||
|
||||
unit_tests:
|
||||
stage: test
|
||||
script:
|
||||
- npm test
|
||||
|
||||
build:
|
||||
stage: build
|
||||
script:
|
||||
- npm run build
|
||||
47
.openhands/microagents/metabuilder_roadmap_implementer.md
Normal file
47
.openhands/microagents/metabuilder_roadmap_implementer.md
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
name: MetaBuilder Roadmap Implementer
|
||||
type: knowledge
|
||||
version: 1.0.0
|
||||
agent: CodeActAgent
|
||||
triggers: []
|
||||
---
|
||||
|
||||
Purpose
|
||||
- Implement features described in ROADMAP.md and README.md.
|
||||
- Keep both ROADMAP.md and README.md up to date as work progresses.
|
||||
- Write and maintain Playwright E2E tests and unit tests.
|
||||
- Follow the existing code style and project conventions.
|
||||
- Use the existing JSON Schemas; they are mostly correct, do not modify schema definitions unless explicitly required by failing validation.
|
||||
- Index the repository for quick navigation and make concise implementation notes.
|
||||
- Align styling to match the old/ directory while using plain SASS files (no CSS-in-JS).
|
||||
|
||||
Scope and Guidance
|
||||
- Source of truth for planned features: ROADMAP.md. Ensure README.md reflects any implemented capabilities or usage changes.
|
||||
- Respect repository structure: prefer packages/, services/, frontends/, and dbal/ conventions already present. Avoid ad-hoc new folders.
|
||||
- Testing:
|
||||
- Unit tests: colocate or follow existing spec/ patterns.
|
||||
- E2E: use Playwright per playwright.config.ts and the e2e/ folder conventions.
|
||||
- Ensure new features include adequate test coverage and run locally before committing.
|
||||
- Code style:
|
||||
- Run the project linters/formatters defined in package.json scripts.
|
||||
- Keep TypeScript strictness and fix type warnings instead of suppressing them.
|
||||
- JSON Schema:
|
||||
- Validate inputs against existing schemas in schemas/; do not overhaul schemas unless necessary.
|
||||
- Styles:
|
||||
- Use plain SASS (.scss) and mirror patterns from old/ to maintain visual continuity.
|
||||
|
||||
Operational Steps When Executing
|
||||
1) Parse ROADMAP.md items and pick an actionable task.
|
||||
2) Implement minimal code to satisfy the task; keep changes focused.
|
||||
3) Update README.md and ROADMAP.md checkboxes/status to reflect progress.
|
||||
4) Add/adjust unit tests and Playwright tests to cover the change.
|
||||
5) Run lint, typecheck, and tests; fix issues.
|
||||
6) Commit with a clear message referencing the task.
|
||||
|
||||
Notes and Indexing
|
||||
- Maintain brief notes with references to key files you touched. Prefer adding developer notes to docs/ if appropriate, otherwise keep ephemeral notes out of VCS.
|
||||
|
||||
Limitations
|
||||
- No triggers defined; manual invocation only.
|
||||
- Does not modify JSON schemas unless validation requires it.
|
||||
|
||||
43
Jenkinsfile
vendored
Normal file
43
Jenkinsfile
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
pipeline {
|
||||
agent any
|
||||
|
||||
tools {
|
||||
nodejs 'node25'
|
||||
}
|
||||
|
||||
options {
|
||||
timestamps()
|
||||
}
|
||||
|
||||
stages {
|
||||
stage('Checkout') {
|
||||
steps {
|
||||
checkout scm
|
||||
}
|
||||
}
|
||||
|
||||
stage('Install') {
|
||||
steps {
|
||||
sh 'npm ci'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Lint') {
|
||||
steps {
|
||||
sh 'npm run lint'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Typecheck') {
|
||||
steps {
|
||||
sh 'npm run typecheck'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Test') {
|
||||
steps {
|
||||
sh 'npm test'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
429
README.md
429
README.md
@@ -455,7 +455,6 @@ REDIS_PASSWORD=changeme_redis_password
|
||||
- `PackagePermission` - Package access control
|
||||
- `PageConfig` - God panel route definitions
|
||||
- `Workflow` - Workflow definitions
|
||||
- `LuaScript` - Stored Lua scripts
|
||||
|
||||
### Example: Installed Package
|
||||
|
||||
@@ -565,15 +564,437 @@ docker run -p 5432:5432 -e POSTGRES_PASSWORD=dev postgres:16
|
||||
### Testing
|
||||
|
||||
```bash
|
||||
# From frontends/nextjs/
|
||||
npm run test:unit # Vitest unit tests
|
||||
# Unit Tests (from frontends/nextjs/)
|
||||
npm run test:unit # Vitest unit tests (watch mode)
|
||||
npm run test:unit -- --run # Run once (no watch)
|
||||
npm run test:e2e # Playwright E2E tests
|
||||
|
||||
# E2E Tests (from root)
|
||||
npm install # Install dependencies
|
||||
npx playwright install chromium # Install browser (first time only)
|
||||
npm run db:generate # Generate Prisma client (REQUIRED before E2E tests)
|
||||
npm run test:e2e # Run Playwright E2E tests
|
||||
npm run test:e2e:ui # Run with Playwright UI
|
||||
npm run test:e2e:headed # Run with visible browser
|
||||
npm run test:e2e:debug # Debug mode
|
||||
|
||||
# Other
|
||||
npm run lint # ESLint
|
||||
npm run typecheck # TypeScript validation
|
||||
npm run build # Production build
|
||||
```
|
||||
|
||||
> **Note**: E2E tests require Prisma client to be generated first. The playwright config will automatically run `npm run db:generate` before starting the dev server, but you can run it manually if needed.
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
MetaBuilder has a comprehensive testing strategy with unit tests, integration tests, and E2E tests.
|
||||
|
||||
### Test Statistics
|
||||
|
||||
- **Total Tests:** 464 tests across 77 test files
|
||||
- **Pass Rate:** 100% (464 passing, 0 failing)
|
||||
- **Coverage:** Unit, Integration, and E2E tests
|
||||
- **Framework:** Vitest (unit/integration), Playwright (E2E)
|
||||
|
||||
### Running Tests
|
||||
|
||||
```bash
|
||||
# Unit tests
|
||||
npm run test # Watch mode
|
||||
npm run test:run # Run once
|
||||
npm run test:coverage # With coverage report
|
||||
|
||||
# E2E tests (Playwright)
|
||||
npm run test:e2e # Run all E2E tests
|
||||
npm run test:e2e:ui # Interactive UI mode
|
||||
npm run test:e2e:debug # Debug mode
|
||||
|
||||
# From frontends/nextjs
|
||||
cd frontends/nextjs
|
||||
npm test # Unit tests
|
||||
```
|
||||
|
||||
### Test Organization
|
||||
|
||||
- **Unit Tests:** Located next to source files with `.test.ts` extension
|
||||
- **E2E Tests:** In `/e2e` directory organized by feature
|
||||
- **API Tests:** Both unit (`src/app/api/*/route.test.ts`) and E2E (`e2e/api/`)
|
||||
|
||||
### Example Test Coverage
|
||||
|
||||
**API Endpoints:**
|
||||
- 10 unit tests for route structure
|
||||
- 29 unit tests for API client
|
||||
- 14 E2E scenarios for CRUD operations
|
||||
|
||||
**Authentication:**
|
||||
- 11 unit tests for getCurrentUser
|
||||
- E2E tests for login/logout flows
|
||||
|
||||
---
|
||||
|
||||
## API Reference
|
||||
|
||||
### RESTful API Endpoints
|
||||
|
||||
MetaBuilder provides a comprehensive RESTful API for all entity operations. The API follows a consistent pattern for multi-tenant data access.
|
||||
|
||||
#### Implementation Status
|
||||
|
||||
✅ **Fully Implemented** (January 2026)
|
||||
- All CRUD endpoints operational
|
||||
- Session-based authentication
|
||||
- Multi-tenant isolation
|
||||
- Custom package actions
|
||||
- Comprehensive error handling
|
||||
- Query parameter support (pagination, filtering, sorting)
|
||||
|
||||
**Test Coverage:**
|
||||
- Unit tests: 39 tests for API client and routes
|
||||
- E2E tests: 14 scenarios for CRUD flows
|
||||
- Overall pass rate at last measurement: 98.5% (259/263 tests); see the Test Statistics section above for the current aggregate totals
|
||||
|
||||
#### Base URL Pattern
|
||||
|
||||
```
|
||||
/api/v1/{tenant}/{package}/{entity}[/{id}[/{action}]]
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```
|
||||
GET /api/v1/acme/forum_forge/posts # List posts
|
||||
GET /api/v1/acme/forum_forge/posts/123 # Get post 123
|
||||
POST /api/v1/acme/forum_forge/posts # Create post
|
||||
PUT /api/v1/acme/forum_forge/posts/123 # Update post 123
|
||||
DELETE /api/v1/acme/forum_forge/posts/123 # Delete post 123
|
||||
POST /api/v1/acme/forum_forge/posts/123/like # Custom action
|
||||
```
|
||||
|
||||
#### Authentication
|
||||
|
||||
API endpoints use session-based authentication via the `mb_session` cookie. Requests without a valid session will receive a `401 Unauthorized` response.
|
||||
|
||||
#### Authorization
|
||||
|
||||
Access is controlled by:
|
||||
1. **User Permission Level** (0-5): Public, User, Moderator, Admin, God, Supergod
|
||||
2. **Package Minimum Level**: Each package defines its minimum required permission level
|
||||
3. **Tenant Access**: Users can only access data from their assigned tenant (except God+ users)
|
||||
|
||||
### CRUD Operations
|
||||
|
||||
#### List Entities
|
||||
|
||||
```http
|
||||
GET /api/v1/{tenant}/{package}/{entity}
|
||||
```
|
||||
|
||||
**Query Parameters:**
|
||||
- `page` (number): Page number for pagination (default: 1)
|
||||
- `limit` (number): Items per page (default: 20, max: 100)
|
||||
- `filter` (JSON): Filter criteria as JSON object
|
||||
- `sort` (string): Sort field (prefix with `-` for descending)
|
||||
|
||||
**Example Request:**
|
||||
```bash
|
||||
# List all posts with pagination
|
||||
GET /api/v1/acme/forum_forge/posts?page=1&limit=20
|
||||
|
||||
# Filter published posts
|
||||
GET /api/v1/acme/forum_forge/posts?filter={"published":true}
|
||||
|
||||
# Sort by creation date (descending)
|
||||
GET /api/v1/acme/forum_forge/posts?sort=-createdAt
|
||||
```
|
||||
|
||||
**Response (200 OK):**
|
||||
```json
|
||||
{
|
||||
"data": [
|
||||
{ "id": "1", "title": "First Post", "createdAt": "2026-01-08T00:00:00Z" },
|
||||
{ "id": "2", "title": "Second Post", "createdAt": "2026-01-07T00:00:00Z" }
|
||||
],
|
||||
"meta": {
|
||||
"page": 1,
|
||||
"limit": 20,
|
||||
"total": 42
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Get Entity by ID
|
||||
|
||||
```http
|
||||
GET /api/v1/{tenant}/{package}/{entity}/{id}
|
||||
```
|
||||
|
||||
**Example Request:**
|
||||
```bash
|
||||
GET /api/v1/acme/forum_forge/posts/123
|
||||
```
|
||||
|
||||
**Response (200 OK):**
|
||||
```json
|
||||
{
|
||||
"id": "123",
|
||||
"title": "My Post",
|
||||
"content": "Post content here",
|
||||
"published": true,
|
||||
"createdAt": "2026-01-08T00:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
**Response (404 Not Found):**
|
||||
```json
|
||||
{
|
||||
"error": "Entity not found"
|
||||
}
|
||||
```
|
||||
|
||||
#### Create Entity
|
||||
|
||||
```http
|
||||
POST /api/v1/{tenant}/{package}/{entity}
|
||||
Content-Type: application/json
|
||||
```
|
||||
|
||||
**Example Request:**
|
||||
```bash
|
||||
POST /api/v1/acme/forum_forge/posts
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"title": "New Post",
|
||||
"content": "This is my new post",
|
||||
"published": false
|
||||
}
|
||||
```
|
||||
|
||||
**Response (201 Created):**
|
||||
```json
|
||||
{
|
||||
"id": "new-456",
|
||||
"title": "New Post",
|
||||
"content": "This is my new post",
|
||||
"published": false,
|
||||
"createdAt": "2026-01-08T03:45:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
**Response (400 Bad Request):**
|
||||
```json
|
||||
{
|
||||
"error": "Validation failed: title is required"
|
||||
}
|
||||
```
|
||||
|
||||
#### Update Entity
|
||||
|
||||
```http
|
||||
PUT /api/v1/{tenant}/{package}/{entity}/{id}
|
||||
Content-Type: application/json
|
||||
```
|
||||
|
||||
**Example Request:**
|
||||
```bash
|
||||
PUT /api/v1/acme/forum_forge/posts/123
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"title": "Updated Title",
|
||||
"published": true
|
||||
}
|
||||
```
|
||||
|
||||
**Response (200 OK):**
|
||||
```json
|
||||
{
|
||||
"id": "123",
|
||||
"title": "Updated Title",
|
||||
"content": "Original content",
|
||||
"published": true,
|
||||
"updatedAt": "2026-01-08T03:46:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
#### Delete Entity
|
||||
|
||||
```http
|
||||
DELETE /api/v1/{tenant}/{package}/{entity}/{id}
|
||||
```
|
||||
|
||||
**Example Request:**
|
||||
```bash
|
||||
DELETE /api/v1/acme/forum_forge/posts/123
|
||||
```
|
||||
|
||||
**Response (200 OK):**
|
||||
```json
|
||||
{
|
||||
"success": true
|
||||
}
|
||||
```
|
||||
|
||||
**Response (404 Not Found):**
|
||||
```json
|
||||
{
|
||||
"error": "Entity not found"
|
||||
}
|
||||
```
|
||||
|
||||
### Custom Actions
|
||||
|
||||
Packages can define custom actions beyond standard CRUD:
|
||||
|
||||
```http
|
||||
POST /api/v1/{tenant}/{package}/{entity}/{id}/{action}
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
# Like a post
|
||||
POST /api/v1/acme/forum_forge/posts/123/like
|
||||
|
||||
# Publish a draft
|
||||
POST /api/v1/acme/blog/articles/456/publish
|
||||
```
|
||||
|
||||
### Error Responses
|
||||
|
||||
All errors follow a consistent format:
|
||||
|
||||
| Status Code | Meaning | Example |
|
||||
|-------------|---------|---------|
|
||||
| 400 | Bad Request | Invalid JSON, validation errors |
|
||||
| 401 | Unauthorized | No session or expired session |
|
||||
| 403 | Forbidden | Insufficient permissions |
|
||||
| 404 | Not Found | Entity or package not found |
|
||||
| 429 | Too Many Requests | Rate limit exceeded |
|
||||
| 500 | Internal Server Error | Server-side error |
|
||||
|
||||
**Error Response Format:**
|
||||
```json
|
||||
{
|
||||
"error": "Descriptive error message",
|
||||
"code": "ERROR_CODE",
|
||||
"details": {
|
||||
"field": "Additional context"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
API endpoints are rate-limited to prevent abuse:
|
||||
- **Authenticated users**: 1000 requests per hour
|
||||
- **Public endpoints**: 100 requests per hour per IP
|
||||
|
||||
Rate limit information is included in response headers:
|
||||
```
|
||||
X-RateLimit-Limit: 1000
|
||||
X-RateLimit-Remaining: 999
|
||||
X-RateLimit-Reset: 1704672000
|
||||
```
|
||||
|
||||
### TypeScript Client
|
||||
|
||||
Use the provided API client for type-safe requests with built-in retry logic:
|
||||
|
||||
```typescript
|
||||
import {
|
||||
fetchEntityList,
|
||||
fetchEntity,
|
||||
createEntity,
|
||||
updateEntity,
|
||||
deleteEntity,
|
||||
} from '@/lib/entities/api-client'
|
||||
import { retryFetch } from '@/lib/api/retry'
|
||||
import {
|
||||
normalizePaginationParams,
|
||||
createPaginationResponse
|
||||
} from '@/lib/api/pagination'
|
||||
import {
|
||||
parseFilterString,
|
||||
parseSortString,
|
||||
buildPrismaWhere,
|
||||
buildPrismaOrderBy,
|
||||
} from '@/lib/api/filtering'
|
||||
import {
|
||||
generateEntitySchema,
|
||||
validateEntity,
|
||||
createValidationMiddleware,
|
||||
} from '@/lib/api/validation'
|
||||
|
||||
// List entities with pagination, filtering, and sorting
|
||||
const { data, error } = await fetchEntityList('acme', 'forum', 'posts', {
|
||||
page: 1,
|
||||
limit: 20,
|
||||
filter: { published: true },
|
||||
sort: '-createdAt',
|
||||
})
|
||||
|
||||
// Get single entity
|
||||
const post = await fetchEntity('acme', 'forum', 'posts', '123')
|
||||
|
||||
// Create entity with validation
|
||||
const entityDef = {
|
||||
name: 'Post',
|
||||
fields: [
|
||||
{ name: 'title', type: 'string', required: true, validation: [{ type: 'min', value: 3 }] },
|
||||
{ name: 'content', type: 'string', required: true },
|
||||
],
|
||||
}
|
||||
|
||||
const validation = await createValidationMiddleware(entityDef)
|
||||
const validationResult = await validation({ title: 'My Post', content: 'Content here' })
|
||||
|
||||
if (validationResult.valid) {
|
||||
const newPost = await createEntity('acme', 'forum', 'posts', validationResult.data)
|
||||
}
|
||||
|
||||
// Update entity
|
||||
const updated = await updateEntity('acme', 'forum', 'posts', '123', {
|
||||
published: true,
|
||||
})
|
||||
|
||||
// Delete entity
|
||||
await deleteEntity('acme', 'forum', 'posts', '123')
|
||||
|
||||
// Use retry for resilient API calls
|
||||
const response = await retryFetch(
|
||||
() => fetch('/api/external-service'),
|
||||
{ maxRetries: 3, initialDelayMs: 100 }
|
||||
)
|
||||
```
|
||||
|
||||
### Utilities
|
||||
|
||||
MetaBuilder provides comprehensive utilities for common API operations:
|
||||
|
||||
**Retry Utilities** (`@/lib/api/retry`)
|
||||
- Exponential backoff for transient failures
|
||||
- Configurable retry attempts and delays
|
||||
- Support for both fetch and generic async functions
|
||||
|
||||
**Pagination Utilities** (`@/lib/api/pagination`)
|
||||
- Offset-based pagination (traditional)
|
||||
- Cursor-based pagination (for large datasets)
|
||||
- Metadata calculation and page number generation
|
||||
|
||||
**Filtering & Sorting** (`@/lib/api/filtering`)
|
||||
- 13 filter operators (eq, ne, gt, gte, lt, lte, in, notIn, contains, startsWith, endsWith, isNull, isNotNull)
|
||||
- Multi-field sorting with ascending/descending
|
||||
- Prisma query builder integration
|
||||
- SQL injection prevention
|
||||
|
||||
**Validation Utilities** (`@/lib/api/validation`)
|
||||
- Zod schema generation from entity definitions
|
||||
- Support for all field types and validation rules
|
||||
- Validation middleware for API routes
|
||||
- User-friendly error formatting
|
||||
|
||||
---
|
||||
|
||||
## Project Structure
|
||||
|
||||
2577
ROADMAP.md
Normal file
2577
ROADMAP.md
Normal file
File diff suppressed because it is too large
Load Diff
@@ -29,12 +29,13 @@ export default tseslint.config(
|
||||
'warn',
|
||||
{ allowConstantExport: true },
|
||||
],
|
||||
// Strict type checking rules (as warnings for gradual adoption)
|
||||
'@typescript-eslint/no-explicit-any': 'warn',
|
||||
'@typescript-eslint/no-unused-vars': ['warn', {
|
||||
// Strict type checking rules (as errors for stricter enforcement)
|
||||
'@typescript-eslint/no-explicit-any': 'error',
|
||||
'@typescript-eslint/no-unused-vars': ['error', {
|
||||
argsIgnorePattern: '^_',
|
||||
varsIgnorePattern: '^_',
|
||||
}],
|
||||
'@typescript-eslint/strict-boolean-expressions': 'warn',
|
||||
'@typescript-eslint/no-floating-promises': 'warn',
|
||||
'@typescript-eslint/no-misused-promises': 'warn',
|
||||
// Code quality rules
|
||||
|
||||
@@ -14,7 +14,9 @@
|
||||
"test:conformance": "tsx tests/conformance/runner.ts",
|
||||
"lint": "eslint src/**/*.ts",
|
||||
"format": "prettier --write src/**/*.ts",
|
||||
"codegen": "tsx ../shared/tools/codegen/gen_types.ts"
|
||||
"codegen": "tsx ../shared/tools/codegen/gen_types.ts",
|
||||
"codegen:prisma": "node ../shared/tools/codegen/gen_prisma_schema.js",
|
||||
"generate-types": "node ../shared/tools/codegen/generate-types.js"
|
||||
},
|
||||
"keywords": [
|
||||
"database",
|
||||
@@ -30,7 +32,9 @@
|
||||
"@aws-sdk/client-s3": "^3.958.0",
|
||||
"@aws-sdk/lib-storage": "^3.958.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.958.0",
|
||||
"@prisma/adapter-better-sqlite3": "^7.2.0",
|
||||
"@prisma/client": "^7.2.0",
|
||||
"better-sqlite3": "^12.5.0",
|
||||
"prisma": "^7.2.0",
|
||||
"zod": "^4.2.1"
|
||||
},
|
||||
|
||||
24
dbal/development/src/@types/@aws-sdk/client-s3.d.ts
vendored
Normal file
24
dbal/development/src/@types/@aws-sdk/client-s3.d.ts
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
// Minimal ambient type shim for '@aws-sdk/client-s3' so the DBAL typechecks
// without the real AWS SDK type definitions installed.
// NOTE(review): everything is deliberately typed as `any` — these are
// placeholder declarations, not a faithful API surface. Swap in the official
// @aws-sdk types if stricter checking of S3 calls is ever needed.
declare module '@aws-sdk/client-s3' {
  export class S3Client {
    constructor(config: any);
    // Generic command dispatcher; the real SDK types this per command.
    send(command: any): Promise<any>;
  }
  export class GetObjectCommand {
    constructor(input: any);
  }
  export class PutObjectCommand {
    constructor(input: any);
  }
  export class DeleteObjectCommand {
    constructor(input: any);
  }
  export class HeadObjectCommand {
    constructor(input: any);
  }
  export class ListObjectsV2Command {
    constructor(input: any);
  }
  export class CopyObjectCommand {
    constructor(input: any);
  }
}
|
||||
6
dbal/development/src/@types/@aws-sdk/lib-storage.d.ts
vendored
Normal file
6
dbal/development/src/@types/@aws-sdk/lib-storage.d.ts
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Minimal ambient type shim for '@aws-sdk/lib-storage' (multipart upload helper).
// Placeholder `any` types on purpose; see the companion client-s3 shim.
declare module '@aws-sdk/lib-storage' {
  export class Upload {
    constructor(options: any);
    // Resolves when the (possibly multipart) upload completes.
    done(): Promise<any>;
  }
}
|
||||
3
dbal/development/src/@types/@aws-sdk/s3-request-presigner.d.ts
vendored
Normal file
3
dbal/development/src/@types/@aws-sdk/s3-request-presigner.d.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Minimal ambient type shim for '@aws-sdk/s3-request-presigner'.
// Returns a time-limited signed URL for the given client/command pair.
declare module '@aws-sdk/s3-request-presigner' {
  export function getSignedUrl(client: any, command: any, options?: any): Promise<string>;
}
|
||||
@@ -42,18 +42,13 @@ const defaultACLRules: ACLRule[] = [
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'Workflow',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'LuaScript',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'Package',
|
||||
{
|
||||
entity: 'Workflow',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'Package',
|
||||
roles: ['admin', 'god', 'supergod'],
|
||||
operations: ['read', 'list']
|
||||
},
|
||||
|
||||
@@ -25,11 +25,12 @@ export const findByField = (context: ACLContext) => async (entity: string, field
|
||||
|
||||
export const upsert = (context: ACLContext) => async (
|
||||
entity: string,
|
||||
filter: Record<string, unknown>,
|
||||
uniqueField: string,
|
||||
uniqueValue: unknown,
|
||||
createData: Record<string, unknown>,
|
||||
updateData: Record<string, unknown>,
|
||||
) => {
|
||||
return withAudit(context, entity, 'upsert', () => context.baseAdapter.upsert(entity, filter, createData, updateData))
|
||||
return withAudit(context, entity, 'upsert', () => context.baseAdapter.upsert(entity, uniqueField, uniqueValue, createData, updateData))
|
||||
}
|
||||
|
||||
export const updateByField = (context: ACLContext) => async (
|
||||
|
||||
@@ -3,6 +3,8 @@ import type { ACLAdapterOptions, ACLContext, ACLRule, User } from './types'
|
||||
import { logAudit } from '../acl/audit-logger'
|
||||
import { defaultACLRules } from '../acl/default-rules'
|
||||
|
||||
export type { ACLContext } from './types'
|
||||
|
||||
export const createContext = (
|
||||
baseAdapter: DBALAdapter,
|
||||
user: User,
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { DBALAdapter } from '../adapter'
|
||||
export interface User {
|
||||
id: string
|
||||
username: string
|
||||
role: 'user' | 'admin' | 'god' | 'supergod'
|
||||
role: 'user' | 'admin' | 'god' | 'supergod' | 'public' | 'moderator'
|
||||
}
|
||||
|
||||
export interface ACLRule {
|
||||
|
||||
@@ -35,7 +35,13 @@ export const createWriteStrategy = (context: ACLContext) => {
|
||||
return withAudit(context, entity, 'upsert', () => {
|
||||
// Extract first key from filter as uniqueField
|
||||
const uniqueField = Object.keys(filter)[0]
|
||||
if (!uniqueField) {
|
||||
throw new Error('Filter must have at least one key')
|
||||
}
|
||||
const uniqueValue = filter[uniqueField]
|
||||
if (typeof uniqueValue !== 'string') {
|
||||
throw new Error('Unique value must be a string')
|
||||
}
|
||||
return context.baseAdapter.upsert(entity, uniqueField, uniqueValue, createData, updateData)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -18,17 +18,17 @@ export const defaultACLRules: ACLRule[] = [
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'PageView',
|
||||
entity: 'PageConfig',
|
||||
roles: ['user', 'admin', 'god', 'supergod'],
|
||||
operations: ['read', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'PageView',
|
||||
entity: 'PageConfig',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'update', 'delete']
|
||||
},
|
||||
{
|
||||
entity: 'ComponentHierarchy',
|
||||
entity: 'ComponentNode',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
@@ -38,18 +38,18 @@ export const defaultACLRules: ACLRule[] = [
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'LuaScript',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'Package',
|
||||
entity: 'InstalledPackage',
|
||||
roles: ['admin', 'god', 'supergod'],
|
||||
operations: ['read', 'list']
|
||||
},
|
||||
{
|
||||
entity: 'Package',
|
||||
entity: 'InstalledPackage',
|
||||
roles: ['god', 'supergod'],
|
||||
operations: ['create', 'update', 'delete']
|
||||
},
|
||||
{
|
||||
entity: 'PackageData',
|
||||
roles: ['admin', 'god', 'supergod'],
|
||||
operations: ['create', 'read', 'update', 'delete', 'list']
|
||||
},
|
||||
]
|
||||
|
||||
250
dbal/development/src/adapters/memory/index.ts
Normal file
250
dbal/development/src/adapters/memory/index.ts
Normal file
@@ -0,0 +1,250 @@
|
||||
import type { AdapterCapabilities, DBALAdapter } from '../adapter'
|
||||
import type { ListOptions, ListResult } from '../../core/foundation/types'
|
||||
import { DBALError } from '../../core/foundation/errors'
|
||||
|
||||
const ID_FIELDS: Record<string, string> = {
|
||||
Credential: 'username',
|
||||
InstalledPackage: 'packageId',
|
||||
PackageData: 'packageId',
|
||||
}
|
||||
|
||||
const resolveIdField = (entity: string, data?: Record<string, unknown>): string => {
|
||||
if (ID_FIELDS[entity]) {
|
||||
return ID_FIELDS[entity]
|
||||
}
|
||||
if (data && typeof data.id === 'string' && data.id.trim().length > 0) {
|
||||
return 'id'
|
||||
}
|
||||
return 'id'
|
||||
}
|
||||
|
||||
const getRecordId = (entity: string, data: Record<string, unknown>): string => {
|
||||
const idField = resolveIdField(entity, data)
|
||||
const value = data[idField]
|
||||
if (typeof value !== 'string' || value.trim().length === 0) {
|
||||
throw DBALError.validationError(`${entity} ${idField} is required`, [
|
||||
{ field: idField, error: `${idField} is required` },
|
||||
])
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
const applyFilter = (
|
||||
records: Record<string, unknown>[],
|
||||
filter?: Record<string, unknown>,
|
||||
): Record<string, unknown>[] => {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
return records
|
||||
}
|
||||
return records.filter((record) =>
|
||||
Object.entries(filter).every(([key, value]) => record[key] === value),
|
||||
)
|
||||
}
|
||||
|
||||
const applySort = (
|
||||
records: Record<string, unknown>[],
|
||||
sort?: Record<string, 'asc' | 'desc'>,
|
||||
): Record<string, unknown>[] => {
|
||||
if (!sort || Object.keys(sort).length === 0) {
|
||||
return records
|
||||
}
|
||||
const sortEntries = Object.entries(sort)[0]
|
||||
if (sortEntries === undefined) {
|
||||
return records
|
||||
}
|
||||
const [key, direction] = sortEntries
|
||||
return [...records].sort((left, right) => {
|
||||
const a = left[key]
|
||||
const b = right[key]
|
||||
if (typeof a === 'string' && typeof b === 'string') {
|
||||
return direction === 'asc' ? a.localeCompare(b) : b.localeCompare(a)
|
||||
}
|
||||
if (typeof a === 'number' && typeof b === 'number') {
|
||||
return direction === 'asc' ? a - b : b - a
|
||||
}
|
||||
if (typeof a === 'bigint' && typeof b === 'bigint') {
|
||||
return direction === 'asc' ? Number(a - b) : Number(b - a)
|
||||
}
|
||||
if (typeof a === 'boolean' && typeof b === 'boolean') {
|
||||
return direction === 'asc' ? Number(a) - Number(b) : Number(b) - Number(a)
|
||||
}
|
||||
return 0
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * In-memory DBAL adapter: records live in nested Maps keyed by entity name
 * and record id. Suited to tests and development — nothing is persisted and
 * no optional capabilities are supported (see getCapabilities()).
 */
export class MemoryAdapter implements DBALAdapter {
  // entity name -> (record id -> record)
  private store: Map<string, Map<string, Record<string, unknown>>> = new Map()

  /** Lazily create and cache the per-entity record Map. */
  private getEntityStore(entity: string): Map<string, Record<string, unknown>> {
    const existing = this.store.get(entity)
    if (existing) return existing
    const created = new Map<string, Record<string, unknown>>()
    this.store.set(entity, created)
    return created
  }

  /**
   * Insert a new record, keyed by the entity's id field (see getRecordId).
   * Throws a conflict error when a record with the same id already exists.
   */
  async create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    const entityStore = this.getEntityStore(entity)
    const id = getRecordId(entity, data)
    if (entityStore.has(id)) {
      throw DBALError.conflict(`${entity} already exists: ${id}`)
    }
    // Shallow copy: later top-level mutation of the caller's object will not
    // affect the stored record (nested objects remain shared).
    const record = { ...data }
    entityStore.set(id, record)
    return record
  }

  /** Fetch a record by id, or null when it does not exist. */
  async read(entity: string, id: string): Promise<unknown | null> {
    const entityStore = this.getEntityStore(entity)
    return entityStore.get(id) ?? null
  }

  /** Shallow-merge `data` over an existing record; throws notFound when absent. */
  async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    const entityStore = this.getEntityStore(entity)
    const existing = entityStore.get(id)
    if (!existing) {
      throw DBALError.notFound(`${entity} not found: ${id}`)
    }
    const record = { ...existing, ...data }
    entityStore.set(id, record)
    return record
  }

  /** Delete by id; returns false when no record had that id. */
  async delete(entity: string, id: string): Promise<boolean> {
    const entityStore = this.getEntityStore(entity)
    return entityStore.delete(id)
  }

  /**
   * List records with an optional equality filter and sort (see applyFilter /
   * applySort), then paginate with page/limit (defaults: page 1, limit 20).
   * `total` counts the filtered set, not just the returned page.
   */
  async list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    const entityStore = this.getEntityStore(entity)
    const page = options?.page ?? 1
    const limit = options?.limit ?? 20
    const filtered = applyFilter(Array.from(entityStore.values()), options?.filter)
    const sorted = applySort(filtered, options?.sort)
    const start = (page - 1) * limit
    const data = sorted.slice(start, start + limit)
    return {
      data,
      total: filtered.length,
      page,
      limit,
      hasMore: start + limit < filtered.length,
    }
  }

  /** First record matching the equality filter (insertion order), or null. */
  async findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    const entityStore = this.getEntityStore(entity)
    const result = applyFilter(Array.from(entityStore.values()), filter)
    return result[0] ?? null
  }

  /** Convenience wrapper: findFirst on a single field/value equality pair. */
  async findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    return this.findFirst(entity, { [field]: value })
  }

  /**
   * Update the first record whose `uniqueField` equals `uniqueValue`
   * (shallow-merging `updateData`), or create a new record from `createData`
   * with the unique field set when no match exists.
   */
  async upsert(
    entity: string,
    uniqueField: string,
    uniqueValue: unknown,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>,
  ): Promise<unknown> {
    const entityStore = this.getEntityStore(entity)
    const existing = Array.from(entityStore.entries()).find(([, record]) => record[uniqueField] === uniqueValue)
    if (existing) {
      const [id, record] = existing
      const next = { ...record, ...updateData }
      entityStore.set(id, next)
      return next
    }
    const payload = { ...createData, [uniqueField]: uniqueValue }
    return this.create(entity, payload)
  }

  /** Update the first record where record[field] === value; throws notFound otherwise. */
  async updateByField(
    entity: string,
    field: string,
    value: unknown,
    data: Record<string, unknown>,
  ): Promise<unknown> {
    const entityStore = this.getEntityStore(entity)
    const entry = Array.from(entityStore.entries()).find(([, record]) => record[field] === value)
    if (!entry) {
      throw DBALError.notFound(`${entity} not found`)
    }
    const [id, record] = entry
    const next = { ...record, ...data }
    entityStore.set(id, next)
    return next
  }

  /** Delete the first record where record[field] === value; false when none match. */
  async deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    const entityStore = this.getEntityStore(entity)
    const entry = Array.from(entityStore.entries()).find(([, record]) => record[field] === value)
    if (!entry) {
      return false
    }
    return entityStore.delete(entry[0])
  }

  /**
   * Delete every record matching the equality filter. An omitted or empty
   * filter matches (and deletes) ALL records. Returns the number deleted.
   */
  async deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    const entityStore = this.getEntityStore(entity)
    const candidates = Array.from(entityStore.entries()).filter(([, record]) =>
      Object.entries(filter ?? {}).every(([key, value]) => record[key] === value),
    )
    let deleted = 0
    for (const [id] of candidates) {
      if (entityStore.delete(id)) {
        deleted += 1
      }
    }
    return deleted
  }

  /**
   * Bulk insert. Every id is checked against the store before any write, so
   * the operation is all-or-nothing: a single pre-existing id throws a
   * conflict and nothing is stored. Returns the number inserted.
   */
  async createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    if (!data || data.length === 0) return 0
    const entityStore = this.getEntityStore(entity)
    const records = data.map((item) => ({ id: getRecordId(entity, item), record: { ...item } }))
    for (const { id } of records) {
      if (entityStore.has(id)) {
        throw DBALError.conflict(`${entity} already exists: ${id}`)
      }
    }
    records.forEach(({ id, record }) => {
      entityStore.set(id, record)
    })
    return records.length
  }

  /** Shallow-merge `data` into every record matching the filter; returns match count. */
  async updateMany(
    entity: string,
    filter: Record<string, unknown>,
    data: Record<string, unknown>,
  ): Promise<number> {
    const entityStore = this.getEntityStore(entity)
    const entries = Array.from(entityStore.entries())
    const matches = entries.filter(([, record]) =>
      Object.entries(filter).every(([key, value]) => record[key] === value),
    )
    matches.forEach(([id, record]) => {
      entityStore.set(id, { ...record, ...data })
    })
    return matches.length
  }

  /** This adapter supports none of the optional DBAL capabilities. */
  getCapabilities(): Promise<AdapterCapabilities> {
    return Promise.resolve({
      transactions: false,
      joins: false,
      fullTextSearch: false,
      ttl: false,
      jsonQueries: false,
      aggregations: false,
      relations: false,
    })
  }

  /** Discard all stored data. */
  async close(): Promise<void> {
    this.store.clear()
  }
}
|
||||
@@ -1,19 +1,53 @@
|
||||
import { PrismaClient } from '@prisma/client'
|
||||
import { PrismaBetterSqlite3 } from '@prisma/adapter-better-sqlite3'
|
||||
import { PrismaAdapterDialect, type PrismaAdapterOptions, type PrismaContext } from './types'
|
||||
|
||||
export function createPrismaContext(
|
||||
databaseUrl?: string,
|
||||
options?: PrismaAdapterOptions
|
||||
): PrismaContext {
|
||||
console.log('[DBAL Prisma] Creating Prisma context')
|
||||
console.log('[DBAL Prisma] Database URL parameter:', databaseUrl)
|
||||
console.log('[DBAL Prisma] Options:', options)
|
||||
|
||||
const inferredDialect = options?.dialect ?? inferDialectFromUrl(databaseUrl)
|
||||
const prisma = new PrismaClient({
|
||||
datasources: databaseUrl ? { db: { url: databaseUrl } } : undefined,
|
||||
})
|
||||
console.log('[DBAL Prisma] Inferred dialect:', inferredDialect)
|
||||
|
||||
let prisma: PrismaClient
|
||||
|
||||
// For SQLite (or when dialect cannot be inferred), we need to use the driver adapter
|
||||
if (inferredDialect === 'sqlite' || !databaseUrl || inferredDialect === undefined) {
|
||||
// Use relative path as fallback
|
||||
const fallbackUrl = 'file:../../prisma/prisma/dev.db'
|
||||
const finalUrl = databaseUrl || fallbackUrl
|
||||
|
||||
// Ensure URL has file: prefix for SQLite
|
||||
const sqliteUrl = finalUrl.startsWith('file:') ? finalUrl : `file:${finalUrl}`
|
||||
|
||||
console.log('[DBAL Prisma] Using SQLite URL:', sqliteUrl)
|
||||
|
||||
try {
|
||||
// PrismaBetterSqlite3 is a factory that expects { url: string } config
|
||||
const adapter = new PrismaBetterSqlite3({ url: sqliteUrl })
|
||||
console.log('[DBAL Prisma] Adapter factory created successfully')
|
||||
|
||||
prisma = new PrismaClient({ adapter } as any)
|
||||
console.log('[DBAL Prisma] PrismaClient created successfully')
|
||||
} catch (error) {
|
||||
console.error('[DBAL Prisma] Error creating Prisma client:', error)
|
||||
throw error
|
||||
}
|
||||
} else {
|
||||
// For PostgreSQL/MySQL with explicit connection strings
|
||||
// Note: Prisma 7 removed datasources config, so this may not work
|
||||
// Consider using adapters for all database types
|
||||
throw new Error(`Prisma 7 requires adapters. Unsupported database dialect: ${inferredDialect}. Please use SQLite or implement adapters for other databases.`)
|
||||
}
|
||||
|
||||
return {
|
||||
prisma,
|
||||
queryTimeout: options?.queryTimeout ?? 30000,
|
||||
dialect: inferredDialect ?? 'generic'
|
||||
dialect: inferredDialect ?? 'sqlite'
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { AdapterCapabilities } from '../adapter'
|
||||
import type { AdapterCapabilities } from '../../adapter'
|
||||
import type { PrismaContext } from '../types'
|
||||
|
||||
export function buildCapabilities(context: PrismaContext): AdapterCapabilities {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { PrismaContext } from '../types'
|
||||
import { handlePrismaError, getModel, withTimeout, isNotFoundError } from './utils'
|
||||
import { handlePrismaError, getModel, getPrimaryKeyField, withTimeout, isNotFoundError } from './utils'
|
||||
|
||||
export async function createRecord(
|
||||
context: PrismaContext,
|
||||
@@ -21,9 +21,10 @@ export async function readRecord(
|
||||
): Promise<unknown | null> {
|
||||
try {
|
||||
const model = getModel(context, entity)
|
||||
const idField = getPrimaryKeyField(entity)
|
||||
return await withTimeout(
|
||||
context,
|
||||
model.findUnique({ where: { id } as never })
|
||||
model.findUnique({ where: { [idField]: id } as never })
|
||||
)
|
||||
} catch (error) {
|
||||
throw handlePrismaError(error, 'read', entity)
|
||||
@@ -38,10 +39,11 @@ export async function updateRecord(
|
||||
): Promise<unknown> {
|
||||
try {
|
||||
const model = getModel(context, entity)
|
||||
const idField = getPrimaryKeyField(entity)
|
||||
return await withTimeout(
|
||||
context,
|
||||
model.update({
|
||||
where: { id } as never,
|
||||
where: { [idField]: id } as never,
|
||||
data: data as never
|
||||
})
|
||||
)
|
||||
@@ -57,9 +59,10 @@ export async function deleteRecord(
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const model = getModel(context, entity)
|
||||
const idField = getPrimaryKeyField(entity)
|
||||
await withTimeout(
|
||||
context,
|
||||
model.delete({ where: { id } as never })
|
||||
model.delete({ where: { [idField]: id } as never })
|
||||
)
|
||||
return true
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { ListOptions, ListResult } from '../../core/foundation/types'
|
||||
import type { ListOptions, ListResult } from '../../../core/foundation/types'
|
||||
import type { PrismaContext } from '../types'
|
||||
import { handlePrismaError, buildWhereClause, buildOrderBy, getModel, withTimeout } from './utils'
|
||||
import { handlePrismaError, buildWhereClause, buildOrderBy, getModel, getPrimaryKeyField, withTimeout } from './utils'
|
||||
|
||||
export async function listRecords(
|
||||
context: PrismaContext,
|
||||
@@ -69,9 +69,16 @@ export async function findByField(
|
||||
): Promise<unknown | null> {
|
||||
try {
|
||||
const model = getModel(context, entity)
|
||||
const idField = getPrimaryKeyField(entity)
|
||||
if (field === idField) {
|
||||
return await withTimeout(
|
||||
context,
|
||||
model.findUnique({ where: { [field]: value } as never })
|
||||
)
|
||||
}
|
||||
return await withTimeout(
|
||||
context,
|
||||
model.findUnique({ where: { [field]: value } as never })
|
||||
model.findFirst({ where: { [field]: value } as never })
|
||||
)
|
||||
} catch (error) {
|
||||
throw handlePrismaError(error, 'findByField', entity)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { PrismaContext } from '../types'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
|
||||
type PrismaModelDelegate = {
|
||||
findMany: (...args: unknown[]) => Promise<unknown[]>
|
||||
@@ -12,11 +12,18 @@ type PrismaModelDelegate = {
|
||||
delete: (...args: unknown[]) => Promise<unknown>
|
||||
deleteMany: (...args: unknown[]) => Promise<{ count: number }>
|
||||
upsert: (...args: unknown[]) => Promise<unknown>
|
||||
count: (...args: unknown[]) => Promise<number>
|
||||
}
|
||||
|
||||
const PRIMARY_KEY_FIELDS: Record<string, string> = {
|
||||
Credential: 'username',
|
||||
InstalledPackage: 'packageId',
|
||||
PackageData: 'packageId',
|
||||
}
|
||||
|
||||
export function getModel(context: PrismaContext, entity: string): PrismaModelDelegate {
|
||||
const modelName = entity.charAt(0).toLowerCase() + entity.slice(1)
|
||||
const model = (context.prisma as Record<string, PrismaModelDelegate>)[modelName]
|
||||
const model = (context.prisma as unknown as Record<string, PrismaModelDelegate>)[modelName]
|
||||
|
||||
if (!model) {
|
||||
throw DBALError.notFound(`Entity ${entity} not found`)
|
||||
@@ -25,6 +32,10 @@ export function getModel(context: PrismaContext, entity: string): PrismaModelDel
|
||||
return model
|
||||
}
|
||||
|
||||
export function getPrimaryKeyField(entity: string): string {
|
||||
return PRIMARY_KEY_FIELDS[entity] ?? 'id'
|
||||
}
|
||||
|
||||
export function buildWhereClause(filter: Record<string, unknown>): Record<string, unknown> {
|
||||
const where: Record<string, unknown> = {}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { promises as fs, createReadStream } from 'fs'
|
||||
import type { ReadStreamOptions } from 'fs'
|
||||
import type { DownloadOptions } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { FilesystemContext } from '../context'
|
||||
import { buildFullPath } from '../paths'
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { FilesystemContext } from '../context'
|
||||
import { buildFullPath } from '../paths'
|
||||
import { readMetadata } from './metadata'
|
||||
@@ -20,7 +20,7 @@ export async function listBlobs(
|
||||
return {
|
||||
items: items.slice(0, maxKeys),
|
||||
isTruncated: items.length > maxKeys,
|
||||
nextToken: items.length > maxKeys ? items[maxKeys].key : undefined,
|
||||
nextToken: items.length > maxKeys && items[maxKeys] ? items[maxKeys].key : undefined,
|
||||
}
|
||||
} catch (error) {
|
||||
const fsError = error as Error
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
import type { BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { FilesystemContext } from '../context'
|
||||
import { buildFullPath, buildMetadataPath } from '../paths'
|
||||
import { readMetadata } from './metadata'
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import { createHash } from 'crypto'
|
||||
import type { BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { FilesystemContext } from '../context'
|
||||
import { buildFullPath, buildMetadataPath } from '../paths'
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import { promises as fs, createWriteStream } from 'fs'
|
||||
import path from 'path'
|
||||
import { pipeline } from 'stream/promises'
|
||||
import type { BlobMetadata, UploadOptions } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { FilesystemContext } from '../context'
|
||||
import { buildFullPath, buildMetadataPath } from '../paths'
|
||||
import { generateEtag, writeMetadata } from './metadata'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { DownloadOptions } from '../blob-storage'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { DownloadOptions } from '../../blob-storage'
|
||||
import type { MemoryStore } from './store'
|
||||
import { getBlobOrThrow, normalizeKey } from './utils'
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import type {
|
||||
UploadOptions,
|
||||
DownloadOptions,
|
||||
BlobListOptions,
|
||||
} from '../blob-storage'
|
||||
} from '../../blob-storage'
|
||||
import { createStore } from './store'
|
||||
import { uploadBuffer, uploadFromStream } from './uploads'
|
||||
import { downloadBuffer, downloadStream } from './downloads'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../blob-storage'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../../blob-storage'
|
||||
import type { MemoryStore } from './store'
|
||||
import { toBlobMetadata } from './serialization'
|
||||
import { cleanupStoreEntry, getBlobOrThrow, normalizeKey } from './utils'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { createHash } from 'crypto'
|
||||
import type { UploadOptions, BlobMetadata } from '../blob-storage'
|
||||
import type { UploadOptions, BlobMetadata } from '../../blob-storage'
|
||||
import type { BlobData } from './store'
|
||||
|
||||
export const generateEtag = (data: Buffer): string => `"${createHash('md5').update(data).digest('hex')}"`
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { UploadOptions } from '../blob-storage'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { UploadOptions } from '../../blob-storage'
|
||||
import type { MemoryStore } from './store'
|
||||
import { collectStream, toBlobData, toBlobMetadata } from './serialization'
|
||||
import { normalizeKey } from './utils'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { BlobData, MemoryStore } from './store'
|
||||
|
||||
export const normalizeKey = (key: string): string => key.replace(/^\/+/, '').trim()
|
||||
|
||||
@@ -26,18 +26,19 @@ export async function createS3Context(config: BlobStorageConfig): Promise<S3Cont
|
||||
}
|
||||
|
||||
const { S3Client } = s3Module
|
||||
const s3Client = new S3Client({
|
||||
region: s3Config.region,
|
||||
credentials: s3Config.accessKeyId && s3Config.secretAccessKey ? {
|
||||
accessKeyId: s3Config.accessKeyId,
|
||||
secretAccessKey: s3Config.secretAccessKey,
|
||||
} : undefined,
|
||||
endpoint: s3Config.endpoint,
|
||||
forcePathStyle: s3Config.forcePathStyle,
|
||||
})
|
||||
|
||||
return {
|
||||
bucket,
|
||||
s3Client: new S3Client({
|
||||
region: s3Config.region,
|
||||
credentials: s3Config.accessKeyId && s3Config.secretAccessKey ? {
|
||||
accessKeyId: s3Config.accessKeyId,
|
||||
secretAccessKey: s3Config.secretAccessKey,
|
||||
} : undefined,
|
||||
endpoint: s3Config.endpoint,
|
||||
forcePathStyle: s3Config.forcePathStyle,
|
||||
})
|
||||
s3Client: s3Client as S3ClientLike,
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error('AWS SDK @aws-sdk/client-s3 not installed. Install with: npm install @aws-sdk/client-s3')
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { DownloadOptions } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import { buildRangeHeader } from '../range'
|
||||
import type { S3Context } from '../client'
|
||||
|
||||
@@ -17,11 +17,16 @@ export async function downloadBuffer(
|
||||
Range: buildRangeHeader(options),
|
||||
})
|
||||
|
||||
const response = await context.s3Client.send(command)
|
||||
const response = await context.s3Client.send(command) as {
|
||||
Body?: AsyncIterable<Uint8Array>
|
||||
}
|
||||
|
||||
const chunks: Uint8Array[] = []
|
||||
for await (const chunk of response.Body as any) {
|
||||
chunks.push(chunk)
|
||||
const body = response.Body
|
||||
if (body) {
|
||||
for await (const chunk of body) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
}
|
||||
|
||||
return Buffer.concat(chunks)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { S3Context } from '../client'
|
||||
|
||||
export async function listBlobs(
|
||||
@@ -16,9 +16,20 @@ export async function listBlobs(
|
||||
MaxKeys: options.maxKeys || 1000,
|
||||
})
|
||||
|
||||
const response = await context.s3Client.send(command)
|
||||
const response = await context.s3Client.send(command) as {
|
||||
Contents?: Array<{
|
||||
Key?: string
|
||||
Size?: number
|
||||
ETag?: string
|
||||
LastModified?: Date
|
||||
}>
|
||||
NextContinuationToken?: string
|
||||
IsTruncated?: boolean
|
||||
}
|
||||
|
||||
const items: BlobMetadata[] = (response.Contents || []).map(obj => ({
|
||||
const contents = response.Contents
|
||||
|
||||
const items: BlobMetadata[] = (contents || []).map(obj => ({
|
||||
key: obj.Key || '',
|
||||
size: obj.Size || 0,
|
||||
contentType: 'application/octet-stream',
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { S3Context } from '../client'
|
||||
import { getMetadata } from './metadata'
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { BlobMetadata } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { S3Context } from '../client'
|
||||
|
||||
export async function getMetadata(
|
||||
@@ -14,7 +14,13 @@ export async function getMetadata(
|
||||
Key: key,
|
||||
})
|
||||
|
||||
const response = await context.s3Client.send(command)
|
||||
const response = await context.s3Client.send(command) as {
|
||||
ContentLength?: number
|
||||
ContentType?: string
|
||||
ETag?: string
|
||||
LastModified?: Date
|
||||
Metadata?: Record<string, string>
|
||||
}
|
||||
|
||||
return {
|
||||
key,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { BlobMetadata, UploadOptions } from '../../../blob-storage'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { S3Context } from '../client'
|
||||
|
||||
export async function uploadBuffer(
|
||||
@@ -19,7 +19,9 @@ export async function uploadBuffer(
|
||||
Metadata: options.metadata,
|
||||
})
|
||||
|
||||
const response = await context.s3Client.send(command)
|
||||
const response = await context.s3Client.send(command) as {
|
||||
ETag?: string
|
||||
}
|
||||
|
||||
return {
|
||||
key,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { TenantManager } from '../../core/foundation/tenant-context'
|
||||
import type { BlobStorage } from '../blob-storage'
|
||||
import type { TenantManager } from '../../../core/foundation/tenant-context'
|
||||
import type { BlobStorage } from '../../blob-storage'
|
||||
|
||||
export interface TenantAwareDeps {
|
||||
baseStorage: BlobStorage
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata, BlobStorage, DownloadOptions, UploadOptions } from '../blob-storage'
|
||||
import type { TenantManager } from '../../core/foundation/tenant-context'
|
||||
import type { BlobListOptions, BlobListResult, BlobMetadata, BlobStorage, DownloadOptions, UploadOptions } from '../../blob-storage'
|
||||
import type { TenantManager } from '../../../core/foundation/tenant-context'
|
||||
import type { TenantAwareDeps } from './context'
|
||||
import { deleteBlob, exists, copyBlob, getStats } from './mutations'
|
||||
import { downloadBuffer, downloadStream, generatePresignedUrl, getMetadata, listBlobs } from './reads'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { BlobMetadata } from '../blob-storage'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { BlobMetadata } from '../../blob-storage'
|
||||
import { auditCopy, auditDeletion } from './audit-hooks'
|
||||
import type { TenantAwareDeps } from './context'
|
||||
import { scopeKey } from './context'
|
||||
@@ -9,7 +9,7 @@ export const deleteBlob = async (deps: TenantAwareDeps, key: string): Promise<bo
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'delete')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
|
||||
try {
|
||||
const metadata = await deps.baseStorage.getMetadata(scopedKey)
|
||||
@@ -29,7 +29,7 @@ export const exists = async (deps: TenantAwareDeps, key: string): Promise<boolea
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'read')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
return deps.baseStorage.exists(scopedKey)
|
||||
}
|
||||
|
||||
@@ -42,14 +42,14 @@ export const copyBlob = async (
|
||||
ensurePermission(context, 'read')
|
||||
ensurePermission(context, 'write')
|
||||
|
||||
const sourceScoped = scopeKey(sourceKey, context.namespace)
|
||||
const sourceScoped = scopeKey(sourceKey, context.namespace ?? '')
|
||||
const sourceMetadata = await deps.baseStorage.getMetadata(sourceScoped)
|
||||
|
||||
if (!context.canUploadBlob(sourceMetadata.size)) {
|
||||
throw DBALError.rateLimitExceeded()
|
||||
}
|
||||
|
||||
const destScoped = scopeKey(destKey, context.namespace)
|
||||
const destScoped = scopeKey(destKey, context.namespace ?? '')
|
||||
const metadata = await deps.baseStorage.copy(sourceScoped, destScoped)
|
||||
|
||||
await auditCopy(deps, sourceMetadata.size)
|
||||
@@ -62,6 +62,9 @@ export const copyBlob = async (
|
||||
|
||||
export const getStats = async (deps: TenantAwareDeps) => {
|
||||
const context = await resolveTenantContext(deps)
|
||||
if (!context.quota) {
|
||||
return { count: 0, totalSize: 0 }
|
||||
}
|
||||
return {
|
||||
count: context.quota.currentBlobCount,
|
||||
totalSize: context.quota.currentBlobStorageBytes,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { DownloadOptions, BlobMetadata, BlobListOptions, BlobListResult } from '../blob-storage'
|
||||
import type { DownloadOptions, BlobMetadata, BlobListOptions, BlobListResult } from '../../blob-storage'
|
||||
import type { TenantAwareDeps } from './context'
|
||||
import { scopeKey, unscopeKey } from './context'
|
||||
import { ensurePermission, resolveTenantContext } from './tenant-context'
|
||||
@@ -7,7 +7,7 @@ export const downloadBuffer = async (deps: TenantAwareDeps, key: string): Promis
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'read')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
return deps.baseStorage.download(scopedKey)
|
||||
}
|
||||
|
||||
@@ -19,7 +19,7 @@ export const downloadStream = async (
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'read')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
return deps.baseStorage.downloadStream(scopedKey, options)
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ export const listBlobs = async (
|
||||
|
||||
const scopedOptions: BlobListOptions = {
|
||||
...options,
|
||||
prefix: options.prefix ? scopeKey(options.prefix, context.namespace) : context.namespace,
|
||||
prefix: options.prefix ? scopeKey(options.prefix, context.namespace ?? '') : context.namespace ?? '',
|
||||
}
|
||||
|
||||
const result = await deps.baseStorage.list(scopedOptions)
|
||||
@@ -41,7 +41,7 @@ export const listBlobs = async (
|
||||
...result,
|
||||
items: result.items.map(item => ({
|
||||
...item,
|
||||
key: unscopeKey(item.key, context.namespace),
|
||||
key: unscopeKey(item.key, context.namespace ?? ''),
|
||||
})),
|
||||
}
|
||||
}
|
||||
@@ -50,7 +50,7 @@ export const getMetadata = async (deps: TenantAwareDeps, key: string): Promise<B
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'read')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
const metadata = await deps.baseStorage.getMetadata(scopedKey)
|
||||
|
||||
return {
|
||||
@@ -67,6 +67,6 @@ export const generatePresignedUrl = async (
|
||||
const context = await resolveTenantContext(deps)
|
||||
ensurePermission(context, 'read')
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
return deps.baseStorage.generatePresignedUrl(scopedKey, expiresIn)
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import type { TenantContext } from '../../core/foundation/tenant-context'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import type { TenantContext } from '../../../core/foundation/tenant-context'
|
||||
import type { TenantAwareDeps } from './context'
|
||||
|
||||
export const resolveTenantContext = async ({ tenantManager, tenantId, userId }: TenantAwareDeps): Promise<TenantContext> => {
|
||||
return tenantManager.getTenantContext(tenantId, userId)
|
||||
const hasAccess = await tenantManager.validateTenantAccess(tenantId, userId)
|
||||
if (!hasAccess) {
|
||||
throw DBALError.forbidden(`User ${userId} does not have access to tenant ${tenantId}`)
|
||||
}
|
||||
return tenantManager.getTenantContext(tenantId)
|
||||
}
|
||||
|
||||
export const ensurePermission = (context: TenantContext, action: 'read' | 'write' | 'delete'): void => {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { DBALError } from '../../../../core/foundation/errors'
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import { auditUpload } from './audit-hooks'
|
||||
import type { TenantAwareDeps } from './context'
|
||||
import { scopeKey } from './context'
|
||||
import { ensurePermission, resolveTenantContext } from './tenant-context'
|
||||
import type { UploadOptions, BlobMetadata } from '../blob-storage'
|
||||
import type { UploadOptions, BlobMetadata } from '../../blob-storage'
|
||||
|
||||
export const uploadBuffer = async (
|
||||
deps: TenantAwareDeps,
|
||||
@@ -18,7 +18,7 @@ export const uploadBuffer = async (
|
||||
throw DBALError.rateLimitExceeded()
|
||||
}
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
const metadata = await deps.baseStorage.upload(scopedKey, data, options)
|
||||
await auditUpload(deps, data.length)
|
||||
|
||||
@@ -42,7 +42,7 @@ export const uploadStream = async (
|
||||
throw DBALError.rateLimitExceeded()
|
||||
}
|
||||
|
||||
const scopedKey = scopeKey(key, context.namespace)
|
||||
const scopedKey = scopeKey(key, context.namespace ?? '')
|
||||
const metadata = await deps.baseStorage.uploadStream(scopedKey, stream, size, options)
|
||||
await auditUpload(deps, size)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import { DBALError } from '../../core/foundation/errors'
|
||||
import type { RPCMessage } from '../utils/rpc-types'
|
||||
import type { BridgeState } from './state'
|
||||
import type { MessageRouter } from './message-router'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { DBALAdapter, AdapterCapabilities } from '../../adapters/adapter'
|
||||
import type { ListOptions, ListResult } from '../../core/types'
|
||||
import type { ListOptions, ListResult } from '../../core/foundation/types'
|
||||
import { createConnectionManager } from './connection-manager'
|
||||
import { createMessageRouter } from './message-router'
|
||||
import { createOperations } from './operations'
|
||||
@@ -47,10 +47,13 @@ export class WebSocketBridge implements DBALAdapter {
|
||||
|
||||
upsert(
|
||||
entity: string,
|
||||
filter: Record<string, unknown>,
|
||||
uniqueField: string,
|
||||
uniqueValue: unknown,
|
||||
createData: Record<string, unknown>,
|
||||
updateData: Record<string, unknown>,
|
||||
): Promise<unknown> {
|
||||
// Convert the new signature to the old one for compatibility
|
||||
const filter = { [uniqueField]: uniqueValue }
|
||||
return this.operations.upsert(entity, filter, createData, updateData)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import { DBALError } from '../../core/foundation/errors'
|
||||
import type { RPCResponse } from '../utils/rpc-types'
|
||||
import type { BridgeState } from './state'
|
||||
|
||||
@@ -56,7 +56,7 @@ export const createMessageRouter = (state: BridgeState): MessageRouter => ({
|
||||
state.pendingRequests.delete(response.id)
|
||||
|
||||
if (response.error) {
|
||||
const error = new DBALError(response.error.message, response.error.code, response.error.details)
|
||||
const error = new DBALError(response.error.code, response.error.message, response.error.details)
|
||||
pending.reject(error)
|
||||
} else {
|
||||
pending.resolve(response.result)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { AdapterCapabilities } from '../../adapters/adapter'
|
||||
import type { ListOptions, ListResult } from '../../core/types'
|
||||
import type { ListOptions, ListResult } from '../../core/foundation/types'
|
||||
import type { ConnectionManager } from './connection-manager'
|
||||
import type { BridgeState } from './state'
|
||||
import { rpcCall } from './rpc'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DBALError } from '../../../core/foundation/errors'
|
||||
import { DBALError } from '../../core/foundation/errors'
|
||||
import { generateRequestId } from '../utils/generate-request-id'
|
||||
import type { RPCMessage } from '../utils/rpc-types'
|
||||
import type { ConnectionManager } from './connection-manager'
|
||||
|
||||
@@ -7,6 +7,7 @@ import type { DBALConfig } from '../../runtime/config'
|
||||
import type { DBALAdapter } from '../../adapters/adapter'
|
||||
import { DBALError } from '../foundation/errors'
|
||||
import { PrismaAdapter, PostgresAdapter, MySQLAdapter } from '../../adapters/prisma'
|
||||
import { MemoryAdapter } from '../../adapters/memory'
|
||||
import { ACLAdapter } from '../../adapters/acl-adapter'
|
||||
import { WebSocketBridge } from '../../bridges/websocket-bridge'
|
||||
|
||||
@@ -28,6 +29,9 @@ export const createAdapter = (config: DBALConfig): DBALAdapter => {
|
||||
}
|
||||
)
|
||||
break
|
||||
case 'memory':
|
||||
baseAdapter = new MemoryAdapter()
|
||||
break
|
||||
case 'postgres':
|
||||
baseAdapter = new PostgresAdapter(
|
||||
config.database?.url,
|
||||
|
||||
@@ -2,10 +2,10 @@ import type { DBALAdapter } from '../../adapters/adapter'
|
||||
import type { DBALConfig } from '../../runtime/config'
|
||||
import { createAdapter } from './adapter-factory'
|
||||
import {
|
||||
createComponentOperations,
|
||||
createLuaScriptOperations,
|
||||
createPackageOperations,
|
||||
createPageOperations,
|
||||
createComponentNodeOperations,
|
||||
createInstalledPackageOperations,
|
||||
createPackageDataOperations,
|
||||
createPageConfigOperations,
|
||||
createSessionOperations,
|
||||
createUserOperations,
|
||||
createWorkflowOperations
|
||||
@@ -13,12 +13,12 @@ import {
|
||||
|
||||
export const buildAdapter = (config: DBALConfig): DBALAdapter => createAdapter(config)
|
||||
|
||||
export const buildEntityOperations = (adapter: DBALAdapter) => ({
|
||||
users: createUserOperations(adapter),
|
||||
pages: createPageOperations(adapter),
|
||||
components: createComponentOperations(adapter),
|
||||
workflows: createWorkflowOperations(adapter),
|
||||
luaScripts: createLuaScriptOperations(adapter),
|
||||
packages: createPackageOperations(adapter),
|
||||
sessions: createSessionOperations(adapter)
|
||||
export const buildEntityOperations = (adapter: DBALAdapter, tenantId?: string) => ({
|
||||
users: createUserOperations(adapter, tenantId),
|
||||
pageConfigs: createPageConfigOperations(adapter, tenantId),
|
||||
componentNodes: createComponentNodeOperations(adapter, tenantId),
|
||||
workflows: createWorkflowOperations(adapter, tenantId),
|
||||
installedPackages: createInstalledPackageOperations(adapter, tenantId),
|
||||
packageData: createPackageDataOperations(adapter),
|
||||
sessions: createSessionOperations(adapter, tenantId),
|
||||
})
|
||||
|
||||
@@ -20,7 +20,7 @@ export class DBALClient {
|
||||
constructor(config: DBALConfig) {
|
||||
this.config = normalizeClientConfig(validateClientConfig(config))
|
||||
this.adapter = buildAdapter(this.config)
|
||||
this.operations = buildEntityOperations(this.adapter)
|
||||
this.operations = buildEntityOperations(this.adapter, this.config.tenantId)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -31,17 +31,31 @@ export class DBALClient {
|
||||
}
|
||||
|
||||
/**
|
||||
* Page entity operations
|
||||
* PageConfig entity operations
|
||||
*/
|
||||
get pages() {
|
||||
return this.operations.pages
|
||||
get pageConfigs() {
|
||||
return this.operations.pageConfigs
|
||||
}
|
||||
|
||||
/**
|
||||
* Component hierarchy entity operations
|
||||
* Deprecated: use pageConfigs
|
||||
*/
|
||||
get pages() {
|
||||
return this.operations.pageConfigs
|
||||
}
|
||||
|
||||
/**
|
||||
* ComponentNode entity operations
|
||||
*/
|
||||
get componentNodes() {
|
||||
return this.operations.componentNodes
|
||||
}
|
||||
|
||||
/**
|
||||
* Deprecated: use componentNodes
|
||||
*/
|
||||
get components() {
|
||||
return this.operations.components
|
||||
return this.operations.componentNodes
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -52,17 +66,24 @@ export class DBALClient {
|
||||
}
|
||||
|
||||
/**
|
||||
* Lua script entity operations
|
||||
* InstalledPackage entity operations
|
||||
*/
|
||||
get luaScripts() {
|
||||
return this.operations.luaScripts
|
||||
get installedPackages() {
|
||||
return this.operations.installedPackages
|
||||
}
|
||||
|
||||
/**
|
||||
* Package entity operations
|
||||
* Deprecated: use installedPackages
|
||||
*/
|
||||
get packages() {
|
||||
return this.operations.packages
|
||||
return this.operations.installedPackages
|
||||
}
|
||||
|
||||
/**
|
||||
* PackageData entity operations
|
||||
*/
|
||||
get packageData() {
|
||||
return this.operations.packageData
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -10,15 +10,14 @@
|
||||
import type { DBALConfig } from '../../runtime/config'
|
||||
import type { DBALAdapter } from '../../adapters/adapter'
|
||||
import { createAdapter } from './adapter-factory'
|
||||
import {
|
||||
createUserOperations,
|
||||
createPageOperations,
|
||||
createComponentOperations,
|
||||
createWorkflowOperations,
|
||||
createLuaScriptOperations,
|
||||
createPackageOperations,
|
||||
createSessionOperations,
|
||||
} from '../entities'
|
||||
import {
|
||||
createUserOperations,
|
||||
createPageOperations,
|
||||
createComponentOperations,
|
||||
createWorkflowOperations,
|
||||
createPackageOperations,
|
||||
createSessionOperations,
|
||||
} from '../entities'
|
||||
|
||||
export class DBALClient {
|
||||
private adapter: DBALAdapter
|
||||
@@ -441,88 +440,6 @@ export class DBALClient {
|
||||
}
|
||||
}
|
||||
|
||||
get luaScripts() {
|
||||
return {
|
||||
create: async (data: Omit<LuaScript, 'id' | 'createdAt' | 'updatedAt'>): Promise<LuaScript> => {
|
||||
const validationErrors = validateLuaScriptCreate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError(
|
||||
'Invalid Lua script data',
|
||||
validationErrors.map(error => ({ field: 'luaScript', error }))
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
return this.adapter.create('LuaScript', data) as Promise<LuaScript>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict(`Lua script with name '${data.name}' already exists`)
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
read: async (id: string): Promise<LuaScript | null> => {
|
||||
const validationErrors = validateId(id)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError(
|
||||
'Invalid Lua script ID',
|
||||
validationErrors.map(error => ({ field: 'id', error }))
|
||||
)
|
||||
}
|
||||
|
||||
const result = await this.adapter.read('LuaScript', id) as LuaScript | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Lua script not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
update: async (id: string, data: Partial<LuaScript>): Promise<LuaScript> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
throw DBALError.validationError(
|
||||
'Invalid Lua script ID',
|
||||
idErrors.map(error => ({ field: 'id', error }))
|
||||
)
|
||||
}
|
||||
|
||||
const validationErrors = validateLuaScriptUpdate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError(
|
||||
'Invalid Lua script update data',
|
||||
validationErrors.map(error => ({ field: 'luaScript', error }))
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
return this.adapter.update('LuaScript', id, data) as Promise<LuaScript>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Lua script name already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
delete: async (id: string): Promise<boolean> => {
|
||||
const validationErrors = validateId(id)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError(
|
||||
'Invalid Lua script ID',
|
||||
validationErrors.map(error => ({ field: 'id', error }))
|
||||
)
|
||||
}
|
||||
|
||||
const result = await this.adapter.delete('LuaScript', id)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Lua script not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
list: async (options?: ListOptions): Promise<ListResult<LuaScript>> => {
|
||||
return this.adapter.list('LuaScript', options) as Promise<ListResult<LuaScript>>
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
get packages() {
|
||||
return {
|
||||
create: async (data: Omit<Package, 'id' | 'createdAt' | 'updatedAt'>): Promise<Package> => {
|
||||
|
||||
@@ -6,7 +6,7 @@ export const validateClientConfig = (config: DBALConfig): DBALConfig => {
|
||||
throw DBALError.validationError('Adapter type must be specified', [])
|
||||
}
|
||||
|
||||
if (config.mode !== 'production' && !config.database?.url) {
|
||||
if (config.mode !== 'production' && config.adapter !== 'memory' && !config.database?.url) {
|
||||
throw DBALError.validationError('Database URL must be specified for non-production mode', [])
|
||||
}
|
||||
|
||||
|
||||
@@ -9,41 +9,16 @@ export * as user from './user';
|
||||
export * as page from './page';
|
||||
export * as workflow from './workflow';
|
||||
export * as session from './session';
|
||||
export * as luaScript from './lua-script';
|
||||
export * as pkg from './package';
|
||||
|
||||
// Legacy factory exports (for backward compatibility)
|
||||
// TODO: Implement these operation factory functions
|
||||
// export { createUserOperations } from './operations/core/user-operations';
|
||||
// export { createPageOperations } from './operations/system/page-operations';
|
||||
// export { createComponentOperations } from './operations/system/component-operations';
|
||||
// export { createWorkflowOperations } from './operations/core/workflow-operations';
|
||||
// export { createLuaScriptOperations } from './operations/core/lua-script-operations';
|
||||
// export { createPackageOperations } from './operations/system/package-operations';
|
||||
// export { createSessionOperations } from './operations/core/session-operations';
|
||||
|
||||
// Temporary stubs for operation factories
|
||||
export const createUserOperations = (...args: any[]): any => {
|
||||
throw new Error('User operations factory not yet implemented');
|
||||
};
|
||||
export const createPageOperations = (...args: any[]): any => {
|
||||
throw new Error('Page operations factory not yet implemented');
|
||||
};
|
||||
export const createComponentOperations = (...args: any[]): any => {
|
||||
throw new Error('Component operations factory not yet implemented');
|
||||
};
|
||||
export const createWorkflowOperations = (...args: any[]): any => {
|
||||
throw new Error('Workflow operations factory not yet implemented');
|
||||
};
|
||||
export const createLuaScriptOperations = (...args: any[]): any => {
|
||||
throw new Error('Lua script operations factory not yet implemented');
|
||||
};
|
||||
export const createPackageOperations = (...args: any[]): any => {
|
||||
throw new Error('Package operations factory not yet implemented');
|
||||
};
|
||||
export const createSessionOperations = (...args: any[]): any => {
|
||||
throw new Error('Session operations factory not yet implemented');
|
||||
};
|
||||
export { createUserOperations } from './operations/core/user-operations';
|
||||
export { createPageOperations, createPageConfigOperations } from './operations/system/page-operations';
|
||||
export { createComponentOperations, createComponentNodeOperations } from './operations/system/component-operations';
|
||||
export { createWorkflowOperations } from './operations/core/workflow-operations';
|
||||
export { createPackageOperations, createInstalledPackageOperations } from './operations/system/package-operations';
|
||||
export { createPackageDataOperations } from './operations/system/package-data-operations';
|
||||
export { createSessionOperations } from './operations/core/session-operations';
|
||||
|
||||
// Validation utilities
|
||||
export * from '../validation';
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
/**
|
||||
* @file create-lua-script.ts
|
||||
* @description Create Lua script operation
|
||||
*/
|
||||
import type { CreateLuaScriptInput, LuaScript, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateLuaScriptCreate } from '../../../validation/entities/lua-script/validate-lua-script-create'
|
||||
|
||||
/**
|
||||
* Create a new Lua script in the store
|
||||
*/
|
||||
export const createLuaScript = async (
|
||||
store: InMemoryStore,
|
||||
input: CreateLuaScriptInput
|
||||
): Promise<Result<LuaScript>> => {
|
||||
const isSandboxed = input.isSandboxed ?? true
|
||||
const timeoutMs = input.timeoutMs ?? 5000
|
||||
const validationErrors = validateLuaScriptCreate({
|
||||
name: input.name,
|
||||
description: input.description,
|
||||
code: input.code,
|
||||
isSandboxed,
|
||||
allowedGlobals: input.allowedGlobals,
|
||||
timeoutMs,
|
||||
createdBy: input.createdBy
|
||||
})
|
||||
|
||||
if (validationErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] } }
|
||||
}
|
||||
|
||||
if (store.luaScriptNames.has(input.name)) {
|
||||
return { success: false, error: { code: 'CONFLICT', message: 'Lua script name already exists' } }
|
||||
}
|
||||
|
||||
const script: LuaScript = {
|
||||
id: store.generateId('lua'),
|
||||
name: input.name,
|
||||
description: input.description,
|
||||
code: input.code,
|
||||
isSandboxed,
|
||||
allowedGlobals: [...input.allowedGlobals],
|
||||
timeoutMs,
|
||||
createdBy: input.createdBy,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date()
|
||||
}
|
||||
|
||||
store.luaScripts.set(script.id, script)
|
||||
store.luaScriptNames.set(script.name, script.id)
|
||||
|
||||
return { success: true, data: script }
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
/**
|
||||
* @file delete-lua-script.ts
|
||||
* @description Delete Lua script operation
|
||||
*/
|
||||
import type { Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../../validation/entities/validate-id'
|
||||
|
||||
/**
|
||||
* Delete a Lua script by ID
|
||||
*/
|
||||
export const deleteLuaScript = async (store: InMemoryStore, id: string): Promise<Result<boolean>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
}
|
||||
|
||||
const script = store.luaScripts.get(id)
|
||||
if (!script) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Lua script not found: ${id}` } }
|
||||
}
|
||||
|
||||
store.luaScripts.delete(id)
|
||||
store.luaScriptNames.delete(script.name)
|
||||
|
||||
return { success: true, data: true }
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
|
||||
* @file get-lua-script.ts
|
||||
* @description Get Lua script by ID operation
|
||||
*/
|
||||
import type { LuaScript, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../../validation/entities/validate-id'
|
||||
|
||||
/**
|
||||
* Get a Lua script by ID
|
||||
*/
|
||||
export const getLuaScript = async (store: InMemoryStore, id: string): Promise<Result<LuaScript>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
}
|
||||
|
||||
const script = store.luaScripts.get(id)
|
||||
if (!script) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Lua script not found: ${id}` } }
|
||||
}
|
||||
|
||||
return { success: true, data: script }
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
/**
|
||||
* @file list-lua-scripts.ts
|
||||
* @description List Lua scripts with filtering and pagination
|
||||
*/
|
||||
import type { ListOptions, LuaScript, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
|
||||
/**
|
||||
* List Lua scripts with filtering and pagination
|
||||
*/
|
||||
export const listLuaScripts = async (
|
||||
store: InMemoryStore,
|
||||
options: ListOptions = {}
|
||||
): Promise<Result<LuaScript[]>> => {
|
||||
const { filter = {}, sort = {}, page = 1, limit = 20 } = options
|
||||
|
||||
let scripts = Array.from(store.luaScripts.values())
|
||||
|
||||
if (filter.createdBy !== undefined) {
|
||||
scripts = scripts.filter((s) => s.createdBy === filter.createdBy)
|
||||
}
|
||||
|
||||
if (filter.isSandboxed !== undefined) {
|
||||
scripts = scripts.filter((s) => s.isSandboxed === filter.isSandboxed)
|
||||
}
|
||||
|
||||
if (sort.name) {
|
||||
scripts.sort((a, b) =>
|
||||
sort.name === 'asc' ? a.name.localeCompare(b.name) : b.name.localeCompare(a.name)
|
||||
)
|
||||
} else if (sort.createdAt) {
|
||||
scripts.sort((a, b) =>
|
||||
sort.createdAt === 'asc' ? a.createdAt.getTime() - b.createdAt.getTime() : b.createdAt.getTime() - a.createdAt.getTime()
|
||||
)
|
||||
}
|
||||
|
||||
const start = (page - 1) * limit
|
||||
const paginated = scripts.slice(start, start + limit)
|
||||
|
||||
return { success: true, data: paginated }
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
/**
|
||||
* @file update-lua-script.ts
|
||||
* @description Update Lua script operation
|
||||
*/
|
||||
import type { LuaScript, Result, UpdateLuaScriptInput } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../../validation/entities/validate-id'
|
||||
import { validateLuaScriptUpdate } from '../../../validation/entities/lua-script/validate-lua-script-update'
|
||||
|
||||
/**
|
||||
* Update an existing Lua script
|
||||
*/
|
||||
export const updateLuaScript = async (
|
||||
store: InMemoryStore,
|
||||
id: string,
|
||||
input: UpdateLuaScriptInput
|
||||
): Promise<Result<LuaScript>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
}
|
||||
|
||||
const script = store.luaScripts.get(id)
|
||||
if (!script) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Lua script not found: ${id}` } }
|
||||
}
|
||||
|
||||
const validationErrors = validateLuaScriptUpdate(input)
|
||||
if (validationErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] } }
|
||||
}
|
||||
|
||||
if (input.name !== undefined && input.name !== script.name) {
|
||||
if (store.luaScriptNames.has(input.name)) {
|
||||
return { success: false, error: { code: 'CONFLICT', message: 'Lua script name already exists' } }
|
||||
}
|
||||
store.luaScriptNames.delete(script.name)
|
||||
store.luaScriptNames.set(input.name, id)
|
||||
script.name = input.name
|
||||
}
|
||||
|
||||
if (input.description !== undefined) {
|
||||
script.description = input.description
|
||||
}
|
||||
|
||||
if (input.code !== undefined) {
|
||||
script.code = input.code
|
||||
}
|
||||
|
||||
if (input.isSandboxed !== undefined) {
|
||||
script.isSandboxed = input.isSandboxed
|
||||
}
|
||||
|
||||
if (input.allowedGlobals !== undefined) {
|
||||
script.allowedGlobals = [...input.allowedGlobals]
|
||||
}
|
||||
|
||||
if (input.timeoutMs !== undefined) {
|
||||
script.timeoutMs = input.timeoutMs
|
||||
}
|
||||
|
||||
if (input.createdBy !== undefined) {
|
||||
script.createdBy = input.createdBy
|
||||
}
|
||||
|
||||
script.updatedAt = new Date()
|
||||
|
||||
return { success: true, data: script }
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
/**
|
||||
* @file index.ts
|
||||
* @description Barrel export for Lua script operations
|
||||
* NOTE: Lua script operation files not yet implemented - stubbed for build
|
||||
*/
|
||||
|
||||
// TODO: Implement these Lua script operation files
|
||||
// export { createLuaScript } from './crud/create-lua-script';
|
||||
// export { getLuaScript } from './crud/get-lua-script';
|
||||
// export { updateLuaScript } from './crud/update-lua-script';
|
||||
// export { deleteLuaScript } from './crud/delete-lua-script';
|
||||
// export { listLuaScripts } from './crud/list-lua-scripts';
|
||||
|
||||
// Temporary stubs to allow build to proceed
|
||||
export const createLuaScript = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
};
|
||||
export const getLuaScript = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
};
|
||||
export const updateLuaScript = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
};
|
||||
export const deleteLuaScript = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
};
|
||||
export const listLuaScripts = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
};
|
||||
@@ -1,9 +0,0 @@
|
||||
/**
|
||||
* @file in-memory-store.ts
|
||||
* @description In-memory store interface for Lua script operations (stub)
|
||||
*/
|
||||
|
||||
export interface InMemoryStore {
|
||||
luaScripts: Map<string, any>;
|
||||
generateId(entityType: string): string;
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* @file types.ts
|
||||
* @description Type definitions for Lua script operations (stub)
|
||||
*/
|
||||
|
||||
export interface CreateLuaScriptInput {
|
||||
name: string;
|
||||
code: string;
|
||||
description?: string;
|
||||
isActive?: boolean;
|
||||
}
|
||||
|
||||
export interface LuaScriptView {
|
||||
id: string;
|
||||
name: string;
|
||||
code: string;
|
||||
description?: string;
|
||||
isActive: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
export interface Result<T> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: {
|
||||
code: string;
|
||||
message: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface ListOptions {
|
||||
filter?: Record<string, any>;
|
||||
sort?: Record<string, 'asc' | 'desc'>;
|
||||
page?: number;
|
||||
limit?: number;
|
||||
skip?: number;
|
||||
take?: number;
|
||||
where?: Record<string, any>;
|
||||
orderBy?: Record<string, 'asc' | 'desc'>;
|
||||
}
|
||||
|
||||
export interface ListResult<T> {
|
||||
items?: T[];
|
||||
data?: T[];
|
||||
total: number;
|
||||
skip?: number;
|
||||
take?: number;
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
/**
|
||||
* @file lua-script-operations.ts
|
||||
* @description LuaScript entity CRUD operations for DBAL client
|
||||
* NOTE: Lua script operations not yet implemented - stubbed for build
|
||||
*
|
||||
* Single-responsibility module following the small-function-file pattern.
|
||||
*/
|
||||
|
||||
// TODO: Implement Lua script operations
|
||||
// import type { DBALAdapter } from '../../adapters/adapter'
|
||||
// import type { LuaScript, ListOptions, ListResult } from '../types'
|
||||
// import { createLuaScript, deleteLuaScript, getLuaScript, listLuaScripts, updateLuaScript } from '../../lua-script'
|
||||
|
||||
export interface LuaScriptOperations {
|
||||
create: (data: any) => Promise<any>
|
||||
read: (id: string) => Promise<any | null>
|
||||
update: (id: string, data: any) => Promise<any>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: any) => Promise<any>
|
||||
}
|
||||
|
||||
/**
|
||||
* Create Lua script operations object for the DBAL client
|
||||
*/
|
||||
export const createLuaScriptOperations = (adapter: any): LuaScriptOperations => ({
|
||||
/**
|
||||
* Create a new Lua script
|
||||
*/
|
||||
create: async (data) => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
},
|
||||
|
||||
/**
|
||||
* Read a Lua script by ID
|
||||
*/
|
||||
read: async (id) => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
},
|
||||
|
||||
/**
|
||||
* Update an existing Lua script
|
||||
*/
|
||||
update: async (id, data) => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a Lua script by ID
|
||||
*/
|
||||
delete: async (id) => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
},
|
||||
|
||||
/**
|
||||
* List Lua scripts with filtering and pagination
|
||||
*/
|
||||
list: async (options) => {
|
||||
throw new Error('Lua script operations not yet implemented');
|
||||
},
|
||||
})
|
||||
@@ -1,53 +1,154 @@
|
||||
/**
|
||||
* @file session-operations.ts
|
||||
* @description Session entity CRUD operations for DBAL client
|
||||
* NOTE: Session operations not yet implemented - stubbed for build
|
||||
*
|
||||
* Single-responsibility module following the small-function-file pattern.
|
||||
*/
|
||||
|
||||
// TODO: Implement session operations
|
||||
// import type { DBALAdapter } from '../../adapters/adapter'
|
||||
// import type { Session, ListOptions, ListResult } from '../types'
|
||||
// import { DBALError } from '../errors'
|
||||
// import { validateSessionCreate, validateSessionUpdate, validateId } from '../validation'
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../foundation/errors'
|
||||
import type { CreateSessionInput, ListOptions, ListResult, Session, UpdateSessionInput } from '../../../foundation/types'
|
||||
import { validateId, validateSessionCreate, validateSessionUpdate } from '../../../foundation/validation'
|
||||
|
||||
/**
|
||||
* Create session operations object for the DBAL client
|
||||
*/
|
||||
export const createSessionOperations = (adapter: any) => ({
|
||||
export interface SessionOperations {
|
||||
create: (data: CreateSessionInput) => Promise<Session>
|
||||
read: (id: string) => Promise<Session | null>
|
||||
update: (id: string, data: UpdateSessionInput) => Promise<Session>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: ListOptions) => Promise<ListResult<Session>>
|
||||
}
|
||||
|
||||
const assertValidId = (id: string) => {
|
||||
const errors = validateId(id)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid session ID', errors.map(error => ({ field: 'id', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidCreate = (data: CreateSessionInput | Session) => {
|
||||
const errors = validateSessionCreate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid session data', errors.map(error => ({ field: 'session', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidUpdate = (data: UpdateSessionInput) => {
|
||||
const errors = validateSessionUpdate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid session update data', errors.map(error => ({ field: 'session', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const resolveTenantId = (configuredTenantId?: string): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
return null
|
||||
}
|
||||
|
||||
const assertUserInTenant = async (adapter: DBALAdapter, userId: string, tenantId: string) => {
|
||||
const user = await adapter.findFirst('User', { id: userId, tenantId }) as { id?: string } | null
|
||||
if (!user) {
|
||||
throw DBALError.notFound(`User not found: ${userId}`)
|
||||
}
|
||||
}
|
||||
|
||||
export const createSessionOperations = (adapter: DBALAdapter, tenantId?: string): SessionOperations => ({
|
||||
/**
|
||||
* Create a new session
|
||||
*/
|
||||
create: async (data: any): Promise<any> => {
|
||||
throw new Error('Session operations not yet implemented');
|
||||
create: async (data): Promise<Session> => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const now = BigInt(Date.now())
|
||||
const payload: Session = {
|
||||
id: data.id ?? randomUUID(),
|
||||
userId: data.userId,
|
||||
token: data.token,
|
||||
expiresAt: data.expiresAt,
|
||||
createdAt: data.createdAt ?? now,
|
||||
lastActivity: data.lastActivity ?? now,
|
||||
ipAddress: data.ipAddress ?? null,
|
||||
userAgent: data.userAgent ?? null,
|
||||
}
|
||||
assertValidCreate(payload)
|
||||
await assertUserInTenant(adapter, payload.userId, resolvedTenantId)
|
||||
return adapter.create('Session', payload) as Promise<Session>
|
||||
},
|
||||
|
||||
/**
|
||||
* Read a session by ID
|
||||
*/
|
||||
read: async (id: string): Promise<any | null> => {
|
||||
throw new Error('Session operations not yet implemented');
|
||||
read: async (id: string): Promise<Session | null> => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const result = await adapter.read('Session', id) as Session | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Session not found: ${id}`)
|
||||
}
|
||||
await assertUserInTenant(adapter, result.userId, resolvedTenantId)
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* Update an existing session
|
||||
*/
|
||||
update: async (id: string, data: any): Promise<any> => {
|
||||
throw new Error('Session operations not yet implemented');
|
||||
update: async (id: string, data: UpdateSessionInput): Promise<Session> => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
assertValidUpdate(data)
|
||||
const existing = await adapter.read('Session', id) as Session | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Session not found: ${id}`)
|
||||
}
|
||||
await assertUserInTenant(adapter, existing.userId, resolvedTenantId)
|
||||
return adapter.update('Session', id, data) as Promise<Session>
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a session by ID
|
||||
*/
|
||||
delete: async (id: string): Promise<boolean> => {
|
||||
throw new Error('Session operations not yet implemented');
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.read('Session', id) as Session | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Session not found: ${id}`)
|
||||
}
|
||||
await assertUserInTenant(adapter, existing.userId, resolvedTenantId)
|
||||
const result = await adapter.delete('Session', id)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Session not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* List sessions with filtering and pagination
|
||||
*/
|
||||
list: async (options?: any): Promise<any> => {
|
||||
throw new Error('Session operations not yet implemented');
|
||||
list: async (options?: ListOptions): Promise<ListResult<Session>> => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const userId = options?.filter?.userId
|
||||
if (typeof userId !== 'string' || userId.length === 0) {
|
||||
throw DBALError.validationError('userId filter is required for session listing', [
|
||||
{ field: 'filter.userId', error: 'userId is required' },
|
||||
])
|
||||
}
|
||||
await assertUserInTenant(adapter, userId, resolvedTenantId)
|
||||
return adapter.list('Session', options) as Promise<ListResult<Session>>
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
// TODO: Implement
|
||||
// Basic user operations placeholder - connects to existing CRUD operations
|
||||
// TODO: Implement full user operations once CRUD layer is ready
|
||||
export const stub = () => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
// Placeholder for user operations
|
||||
// This will be replaced with proper implementation once the CRUD layer is complete
|
||||
return {
|
||||
create: async () => null,
|
||||
read: async () => null,
|
||||
update: async () => null,
|
||||
delete: async () => false,
|
||||
list: async () => [],
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,17 +1,33 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { User } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { validateUserCreate, validateUserUpdate } from '../../../../foundation/validation'
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { CreateUserInput, UpdateUserInput, User } from '../../../../../core/foundation/types'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { validateUserCreate, validateUserUpdate } from '../../../../../core/foundation/validation'
|
||||
|
||||
export const createManyUsers = async (
|
||||
adapter: DBALAdapter,
|
||||
data: Array<Omit<User, 'id' | 'createdAt' | 'updatedAt'>>,
|
||||
data: CreateUserInput[],
|
||||
): Promise<number> => {
|
||||
if (!data || data.length === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
const validationErrors = data.flatMap((item, index) =>
|
||||
const now = BigInt(Date.now())
|
||||
const payload: User[] = data.map(item => ({
|
||||
id: item.id ?? randomUUID(),
|
||||
username: item.username,
|
||||
email: item.email,
|
||||
role: item.role,
|
||||
profilePicture: item.profilePicture ?? null,
|
||||
bio: item.bio ?? null,
|
||||
createdAt: item.createdAt ?? now,
|
||||
tenantId: item.tenantId ?? null,
|
||||
isInstanceOwner: item.isInstanceOwner ?? false,
|
||||
passwordChangeTimestamp: item.passwordChangeTimestamp ?? null,
|
||||
firstLogin: item.firstLogin ?? false,
|
||||
}))
|
||||
|
||||
const validationErrors = payload.flatMap((item, index) =>
|
||||
validateUserCreate(item).map(error => ({ field: `users[${index}]`, error })),
|
||||
)
|
||||
if (validationErrors.length > 0) {
|
||||
@@ -19,7 +35,7 @@ export const createManyUsers = async (
|
||||
}
|
||||
|
||||
try {
|
||||
return adapter.createMany('User', data as Record<string, unknown>[])
|
||||
return adapter.createMany('User', payload as Record<string, unknown>[])
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Username or email already exists')
|
||||
@@ -31,7 +47,7 @@ export const createManyUsers = async (
|
||||
export const updateManyUsers = async (
|
||||
adapter: DBALAdapter,
|
||||
filter: Record<string, unknown>,
|
||||
data: Partial<User>,
|
||||
data: UpdateUserInput,
|
||||
): Promise<number> => {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
throw DBALError.validationError('Bulk update requires a filter', [
|
||||
|
||||
@@ -1,16 +1,31 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import type { User } from '../../../../foundation/types'
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import type { CreateUserInput, User } from '../../../../../core/foundation/types'
|
||||
import { assertValidUserCreate } from './validation'
|
||||
|
||||
export const createUser = async (
|
||||
adapter: DBALAdapter,
|
||||
data: Omit<User, 'id' | 'createdAt' | 'updatedAt'>,
|
||||
data: CreateUserInput,
|
||||
): Promise<User> => {
|
||||
assertValidUserCreate(data)
|
||||
const now = BigInt(Date.now())
|
||||
const payload: User = {
|
||||
id: data.id ?? randomUUID(),
|
||||
username: data.username,
|
||||
email: data.email,
|
||||
role: data.role,
|
||||
profilePicture: data.profilePicture ?? null,
|
||||
bio: data.bio ?? null,
|
||||
createdAt: data.createdAt ?? now,
|
||||
tenantId: data.tenantId ?? null,
|
||||
isInstanceOwner: data.isInstanceOwner ?? false,
|
||||
passwordChangeTimestamp: data.passwordChangeTimestamp ?? null,
|
||||
firstLogin: data.firstLogin ?? false,
|
||||
}
|
||||
assertValidUserCreate(payload)
|
||||
|
||||
try {
|
||||
return adapter.create('User', data) as Promise<User>
|
||||
return adapter.create('User', payload) as Promise<User>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('User with username or email already exists')
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { assertValidUserId } from './validation'
|
||||
|
||||
export const deleteUser = async (adapter: DBALAdapter, id: string): Promise<boolean> => {
|
||||
|
||||
@@ -1,46 +1,122 @@
|
||||
// TODO: Implement user operations
|
||||
// import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
// import type { User, ListOptions, ListResult } from '../../../../foundation/types'
|
||||
// import { createUser } from './create'
|
||||
// import { deleteUser } from './delete'
|
||||
// import { updateUser } from './update'
|
||||
// import { createManyUsers, deleteManyUsers, updateManyUsers } from './batch'
|
||||
// import { listUsers, readUser } from './reads'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { CreateUserInput, ListOptions, ListResult, UpdateUserInput, User } from '../../../../../core/foundation/types'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { createManyUsers, deleteManyUsers, updateManyUsers } from './batch'
|
||||
import { createUser } from './create'
|
||||
import { deleteUser } from './delete'
|
||||
import { listUsers, readUser } from './reads'
|
||||
import { updateUser } from './update'
|
||||
|
||||
export interface UserOperations {
|
||||
create: (data: any) => Promise<any>
|
||||
read: (id: string) => Promise<any | null>
|
||||
update: (id: string, data: any) => Promise<any>
|
||||
create: (data: CreateUserInput) => Promise<User>
|
||||
read: (id: string) => Promise<User | null>
|
||||
update: (id: string, data: UpdateUserInput) => Promise<User>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: any) => Promise<any>
|
||||
createMany: (data: any[]) => Promise<number>
|
||||
updateMany: (filter: any, data: any) => Promise<number>
|
||||
deleteMany: (filter: any) => Promise<number>
|
||||
list: (options?: ListOptions) => Promise<ListResult<User>>
|
||||
createMany: (data: CreateUserInput[]) => Promise<number>
|
||||
updateMany: (filter: Record<string, unknown>, data: UpdateUserInput) => Promise<number>
|
||||
deleteMany: (filter: Record<string, unknown>) => Promise<number>
|
||||
}
|
||||
|
||||
export const createUserOperations = (adapter: any): UserOperations => ({
|
||||
create: async (data) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
const resolveTenantId = (configuredTenantId?: string, data?: Partial<User>): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
const tenantId = data?.tenantId
|
||||
if (typeof tenantId === 'string' && tenantId.length > 0) return tenantId
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveTenantFilter = (
|
||||
configuredTenantId: string | undefined,
|
||||
filter?: Record<string, unknown>,
|
||||
): Record<string, unknown> | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: configuredTenantId }
|
||||
}
|
||||
const candidate = filter?.tenantId ?? filter?.tenant_id
|
||||
if (typeof candidate === 'string' && candidate.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: candidate }
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
export const createUserOperations = (adapter: DBALAdapter, tenantId?: string): UserOperations => ({
|
||||
create: data => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, data)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return createUser(adapter, { ...data, tenantId: resolvedTenantId })
|
||||
},
|
||||
read: async (id) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
read: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const result = await adapter.findFirst('User', { id, tenantId: resolvedTenantId }) as User | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`User not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
update: async (id, data) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const existing = await adapter.findFirst('User', { id, tenantId: resolvedTenantId }) as User | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`User not found: ${id}`)
|
||||
}
|
||||
return updateUser(adapter, id, data)
|
||||
},
|
||||
delete: async (id) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
delete: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const existing = await adapter.findFirst('User', { id, tenantId: resolvedTenantId }) as User | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`User not found: ${id}`)
|
||||
}
|
||||
return deleteUser(adapter, id)
|
||||
},
|
||||
list: async (options) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
list: options => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, options?.filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return listUsers(adapter, { ...options, filter: tenantFilter })
|
||||
},
|
||||
createMany: async (data) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
createMany: data => {
|
||||
const payload = data.map((item, index) => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, item)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [
|
||||
{ field: `users[${index}].tenantId`, error: 'tenantId is required' },
|
||||
])
|
||||
}
|
||||
return { ...item, tenantId: resolvedTenantId }
|
||||
})
|
||||
return createManyUsers(adapter, payload)
|
||||
},
|
||||
updateMany: async (filter, data) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
updateMany: (filter, data) => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
return updateManyUsers(adapter, tenantFilter, data)
|
||||
},
|
||||
deleteMany: async (filter) => {
|
||||
throw new Error('User operations not yet implemented');
|
||||
deleteMany: filter => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return deleteManyUsers(adapter, tenantFilter)
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { User, ListOptions, ListResult } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { validateId } from '../../../../foundation/validation'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { User, ListOptions, ListResult } from '../../../../../core/foundation/types'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import { validateId } from '../../../../../core/foundation/validation'
|
||||
|
||||
export const readUser = async (adapter: DBALAdapter, id: string): Promise<User | null> => {
|
||||
const validationErrors = validateId(id)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import type { User } from '../../../../foundation/types'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import type { UpdateUserInput, User } from '../../../../../core/foundation/types'
|
||||
import { assertValidUserId, assertValidUserUpdate } from './validation'
|
||||
|
||||
export const updateUser = async (
|
||||
adapter: DBALAdapter,
|
||||
id: string,
|
||||
data: Partial<User>,
|
||||
data: UpdateUserInput,
|
||||
): Promise<User> => {
|
||||
assertValidUserId(id)
|
||||
assertValidUserUpdate(data)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import type { User } from '../../../../foundation/types'
|
||||
import { validateId, validateUserCreate, validateUserUpdate } from '../../../../foundation/validation'
|
||||
import { DBALError } from '../../../../../core/foundation/errors'
|
||||
import type { CreateUserInput, UpdateUserInput, User } from '../../../../../core/foundation/types'
|
||||
import { validateId, validateUserCreate, validateUserUpdate } from '../../../../../core/foundation/validation'
|
||||
|
||||
export const assertValidUserId = (id: string): void => {
|
||||
const validationErrors = validateId(id)
|
||||
@@ -9,14 +9,14 @@ export const assertValidUserId = (id: string): void => {
|
||||
}
|
||||
}
|
||||
|
||||
export const assertValidUserCreate = (data: Omit<User, 'id' | 'createdAt' | 'updatedAt'>): void => {
|
||||
export const assertValidUserCreate = (data: CreateUserInput | User): void => {
|
||||
const validationErrors = validateUserCreate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid user data', validationErrors.map(error => ({ field: 'user', error })))
|
||||
}
|
||||
}
|
||||
|
||||
export const assertValidUserUpdate = (data: Partial<User>): void => {
|
||||
export const assertValidUserUpdate = (data: UpdateUserInput): void => {
|
||||
const validationErrors = validateUserUpdate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid user update data', validationErrors.map(error => ({ field: 'user', error })))
|
||||
|
||||
@@ -1,23 +1,170 @@
|
||||
/**
|
||||
* @file workflow-operations.ts
|
||||
* @description Workflow entity CRUD operations for DBAL client (stub)
|
||||
* NOTE: Workflow operations not yet implemented
|
||||
* @description Workflow entity CRUD operations for DBAL client
|
||||
*/
|
||||
|
||||
export const createWorkflowOperations = (adapter: any) => ({
|
||||
create: async (data: any) => {
|
||||
throw new Error('Workflow operations not yet implemented');
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { DBALError } from '../../../foundation/errors'
|
||||
import type { CreateWorkflowInput, ListOptions, ListResult, UpdateWorkflowInput, Workflow } from '../../../foundation/types'
|
||||
import { validateId, validateWorkflowCreate, validateWorkflowUpdate } from '../../../foundation/validation'
|
||||
|
||||
export interface WorkflowOperations {
|
||||
create: (data: CreateWorkflowInput) => Promise<Workflow>
|
||||
read: (id: string) => Promise<Workflow | null>
|
||||
update: (id: string, data: UpdateWorkflowInput) => Promise<Workflow>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: ListOptions) => Promise<ListResult<Workflow>>
|
||||
}
|
||||
|
||||
const assertValidId = (id: string) => {
|
||||
const errors = validateId(id)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid workflow ID', errors.map(error => ({ field: 'id', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidCreate = (data: CreateWorkflowInput | Workflow) => {
|
||||
const normalized = {
|
||||
...data,
|
||||
description: data.description ?? undefined,
|
||||
}
|
||||
const errors = validateWorkflowCreate(normalized as Partial<Workflow>)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid workflow data', errors.map(error => ({ field: 'workflow', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidUpdate = (data: UpdateWorkflowInput) => {
|
||||
const normalized = {
|
||||
...data,
|
||||
description: data.description ?? undefined,
|
||||
}
|
||||
const errors = validateWorkflowUpdate(normalized as Partial<Workflow>)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid workflow update data', errors.map(error => ({ field: 'workflow', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const resolveTenantId = (configuredTenantId?: string, data?: Partial<Workflow>): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
const tenantId = data?.tenantId
|
||||
if (typeof tenantId === 'string' && tenantId.length > 0) return tenantId
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveTenantFilter = (
|
||||
configuredTenantId: string | undefined,
|
||||
filter?: Record<string, unknown>,
|
||||
): Record<string, unknown> | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: configuredTenantId }
|
||||
}
|
||||
const candidate = filter?.tenantId ?? filter?.tenant_id
|
||||
if (typeof candidate === 'string' && candidate.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: candidate }
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
const withWorkflowDefaults = (data: CreateWorkflowInput): Workflow => {
|
||||
const now = BigInt(Date.now())
|
||||
return {
|
||||
id: data.id ?? randomUUID(),
|
||||
tenantId: data.tenantId ?? null,
|
||||
name: data.name,
|
||||
description: data.description ?? undefined,
|
||||
nodes: data.nodes,
|
||||
edges: data.edges,
|
||||
enabled: data.enabled,
|
||||
version: data.version ?? 1,
|
||||
createdAt: data.createdAt ?? now,
|
||||
updatedAt: data.updatedAt ?? now,
|
||||
createdBy: data.createdBy ?? null,
|
||||
}
|
||||
}
|
||||
|
||||
export const createWorkflowOperations = (adapter: DBALAdapter, tenantId?: string): WorkflowOperations => ({
|
||||
create: async data => {
|
||||
const normalized = {
|
||||
...data,
|
||||
description: data.description ?? undefined,
|
||||
}
|
||||
const resolvedTenantId = resolveTenantId(tenantId, normalized as Partial<Workflow>)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const payload = withWorkflowDefaults({ ...data, tenantId: resolvedTenantId })
|
||||
assertValidCreate(payload)
|
||||
try {
|
||||
return adapter.create('Workflow', payload) as Promise<Workflow>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
const name = typeof data.name === 'string' ? data.name : 'unknown'
|
||||
throw DBALError.conflict(`Workflow with name '${name}' already exists`)
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
read: async (id: string) => {
|
||||
throw new Error('Workflow operations not yet implemented');
|
||||
read: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const result = await adapter.findFirst('Workflow', { id, tenantId: resolvedTenantId }) as Workflow | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Workflow not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
update: async (id: string, data: any) => {
|
||||
throw new Error('Workflow operations not yet implemented');
|
||||
update: async (id, data) => {
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
assertValidUpdate(data)
|
||||
const existing = await adapter.findFirst('Workflow', { id, tenantId: resolvedTenantId }) as Workflow | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Workflow not found: ${id}`)
|
||||
}
|
||||
try {
|
||||
return adapter.update('Workflow', id, data) as Promise<Workflow>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
if (typeof data.name === 'string') {
|
||||
throw DBALError.conflict(`Workflow with name '${data.name}' already exists`)
|
||||
}
|
||||
throw DBALError.conflict('Workflow name already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
delete: async (id: string) => {
|
||||
throw new Error('Workflow operations not yet implemented');
|
||||
delete: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.findFirst('Workflow', { id, tenantId: resolvedTenantId }) as Workflow | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Workflow not found: ${id}`)
|
||||
}
|
||||
const result = await adapter.delete('Workflow', id)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Workflow not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
list: async (options?: any) => {
|
||||
throw new Error('Workflow operations not yet implemented');
|
||||
list: options => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, options?.filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return adapter.list('Workflow', { ...options, filter: tenantFilter }) as Promise<ListResult<Workflow>>
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1,53 +1,143 @@
|
||||
/**
|
||||
* @file component-operations.ts
|
||||
* @description ComponentHierarchy entity CRUD operations for DBAL client
|
||||
* NOTE: Component operations not yet implemented - stubbed for build
|
||||
*
|
||||
* Single-responsibility module following the small-function-file pattern.
|
||||
* @description ComponentNode entity CRUD operations for DBAL client
|
||||
*/
|
||||
|
||||
// TODO: Implement component operations
|
||||
// import type { DBALAdapter } from '../../adapters/adapter'
|
||||
// import type { ComponentHierarchy } from '../types'
|
||||
// import { DBALError } from '../errors'
|
||||
// import { validateComponentHierarchyCreate, validateComponentHierarchyUpdate, validateId } from '../validation'
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type {
|
||||
ComponentNode,
|
||||
CreateComponentNodeInput,
|
||||
ListResult,
|
||||
PageConfig,
|
||||
UpdateComponentNodeInput,
|
||||
} from '../../../foundation/types'
|
||||
import { DBALError } from '../../../foundation/errors'
|
||||
import { validateComponentHierarchyCreate, validateComponentHierarchyUpdate, validateId } from '../../../foundation/validation'
|
||||
|
||||
/**
|
||||
* Create component operations object for the DBAL client
|
||||
*/
|
||||
export const createComponentOperations = (adapter: any) => ({
|
||||
/**
|
||||
* Create a new component
|
||||
*/
|
||||
create: async (data: any): Promise<any> => {
|
||||
throw new Error('Component operations not yet implemented');
|
||||
export interface ComponentNodeOperations {
|
||||
create: (data: CreateComponentNodeInput) => Promise<ComponentNode>
|
||||
read: (id: string) => Promise<ComponentNode | null>
|
||||
update: (id: string, data: UpdateComponentNodeInput) => Promise<ComponentNode>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
getTree: (pageId: string) => Promise<ComponentNode[]>
|
||||
}
|
||||
|
||||
const assertValidId = (id: string) => {
|
||||
const errors = validateId(id)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid component ID', errors.map(error => ({ field: 'id', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidCreate = (data: CreateComponentNodeInput) => {
|
||||
const errors = validateComponentHierarchyCreate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid component data', errors.map(error => ({ field: 'component', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidUpdate = (data: UpdateComponentNodeInput) => {
|
||||
const errors = validateComponentHierarchyUpdate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid component update data', errors.map(error => ({ field: 'component', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const resolveTenantId = (configuredTenantId?: string, data?: { tenantId?: string | null }): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
const candidate = data?.tenantId
|
||||
if (typeof candidate === 'string' && candidate.length > 0) return candidate
|
||||
return null
|
||||
}
|
||||
|
||||
const assertPageTenant = async (adapter: DBALAdapter, tenantId: string, pageId: string) => {
|
||||
const page = await adapter.findFirst('PageConfig', { id: pageId, tenantId }) as PageConfig | null
|
||||
if (!page) {
|
||||
throw DBALError.notFound(`Page not found: ${pageId}`)
|
||||
}
|
||||
}
|
||||
|
||||
const withComponentDefaults = (data: CreateComponentNodeInput): ComponentNode => ({
|
||||
id: data.id ?? randomUUID(),
|
||||
type: data.type,
|
||||
parentId: data.parentId ?? null,
|
||||
childIds: data.childIds,
|
||||
order: data.order,
|
||||
pageId: data.pageId,
|
||||
})
|
||||
|
||||
export const createComponentNodeOperations = (adapter: DBALAdapter, tenantId?: string): ComponentNodeOperations => ({
|
||||
create: async data => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidCreate(data)
|
||||
await assertPageTenant(adapter, resolvedTenantId, data.pageId)
|
||||
const payload = withComponentDefaults(data)
|
||||
return adapter.create('ComponentNode', payload) as Promise<ComponentNode>
|
||||
},
|
||||
|
||||
/**
|
||||
* Read a component by ID
|
||||
*/
|
||||
read: async (id: string): Promise<any | null> => {
|
||||
throw new Error('Component operations not yet implemented');
|
||||
read: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const result = await adapter.findFirst('ComponentNode', { id }) as ComponentNode | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Component not found: ${id}`)
|
||||
}
|
||||
await assertPageTenant(adapter, resolvedTenantId, result.pageId)
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* Update an existing component
|
||||
*/
|
||||
update: async (id: string, data: any): Promise<any> => {
|
||||
throw new Error('Component operations not yet implemented');
|
||||
update: async (id, data) => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
assertValidUpdate(data)
|
||||
const existing = await adapter.findFirst('ComponentNode', { id }) as ComponentNode | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Component not found: ${id}`)
|
||||
}
|
||||
await assertPageTenant(adapter, resolvedTenantId, existing.pageId)
|
||||
if (data.pageId) {
|
||||
await assertPageTenant(adapter, resolvedTenantId, data.pageId)
|
||||
}
|
||||
return adapter.update('ComponentNode', id, data) as Promise<ComponentNode>
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a component by ID
|
||||
*/
|
||||
delete: async (id: string): Promise<boolean> => {
|
||||
throw new Error('Component operations not yet implemented');
|
||||
delete: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.findFirst('ComponentNode', { id }) as ComponentNode | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Component not found: ${id}`)
|
||||
}
|
||||
await assertPageTenant(adapter, resolvedTenantId, existing.pageId)
|
||||
const result = await adapter.delete('ComponentNode', id)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Component not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* Get component tree for a page
|
||||
*/
|
||||
getTree: async (pageId: string): Promise<any[]> => {
|
||||
throw new Error('Component operations not yet implemented');
|
||||
getTree: async pageId => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(pageId)
|
||||
await assertPageTenant(adapter, resolvedTenantId, pageId)
|
||||
const result = await adapter.list('ComponentNode', {
|
||||
filter: { pageId },
|
||||
sort: { order: 'asc' },
|
||||
}) as ListResult<ComponentNode>
|
||||
return result.data
|
||||
},
|
||||
})
|
||||
|
||||
export const createComponentOperations = createComponentNodeOperations
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* @file package-data-operations.ts
|
||||
* @description PackageData entity CRUD operations for DBAL client
|
||||
*/
|
||||
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { CreatePackageDataInput, ListOptions, ListResult, PackageData, UpdatePackageDataInput } from '../../../foundation/types'
|
||||
import { DBALError } from '../../../foundation/errors'
|
||||
import { isValidJsonString, validateId } from '../../../foundation/validation'
|
||||
|
||||
export interface PackageDataOperations {
|
||||
create: (data: CreatePackageDataInput) => Promise<PackageData>
|
||||
read: (packageId: string) => Promise<PackageData | null>
|
||||
update: (packageId: string, data: UpdatePackageDataInput) => Promise<PackageData>
|
||||
delete: (packageId: string) => Promise<boolean>
|
||||
list: (options?: ListOptions) => Promise<ListResult<PackageData>>
|
||||
}
|
||||
|
||||
const assertValidId = (packageId: string) => {
|
||||
const errors = validateId(packageId)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package ID', errors.map(error => ({ field: 'packageId', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidData = (data: { data?: string }) => {
|
||||
if (!data.data || typeof data.data !== 'string' || !isValidJsonString(data.data)) {
|
||||
throw DBALError.validationError('Invalid package data', [{ field: 'data', error: 'data must be a JSON string' }])
|
||||
}
|
||||
}
|
||||
|
||||
export const createPackageDataOperations = (adapter: DBALAdapter): PackageDataOperations => ({
|
||||
create: async data => {
|
||||
assertValidId(data.packageId)
|
||||
assertValidData(data)
|
||||
return adapter.create('PackageData', data) as Promise<PackageData>
|
||||
},
|
||||
read: async packageId => {
|
||||
assertValidId(packageId)
|
||||
const result = await adapter.read('PackageData', packageId) as PackageData | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Package data not found: ${packageId}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
update: async (packageId, data) => {
|
||||
assertValidId(packageId)
|
||||
if (data.data !== undefined) {
|
||||
assertValidData({ data: data.data })
|
||||
}
|
||||
return adapter.update('PackageData', packageId, data) as Promise<PackageData>
|
||||
},
|
||||
delete: async packageId => {
|
||||
assertValidId(packageId)
|
||||
const result = await adapter.delete('PackageData', packageId)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Package data not found: ${packageId}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
list: options => {
|
||||
return adapter.list('PackageData', options) as Promise<ListResult<PackageData>>
|
||||
},
|
||||
})
|
||||
@@ -1,11 +1,11 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { Package } from '../../../../foundation/types'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { InstalledPackage } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { validatePackageCreate, validatePackageUpdate } from '../../../../foundation/validation'
|
||||
|
||||
export const createManyPackages = async (
|
||||
export const createManyInstalledPackages = async (
|
||||
adapter: DBALAdapter,
|
||||
data: Array<Omit<Package, 'id' | 'createdAt' | 'updatedAt'>>,
|
||||
data: InstalledPackage[],
|
||||
): Promise<number> => {
|
||||
if (!data || data.length === 0) {
|
||||
return 0
|
||||
@@ -15,23 +15,23 @@ export const createManyPackages = async (
|
||||
validatePackageCreate(item).map(error => ({ field: `packages[${index}]`, error })),
|
||||
)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package batch', validationErrors)
|
||||
throw DBALError.validationError('Invalid installed package batch', validationErrors)
|
||||
}
|
||||
|
||||
try {
|
||||
return adapter.createMany('Package', data as Record<string, unknown>[])
|
||||
return adapter.createMany('InstalledPackage', data as Record<string, unknown>[])
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Package name+version already exists')
|
||||
throw DBALError.conflict('Installed package already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export const updateManyPackages = async (
|
||||
export const updateManyInstalledPackages = async (
|
||||
adapter: DBALAdapter,
|
||||
filter: Record<string, unknown>,
|
||||
data: Partial<Package>,
|
||||
data: Partial<InstalledPackage>,
|
||||
): Promise<number> => {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
throw DBALError.validationError('Bulk update requires a filter', [
|
||||
@@ -47,25 +47,31 @@ export const updateManyPackages = async (
|
||||
|
||||
const validationErrors = validatePackageUpdate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package update data', validationErrors.map(error => ({ field: 'package', error })))
|
||||
throw DBALError.validationError(
|
||||
'Invalid installed package update data',
|
||||
validationErrors.map(error => ({ field: 'package', error })),
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
return adapter.updateMany('Package', filter, data as Record<string, unknown>)
|
||||
return adapter.updateMany('InstalledPackage', filter, data as Record<string, unknown>)
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Package name+version already exists')
|
||||
throw DBALError.conflict('Installed package already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export const deleteManyPackages = async (adapter: DBALAdapter, filter: Record<string, unknown>): Promise<number> => {
|
||||
export const deleteManyInstalledPackages = async (
|
||||
adapter: DBALAdapter,
|
||||
filter: Record<string, unknown>,
|
||||
): Promise<number> => {
|
||||
if (!filter || Object.keys(filter).length === 0) {
|
||||
throw DBALError.validationError('Bulk delete requires a filter', [
|
||||
{ field: 'filter', error: 'Filter is required' },
|
||||
])
|
||||
}
|
||||
|
||||
return adapter.deleteMany('Package', filter)
|
||||
return adapter.deleteMany('InstalledPackage', filter)
|
||||
}
|
||||
|
||||
@@ -1,40 +1,180 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { Package, ListOptions, ListResult } from '../../../../foundation/types'
|
||||
import { createManyPackages, deleteManyPackages, updateManyPackages } from './batch'
|
||||
import { createPackage, deletePackage, updatePackage } from './mutations'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { CreatePackageInput, InstalledPackage, ListOptions, ListResult, UpdatePackageInput } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { createManyInstalledPackages, deleteManyInstalledPackages, updateManyInstalledPackages } from './batch'
|
||||
import { createInstalledPackage, deleteInstalledPackage, updateInstalledPackage } from './mutations'
|
||||
import { publishPackage } from './publish'
|
||||
import { listPackages, readPackage } from './reads'
|
||||
import { listInstalledPackages } from './reads'
|
||||
import { unpublishPackage } from './unpublish'
|
||||
import { validatePackage } from './validate'
|
||||
import { validateId } from '../../../../foundation/validation'
|
||||
|
||||
export interface PackageOperations {
|
||||
validate: (data: Partial<Package>) => string[]
|
||||
publish: (data: Omit<Package, 'id' | 'createdAt' | 'updatedAt'>) => Promise<Package>
|
||||
export interface InstalledPackageOperations {
|
||||
validate: (data: Partial<InstalledPackage>) => string[]
|
||||
publish: (data: InstalledPackageCreatePayload) => Promise<InstalledPackage>
|
||||
unpublish: (id: string) => Promise<boolean>
|
||||
create: (data: Omit<Package, 'id' | 'createdAt' | 'updatedAt'>) => Promise<Package>
|
||||
read: (id: string) => Promise<Package | null>
|
||||
update: (id: string, data: Partial<Package>) => Promise<Package>
|
||||
create: (data: InstalledPackageCreatePayload) => Promise<InstalledPackage>
|
||||
read: (id: string) => Promise<InstalledPackage | null>
|
||||
update: (id: string, data: UpdatePackageInput) => Promise<InstalledPackage>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: ListOptions) => Promise<ListResult<Package>>
|
||||
createMany: (data: Array<Omit<Package, 'id' | 'createdAt' | 'updatedAt'>>) => Promise<number>
|
||||
updateMany: (filter: Record<string, unknown>, data: Partial<Package>) => Promise<number>
|
||||
list: (options?: ListOptions) => Promise<ListResult<InstalledPackage>>
|
||||
createMany: (data: InstalledPackageCreatePayload[]) => Promise<number>
|
||||
updateMany: (filter: Record<string, unknown>, data: UpdatePackageInput) => Promise<number>
|
||||
deleteMany: (filter: Record<string, unknown>) => Promise<number>
|
||||
}
|
||||
|
||||
export const createPackageOperations = (adapter: DBALAdapter): PackageOperations => ({
|
||||
type InstalledPackageCreatePayload = CreatePackageInput
|
||||
|
||||
const resolveTenantId = (configuredTenantId?: string, data?: Partial<InstalledPackage>): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
const candidate = data?.tenantId
|
||||
if (typeof candidate === 'string' && candidate.length > 0) return candidate
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveTenantFilter = (
|
||||
configuredTenantId: string | undefined,
|
||||
filter?: Record<string, unknown>,
|
||||
): Record<string, unknown> | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: configuredTenantId }
|
||||
}
|
||||
const candidate = filter?.tenantId ?? filter?.tenant_id
|
||||
if (typeof candidate === 'string' && candidate.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: candidate }
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
const assertValidId = (id: string) => {
|
||||
const errors = validateId(id)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package ID', errors.map(error => ({ field: 'id', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const withInstalledPackageDefaults = (data: CreatePackageInput): InstalledPackage => {
|
||||
const installedAt = data.installedAt ?? BigInt(Date.now())
|
||||
return {
|
||||
packageId: data.packageId,
|
||||
tenantId: data.tenantId ?? null,
|
||||
installedAt,
|
||||
version: data.version,
|
||||
enabled: data.enabled,
|
||||
config: data.config ?? null,
|
||||
}
|
||||
}
|
||||
|
||||
export const createInstalledPackageOperations = (adapter: DBALAdapter, tenantId?: string): InstalledPackageOperations => ({
|
||||
validate: data => validatePackage(data),
|
||||
publish: data => publishPackage(adapter, data),
|
||||
unpublish: id => unpublishPackage(adapter, id),
|
||||
create: data => createPackage(adapter, data),
|
||||
read: id => readPackage(adapter, id),
|
||||
update: (id, data) => updatePackage(adapter, id, data),
|
||||
delete: id => deletePackage(adapter, id),
|
||||
list: options => listPackages(adapter, options),
|
||||
createMany: data => createManyPackages(adapter, data),
|
||||
updateMany: (filter, data) => updateManyPackages(adapter, filter, data),
|
||||
deleteMany: filter => deleteManyPackages(adapter, filter),
|
||||
publish: data => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, data)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(data.packageId)
|
||||
return publishPackage(adapter, withInstalledPackageDefaults({ ...data, tenantId: resolvedTenantId }))
|
||||
},
|
||||
unpublish: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.findFirst('InstalledPackage', { packageId: id, tenantId: resolvedTenantId }) as InstalledPackage | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Installed package not found: ${id}`)
|
||||
}
|
||||
return unpublishPackage(adapter, id)
|
||||
},
|
||||
create: data => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, data)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(data.packageId)
|
||||
const payload = withInstalledPackageDefaults({ ...data, tenantId: resolvedTenantId })
|
||||
return createInstalledPackage(adapter, payload)
|
||||
},
|
||||
read: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const result = await adapter.findFirst('InstalledPackage', { packageId: id, tenantId: resolvedTenantId }) as InstalledPackage | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Installed package not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
update: async (id, data) => {
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const existing = await adapter.findFirst('InstalledPackage', { packageId: id, tenantId: resolvedTenantId }) as InstalledPackage | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Installed package not found: ${id}`)
|
||||
}
|
||||
return updateInstalledPackage(adapter, id, data)
|
||||
},
|
||||
delete: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.findFirst('InstalledPackage', { packageId: id, tenantId: resolvedTenantId }) as InstalledPackage | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Installed package not found: ${id}`)
|
||||
}
|
||||
return deleteInstalledPackage(adapter, id)
|
||||
},
|
||||
list: options => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, options?.filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return listInstalledPackages(adapter, { ...options, filter: tenantFilter })
|
||||
},
|
||||
createMany: data => {
|
||||
const payload = data.map((item, index) => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, item)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [
|
||||
{ field: `packages[${index}].tenantId`, error: 'tenantId is required' },
|
||||
])
|
||||
}
|
||||
assertValidId(item.packageId)
|
||||
return withInstalledPackageDefaults({ ...item, tenantId: resolvedTenantId })
|
||||
})
|
||||
return createManyInstalledPackages(adapter, payload)
|
||||
},
|
||||
updateMany: (filter, data) => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
return updateManyInstalledPackages(adapter, tenantFilter, data)
|
||||
},
|
||||
deleteMany: filter => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return deleteManyInstalledPackages(adapter, tenantFilter)
|
||||
},
|
||||
})
|
||||
|
||||
export const createPackageOperations = createInstalledPackageOperations
|
||||
|
||||
export { publishPackage } from './publish'
|
||||
export { unpublishPackage } from './unpublish'
|
||||
export { validatePackage } from './validate'
|
||||
|
||||
@@ -1,61 +1,70 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { Package } from '../../../../foundation/types'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { InstalledPackage } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { validatePackageCreate, validatePackageUpdate, validateId } from '../../../../foundation/validation'
|
||||
|
||||
export const createPackage = async (
|
||||
export const createInstalledPackage = async (
|
||||
adapter: DBALAdapter,
|
||||
data: Omit<Package, 'id' | 'createdAt' | 'updatedAt'>,
|
||||
): Promise<Package> => {
|
||||
data: InstalledPackage,
|
||||
): Promise<InstalledPackage> => {
|
||||
const validationErrors = validatePackageCreate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package data', validationErrors.map(error => ({ field: 'package', error })))
|
||||
throw DBALError.validationError(
|
||||
'Invalid installed package data',
|
||||
validationErrors.map(error => ({ field: 'package', error })),
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
return adapter.create('Package', data) as Promise<Package>
|
||||
return adapter.create('InstalledPackage', data) as Promise<InstalledPackage>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict(`Package ${data.name}@${data.version} already exists`)
|
||||
throw DBALError.conflict(`Installed package ${data.packageId} already exists`)
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export const updatePackage = async (
|
||||
export const updateInstalledPackage = async (
|
||||
adapter: DBALAdapter,
|
||||
id: string,
|
||||
data: Partial<Package>,
|
||||
): Promise<Package> => {
|
||||
const idErrors = validateId(id)
|
||||
packageId: string,
|
||||
data: Partial<InstalledPackage>,
|
||||
): Promise<InstalledPackage> => {
|
||||
const idErrors = validateId(packageId)
|
||||
if (idErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package ID', idErrors.map(error => ({ field: 'id', error })))
|
||||
throw DBALError.validationError('Invalid package ID', idErrors.map(error => ({ field: 'packageId', error })))
|
||||
}
|
||||
|
||||
const validationErrors = validatePackageUpdate(data)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package update data', validationErrors.map(error => ({ field: 'package', error })))
|
||||
throw DBALError.validationError(
|
||||
'Invalid installed package update data',
|
||||
validationErrors.map(error => ({ field: 'package', error })),
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
return adapter.update('Package', id, data) as Promise<Package>
|
||||
return adapter.update('InstalledPackage', packageId, data) as Promise<InstalledPackage>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Package name+version already exists')
|
||||
throw DBALError.conflict('Installed package already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export const deletePackage = async (adapter: DBALAdapter, id: string): Promise<boolean> => {
|
||||
const validationErrors = validateId(id)
|
||||
export const deleteInstalledPackage = async (
|
||||
adapter: DBALAdapter,
|
||||
packageId: string,
|
||||
): Promise<boolean> => {
|
||||
const validationErrors = validateId(packageId)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package ID', validationErrors.map(error => ({ field: 'id', error })))
|
||||
throw DBALError.validationError('Invalid package ID', validationErrors.map(error => ({ field: 'packageId', error })))
|
||||
}
|
||||
|
||||
const result = await adapter.delete('Package', id)
|
||||
const result = await adapter.delete('InstalledPackage', packageId)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Package not found: ${id}`)
|
||||
throw DBALError.notFound(`Installed package not found: ${packageId}`)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { Package } from '../../../../foundation/types'
|
||||
import { createPackage } from './mutations'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { InstalledPackage } from '../../../../foundation/types'
|
||||
import { createInstalledPackage } from './mutations'
|
||||
|
||||
export const publishPackage = (
|
||||
adapter: DBALAdapter,
|
||||
data: Omit<Package, 'id' | 'createdAt' | 'updatedAt'>,
|
||||
): Promise<Package> => {
|
||||
return createPackage(adapter, data)
|
||||
data: InstalledPackage,
|
||||
): Promise<InstalledPackage> => {
|
||||
return createInstalledPackage(adapter, data)
|
||||
}
|
||||
|
||||
@@ -1,21 +1,27 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { Package, ListOptions, ListResult } from '../../../../foundation/types'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import type { InstalledPackage, ListOptions, ListResult } from '../../../../foundation/types'
|
||||
import { DBALError } from '../../../../foundation/errors'
|
||||
import { validateId } from '../../../../foundation/validation'
|
||||
|
||||
export const readPackage = async (adapter: DBALAdapter, id: string): Promise<Package | null> => {
|
||||
const validationErrors = validateId(id)
|
||||
export const readInstalledPackage = async (
|
||||
adapter: DBALAdapter,
|
||||
packageId: string,
|
||||
): Promise<InstalledPackage | null> => {
|
||||
const validationErrors = validateId(packageId)
|
||||
if (validationErrors.length > 0) {
|
||||
throw DBALError.validationError('Invalid package ID', validationErrors.map(error => ({ field: 'id', error })))
|
||||
throw DBALError.validationError('Invalid package ID', validationErrors.map(error => ({ field: 'packageId', error })))
|
||||
}
|
||||
|
||||
const result = await adapter.read('Package', id) as Package | null
|
||||
const result = await adapter.read('InstalledPackage', packageId) as InstalledPackage | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Package not found: ${id}`)
|
||||
throw DBALError.notFound(`Installed package not found: ${packageId}`)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export const listPackages = (adapter: DBALAdapter, options?: ListOptions): Promise<ListResult<Package>> => {
|
||||
return adapter.list('Package', options) as Promise<ListResult<Package>>
|
||||
export const listInstalledPackages = (
|
||||
adapter: DBALAdapter,
|
||||
options?: ListOptions,
|
||||
): Promise<ListResult<InstalledPackage>> => {
|
||||
return adapter.list('InstalledPackage', options) as Promise<ListResult<InstalledPackage>>
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import { deletePackage } from './mutations'
|
||||
import type { DBALAdapter } from '../../../../../adapters/adapter'
|
||||
import { deleteInstalledPackage } from './mutations'
|
||||
|
||||
export const unpublishPackage = (adapter: DBALAdapter, id: string): Promise<boolean> => {
|
||||
return deletePackage(adapter, id)
|
||||
return deleteInstalledPackage(adapter, id)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { Package } from '../../../../foundation/types'
|
||||
import type { InstalledPackage } from '../../../../foundation/types'
|
||||
import { validatePackageCreate } from '../../../../foundation/validation'
|
||||
|
||||
export const validatePackage = (data: Partial<Package>): string[] => {
|
||||
export const validatePackage = (data: Partial<InstalledPackage>): string[] => {
|
||||
return validatePackageCreate(data)
|
||||
}
|
||||
|
||||
@@ -1,60 +1,183 @@
|
||||
/**
|
||||
* @file page-operations.ts
|
||||
* @description PageView entity CRUD operations for DBAL client
|
||||
* NOTE: Page operations not yet implemented - stubbed for build
|
||||
*
|
||||
* Single-responsibility module following the small-function-file pattern.
|
||||
* @description PageConfig entity CRUD operations for DBAL client
|
||||
*/
|
||||
|
||||
// TODO: Implement page operations
|
||||
// import type { DBALAdapter } from '../../adapters/adapter'
|
||||
// import type { PageView, ListOptions, ListResult } from '../types'
|
||||
// import { DBALError } from '../errors'
|
||||
// import { validatePageCreate, validatePageUpdate, validateId } from '../validation'
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { DBALAdapter } from '../../../../adapters/adapter'
|
||||
import type { CreatePageInput, ListOptions, ListResult, PageConfig, UpdatePageInput } from '../../../foundation/types'
|
||||
import { DBALError } from '../../../foundation/errors'
|
||||
import { validateId, validatePageCreate, validatePageUpdate } from '../../../foundation/validation'
|
||||
|
||||
/**
|
||||
* Create page operations object for the DBAL client
|
||||
*/
|
||||
export const createPageOperations = (adapter: any) => ({
|
||||
/**
|
||||
* Create a new page
|
||||
*/
|
||||
create: async (data: any): Promise<any> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
export interface PageConfigOperations {
|
||||
create: (data: CreatePageInput) => Promise<PageConfig>
|
||||
read: (id: string) => Promise<PageConfig | null>
|
||||
readByPath: (path: string) => Promise<PageConfig | null>
|
||||
update: (id: string, data: UpdatePageInput) => Promise<PageConfig>
|
||||
delete: (id: string) => Promise<boolean>
|
||||
list: (options?: ListOptions) => Promise<ListResult<PageConfig>>
|
||||
}
|
||||
|
||||
const assertValidId = (id: string) => {
|
||||
const errors = validateId(id)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid page ID', errors.map(error => ({ field: 'id', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidPath = (path: string) => {
|
||||
if (!path || typeof path !== 'string' || path.trim().length === 0) {
|
||||
throw DBALError.validationError('Invalid page path', [{ field: 'path', error: 'path is invalid' }])
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidCreate = (data: CreatePageInput) => {
|
||||
const errors = validatePageCreate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid page data', errors.map(error => ({ field: 'page', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const assertValidUpdate = (data: UpdatePageInput) => {
|
||||
const errors = validatePageUpdate(data)
|
||||
if (errors.length > 0) {
|
||||
throw DBALError.validationError('Invalid page update data', errors.map(error => ({ field: 'page', error })))
|
||||
}
|
||||
}
|
||||
|
||||
const resolveTenantId = (configuredTenantId?: string, data?: Partial<PageConfig>): string | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) return configuredTenantId
|
||||
const candidate = data?.tenantId
|
||||
if (typeof candidate === 'string' && candidate.length > 0) return candidate
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveTenantFilter = (
|
||||
configuredTenantId: string | undefined,
|
||||
filter?: Record<string, unknown>,
|
||||
): Record<string, unknown> | null => {
|
||||
if (configuredTenantId && configuredTenantId.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: configuredTenantId }
|
||||
}
|
||||
const candidate = filter?.tenantId ?? filter?.tenant_id
|
||||
if (typeof candidate === 'string' && candidate.length > 0) {
|
||||
return { ...(filter ?? {}), tenantId: candidate }
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
const withPageDefaults = (data: CreatePageInput): PageConfig => {
|
||||
const now = BigInt(Date.now())
|
||||
return {
|
||||
id: data.id ?? randomUUID(),
|
||||
tenantId: data.tenantId ?? null,
|
||||
packageId: data.packageId ?? null,
|
||||
path: data.path,
|
||||
title: data.title,
|
||||
description: data.description ?? null,
|
||||
icon: data.icon ?? null,
|
||||
component: data.component ?? null,
|
||||
componentTree: data.componentTree,
|
||||
level: data.level,
|
||||
requiresAuth: data.requiresAuth,
|
||||
requiredRole: data.requiredRole ?? null,
|
||||
parentPath: data.parentPath ?? null,
|
||||
sortOrder: data.sortOrder ?? 0,
|
||||
isPublished: data.isPublished ?? true,
|
||||
params: data.params ?? null,
|
||||
meta: data.meta ?? null,
|
||||
createdAt: data.createdAt ?? now,
|
||||
updatedAt: data.updatedAt ?? now,
|
||||
}
|
||||
}
|
||||
|
||||
export const createPageConfigOperations = (adapter: DBALAdapter, tenantId?: string): PageConfigOperations => ({
|
||||
create: async data => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId, data)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
const payload = withPageDefaults({ ...data, tenantId: resolvedTenantId })
|
||||
assertValidCreate(payload)
|
||||
try {
|
||||
return adapter.create('PageConfig', payload) as Promise<PageConfig>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict(`Page with path '${data.path}' already exists`)
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Read a page by ID
|
||||
*/
|
||||
read: async (id: string): Promise<any | null> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
read: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const result = await adapter.findFirst('PageConfig', { id, tenantId: resolvedTenantId }) as PageConfig | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Page not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* Read a page by slug
|
||||
*/
|
||||
readBySlug: async (slug: string): Promise<any | null> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
readByPath: async path => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidPath(path)
|
||||
const result = await adapter.findFirst('PageConfig', { path, tenantId: resolvedTenantId }) as PageConfig | null
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Page not found with path: ${path}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* Update an existing page
|
||||
*/
|
||||
update: async (id: string, data: any): Promise<any> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
update: async (id, data) => {
|
||||
if (data.tenantId !== undefined) {
|
||||
throw DBALError.validationError('Tenant ID cannot be updated', [{ field: 'tenantId', error: 'tenantId is immutable' }])
|
||||
}
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
assertValidUpdate(data)
|
||||
const existing = await adapter.findFirst('PageConfig', { id, tenantId: resolvedTenantId }) as PageConfig | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Page not found: ${id}`)
|
||||
}
|
||||
try {
|
||||
return adapter.update('PageConfig', id, data) as Promise<PageConfig>
|
||||
} catch (error) {
|
||||
if (error instanceof DBALError && error.code === 409) {
|
||||
throw DBALError.conflict('Page path already exists')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a page by ID
|
||||
*/
|
||||
delete: async (id: string): Promise<boolean> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
delete: async id => {
|
||||
const resolvedTenantId = resolveTenantId(tenantId)
|
||||
if (!resolvedTenantId) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
assertValidId(id)
|
||||
const existing = await adapter.findFirst('PageConfig', { id, tenantId: resolvedTenantId }) as PageConfig | null
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`Page not found: ${id}`)
|
||||
}
|
||||
const result = await adapter.delete('PageConfig', id)
|
||||
if (!result) {
|
||||
throw DBALError.notFound(`Page not found: ${id}`)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
/**
|
||||
* List pages with filtering and pagination
|
||||
*/
|
||||
list: async (options?: any): Promise<any> => {
|
||||
throw new Error('Page operations not yet implemented');
|
||||
list: options => {
|
||||
const tenantFilter = resolveTenantFilter(tenantId, options?.filter)
|
||||
if (!tenantFilter) {
|
||||
throw DBALError.validationError('Tenant ID is required', [{ field: 'tenantId', error: 'tenantId is required' }])
|
||||
}
|
||||
return adapter.list('PageConfig', { ...options, filter: tenantFilter }) as Promise<ListResult<PageConfig>>
|
||||
},
|
||||
})
|
||||
|
||||
export const createPageOperations = createPageConfigOperations
|
||||
|
||||
@@ -27,8 +27,6 @@ export { validateComponentHierarchyCreate } from '../../foundation/validation'
|
||||
export { validateComponentHierarchyUpdate } from '../../foundation/validation'
|
||||
export { validateWorkflowCreate } from '../../foundation/validation'
|
||||
export { validateWorkflowUpdate } from '../../foundation/validation'
|
||||
export { validateLuaScriptCreate } from '../../foundation/validation'
|
||||
export { validateLuaScriptUpdate } from '../../foundation/validation'
|
||||
export { validatePackageCreate } from '../../foundation/validation'
|
||||
export { validatePackageUpdate } from '../../foundation/validation'
|
||||
export { validateId } from '../../foundation/validation'
|
||||
|
||||
@@ -2,54 +2,37 @@
|
||||
* @file create-package.ts
|
||||
* @description Create package operation
|
||||
*/
|
||||
import type { CreatePackageInput, Package, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validatePackageCreate } from '../../validation/validate-package-create'
|
||||
import type { CreatePackageInput, InstalledPackage, Result } from '../types'
|
||||
import type { InMemoryStore } from '../store/in-memory-store'
|
||||
import { validatePackageCreate } from '../validation/validate-package-create'
|
||||
|
||||
/**
|
||||
* Create a new package in the store
|
||||
* Create a new installed package in the store
|
||||
*/
|
||||
export const createPackage = async (
|
||||
store: InMemoryStore,
|
||||
input: CreatePackageInput
|
||||
): Promise<Result<Package>> => {
|
||||
const isInstalled = input.isInstalled ?? false
|
||||
const validationErrors = validatePackageCreate({
|
||||
name: input.name,
|
||||
): Promise<Result<InstalledPackage>> => {
|
||||
const installedAt = input.installedAt ?? BigInt(Date.now())
|
||||
const payload: InstalledPackage = {
|
||||
packageId: input.packageId,
|
||||
tenantId: input.tenantId ?? null,
|
||||
installedAt,
|
||||
version: input.version,
|
||||
description: input.description,
|
||||
author: input.author,
|
||||
manifest: input.manifest,
|
||||
isInstalled,
|
||||
installedAt: input.installedAt,
|
||||
installedBy: input.installedBy
|
||||
})
|
||||
enabled: input.enabled,
|
||||
config: input.config ?? null
|
||||
}
|
||||
|
||||
const validationErrors = validatePackageCreate(payload)
|
||||
if (validationErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] } }
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] ?? 'Validation failed' } }
|
||||
}
|
||||
|
||||
const key = `${input.name}@${input.version}`
|
||||
if (store.packageKeys.has(key)) {
|
||||
return { success: false, error: { code: 'CONFLICT', message: 'Package name+version already exists' } }
|
||||
if (store.installedPackages.has(payload.packageId!)) {
|
||||
return { success: false, error: { code: 'CONFLICT', message: 'Package ID already exists' } }
|
||||
}
|
||||
|
||||
const pkg: Package = {
|
||||
id: store.generateId('package'),
|
||||
name: input.name,
|
||||
version: input.version,
|
||||
description: input.description,
|
||||
author: input.author,
|
||||
manifest: input.manifest,
|
||||
isInstalled,
|
||||
installedAt: input.installedAt,
|
||||
installedBy: input.installedBy,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date()
|
||||
}
|
||||
store.installedPackages.set(payload.packageId!, payload)
|
||||
|
||||
store.packages.set(pkg.id, pkg)
|
||||
store.packageKeys.set(key, pkg.id)
|
||||
|
||||
return { success: true, data: pkg }
|
||||
return { success: true, data: payload }
|
||||
}
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
* @file delete-package.ts
|
||||
* @description Delete package operation
|
||||
*/
|
||||
import type { Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../validation/validate-id'
|
||||
import type { Result } from '../types'
|
||||
import type { InMemoryStore } from '../store/in-memory-store'
|
||||
import { validateId } from '../validation/validate-id'
|
||||
|
||||
/**
|
||||
* Delete a package by ID
|
||||
@@ -12,16 +12,15 @@ import { validateId } from '../../validation/validate-id'
|
||||
export const deletePackage = async (store: InMemoryStore, id: string): Promise<Result<boolean>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] ?? 'Invalid ID' } }
|
||||
}
|
||||
|
||||
const pkg = store.packages.get(id)
|
||||
const pkg = store.installedPackages.get(id)
|
||||
if (!pkg) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Package not found: ${id}` } }
|
||||
}
|
||||
|
||||
store.packages.delete(id)
|
||||
store.packageKeys.delete(`${pkg.name}@${pkg.version}`)
|
||||
store.installedPackages.delete(id)
|
||||
|
||||
return { success: true, data: true }
|
||||
}
|
||||
|
||||
@@ -2,20 +2,20 @@
|
||||
* @file get-package.ts
|
||||
* @description Get package operations
|
||||
*/
|
||||
import type { Package, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../validation/validate-id'
|
||||
import type { InstalledPackage, Result } from '../types'
|
||||
import type { InMemoryStore } from '../store/in-memory-store'
|
||||
import { validateId } from '../validation/validate-id'
|
||||
|
||||
/**
|
||||
* Get a package by ID
|
||||
* Get an installed package by ID
|
||||
*/
|
||||
export const getPackage = async (store: InMemoryStore, id: string): Promise<Result<Package>> => {
|
||||
export const getPackage = async (store: InMemoryStore, id: string): Promise<Result<InstalledPackage>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] ?? 'Invalid ID' } }
|
||||
}
|
||||
|
||||
const pkg = store.packages.get(id)
|
||||
const pkg = store.installedPackages.get(id)
|
||||
if (!pkg) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Package not found: ${id}` } }
|
||||
}
|
||||
@@ -24,17 +24,15 @@ export const getPackage = async (store: InMemoryStore, id: string): Promise<Resu
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a package by name+version key (name@version)
|
||||
* Get an installed package by packageId
|
||||
*/
|
||||
export const getPackageByPackageId = async (store: InMemoryStore, packageKey: string): Promise<Result<Package>> => {
|
||||
if (!packageKey) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: 'Package key is required' } }
|
||||
export const getPackageByPackageId = async (
|
||||
store: InMemoryStore,
|
||||
packageId: string
|
||||
): Promise<Result<InstalledPackage>> => {
|
||||
if (!packageId) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: 'packageId is required' } }
|
||||
}
|
||||
|
||||
const id = store.packageKeys.get(packageKey)
|
||||
if (!id) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Package not found: ${packageKey}` } }
|
||||
}
|
||||
|
||||
return getPackage(store, id)
|
||||
return getPackage(store, packageId)
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
* @file list-packages.ts
|
||||
* @description List packages with filtering and pagination
|
||||
*/
|
||||
import type { ListOptions, Package, Result } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import type { InstalledPackage, ListOptions, Result } from '../types'
|
||||
import type { InMemoryStore } from '../store/in-memory-store'
|
||||
|
||||
/**
|
||||
* List packages with filtering and pagination
|
||||
@@ -11,37 +11,39 @@ import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
export const listPackages = async (
|
||||
store: InMemoryStore,
|
||||
options: ListOptions = {}
|
||||
): Promise<Result<Package[]>> => {
|
||||
): Promise<Result<InstalledPackage[]>> => {
|
||||
const { filter = {}, sort = {}, page = 1, limit = 20 } = options
|
||||
|
||||
let packages = Array.from(store.packages.values())
|
||||
let packages = Array.from(store.installedPackages.values())
|
||||
|
||||
if (filter.name !== undefined) {
|
||||
packages = packages.filter((pkg) => pkg.name === filter.name)
|
||||
}
|
||||
|
||||
if (filter.version !== undefined) {
|
||||
packages = packages.filter((pkg) => pkg.version === filter.version)
|
||||
}
|
||||
|
||||
if (filter.author !== undefined) {
|
||||
packages = packages.filter((pkg) => pkg.author === filter.author)
|
||||
}
|
||||
|
||||
if (filter.isInstalled !== undefined) {
|
||||
packages = packages.filter((pkg) => pkg.isInstalled === filter.isInstalled)
|
||||
}
|
||||
|
||||
if (sort.name) {
|
||||
packages.sort((a, b) =>
|
||||
sort.name === 'asc' ? a.name.localeCompare(b.name) : b.name.localeCompare(a.name)
|
||||
)
|
||||
} else if (sort.createdAt) {
|
||||
packages.sort((a, b) =>
|
||||
sort.createdAt === 'asc' ? a.createdAt.getTime() - b.createdAt.getTime() : b.createdAt.getTime() - a.createdAt.getTime()
|
||||
if (filter && Object.keys(filter).length > 0) {
|
||||
packages = packages.filter((pkg) =>
|
||||
Object.entries(filter).every(([key, value]) => (pkg as Record<string, unknown>)[key] === value)
|
||||
)
|
||||
}
|
||||
|
||||
const sortKey = Object.keys(sort)[0]
|
||||
if (sortKey) {
|
||||
const direction = sort[sortKey]
|
||||
packages.sort((a, b) => {
|
||||
const left = (a as Record<string, unknown>)[sortKey]
|
||||
const right = (b as Record<string, unknown>)[sortKey]
|
||||
if (typeof left === 'string' && typeof right === 'string') {
|
||||
return direction === 'asc' ? left.localeCompare(right) : right.localeCompare(left)
|
||||
}
|
||||
if (typeof left === 'bigint' && typeof right === 'bigint') {
|
||||
return direction === 'asc' ? Number(left - right) : Number(right - left)
|
||||
}
|
||||
if (typeof left === 'number' && typeof right === 'number') {
|
||||
return direction === 'asc' ? left - right : right - left
|
||||
}
|
||||
if (typeof left === 'boolean' && typeof right === 'boolean') {
|
||||
return direction === 'asc' ? Number(left) - Number(right) : Number(right) - Number(left)
|
||||
}
|
||||
return 0
|
||||
})
|
||||
}
|
||||
|
||||
const start = (page - 1) * limit
|
||||
const paginated = packages.slice(start, start + limit)
|
||||
|
||||
|
||||
@@ -2,76 +2,53 @@
|
||||
* @file update-package.ts
|
||||
* @description Update package operation
|
||||
*/
|
||||
import type { Package, Result, UpdatePackageInput } from '../../types'
|
||||
import type { InMemoryStore } from '../../store/in-memory-store'
|
||||
import { validateId } from '../../validation/validate-id'
|
||||
import { validatePackageUpdate } from '../../validation/validate-package-update'
|
||||
import type { InstalledPackage, Result, UpdatePackageInput } from '../types'
|
||||
import type { InMemoryStore } from '../store/in-memory-store'
|
||||
import { validateId } from '../validation/validate-id'
|
||||
import { validatePackageUpdate } from '../validation/validate-package-update'
|
||||
|
||||
/**
|
||||
* Update an existing package
|
||||
* Update an existing installed package
|
||||
*/
|
||||
export const updatePackage = async (
|
||||
store: InMemoryStore,
|
||||
id: string,
|
||||
input: UpdatePackageInput
|
||||
): Promise<Result<Package>> => {
|
||||
): Promise<Result<InstalledPackage>> => {
|
||||
const idErrors = validateId(id)
|
||||
if (idErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] } }
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: idErrors[0] ?? 'Invalid ID' } }
|
||||
}
|
||||
|
||||
const pkg = store.packages.get(id)
|
||||
if (input.tenantId !== undefined) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: 'tenantId is immutable' } }
|
||||
}
|
||||
|
||||
const pkg = store.installedPackages.get(id)
|
||||
if (!pkg) {
|
||||
return { success: false, error: { code: 'NOT_FOUND', message: `Package not found: ${id}` } }
|
||||
}
|
||||
|
||||
const validationErrors = validatePackageUpdate(input)
|
||||
if (validationErrors.length > 0) {
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] } }
|
||||
}
|
||||
|
||||
const nextName = input.name ?? pkg.name
|
||||
const nextVersion = input.version ?? pkg.version
|
||||
const currentKey = `${pkg.name}@${pkg.version}`
|
||||
const nextKey = `${nextName}@${nextVersion}`
|
||||
|
||||
if (nextKey !== currentKey) {
|
||||
const existingId = store.packageKeys.get(nextKey)
|
||||
if (existingId && existingId !== id) {
|
||||
return { success: false, error: { code: 'CONFLICT', message: 'Package name+version already exists' } }
|
||||
}
|
||||
store.packageKeys.delete(currentKey)
|
||||
store.packageKeys.set(nextKey, id)
|
||||
}
|
||||
|
||||
pkg.name = nextName
|
||||
pkg.version = nextVersion
|
||||
|
||||
if (input.description !== undefined) {
|
||||
pkg.description = input.description
|
||||
}
|
||||
|
||||
if (input.author !== undefined) {
|
||||
pkg.author = input.author
|
||||
}
|
||||
|
||||
if (input.manifest !== undefined) {
|
||||
pkg.manifest = input.manifest
|
||||
}
|
||||
|
||||
if (input.isInstalled !== undefined) {
|
||||
pkg.isInstalled = input.isInstalled
|
||||
return { success: false, error: { code: 'VALIDATION_ERROR', message: validationErrors[0] ?? 'Validation failed' } }
|
||||
}
|
||||
|
||||
if (input.installedAt !== undefined) {
|
||||
pkg.installedAt = input.installedAt
|
||||
}
|
||||
|
||||
if (input.installedBy !== undefined) {
|
||||
pkg.installedBy = input.installedBy
|
||||
if (input.version !== undefined) {
|
||||
pkg.version = input.version
|
||||
}
|
||||
|
||||
pkg.updatedAt = new Date()
|
||||
if (input.enabled !== undefined) {
|
||||
pkg.enabled = input.enabled
|
||||
}
|
||||
|
||||
if (input.config !== undefined) {
|
||||
pkg.config = input.config ?? null
|
||||
}
|
||||
|
||||
return { success: true, data: pkg }
|
||||
}
|
||||
|
||||
@@ -1,32 +1,10 @@
|
||||
/**
|
||||
* @file index.ts
|
||||
* @description Barrel export for package operations
|
||||
* NOTE: Package operation files not yet implemented - stubbed for build
|
||||
*/
|
||||
|
||||
// TODO: Implement these package operation files
|
||||
// export { createPackage } from './create-package';
|
||||
// export { getPackage, getPackageByPackageId } from './get-package';
|
||||
// export { updatePackage } from './update-package';
|
||||
// export { deletePackage } from './delete-package';
|
||||
// export { listPackages } from './list-packages';
|
||||
|
||||
// Temporary stubs to allow build to proceed
|
||||
export const createPackage = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export const getPackage = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export const getPackageByPackageId = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export const updatePackage = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export const deletePackage = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export const listPackages = async (...args: any[]): Promise<any> => {
|
||||
throw new Error('Package operations not yet implemented');
|
||||
};
|
||||
export { createPackage } from './crud/create-package'
|
||||
export { deletePackage } from './crud/delete-package'
|
||||
export { getPackage, getPackageByPackageId } from './crud/get-package'
|
||||
export { listPackages } from './crud/list-packages'
|
||||
export { updatePackage } from './crud/update-package'
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user