Compare commits

..

486 Commits

Author SHA1 Message Date
5c2f15ec12 chore: improve package data typings 2025-12-29 23:39:17 +00:00
f8efac1188 Merge branch 'main' of https://github.com/johndoe6345789/metabuilder 2025-12-29 23:31:49 +00:00
f3b1058d62 feat(ui): Add UI components for header, intro, and user dashboard
- Implemented App Header with lifecycle and rendering scripts.
- Created Intro Section with rendering logic.
- Developed User Dashboard with profile, comments, and chat functionalities.
- Added Admin Panel for user and content management.
- Introduced Application Builder with schemas and workflows.
- Established Super God panel for tenant management.
- Updated metadata and tests for all new components and functionalities.
- Enhanced UI Pages Bundle to include dependencies for all levels.
- Improved permission checks and constants in the permissions package.
2025-12-29 23:31:43 +00:00
dc3b2bdfe4 Merge pull request #480 from johndoe6345789/codex/fix-type-lint-warnings
Fix type lint warnings in hooks
2025-12-29 23:30:38 +00:00
92e9b870fa Fix type lint warnings in hooks 2025-12-29 23:30:27 +00:00
427f502a3d Merge pull request #478 from johndoe6345789/codex/resolve-lint-warnings
Resolve lint warnings
2025-12-29 23:21:32 +00:00
c852289a06 chore: resolve lint warnings 2025-12-29 23:21:14 +00:00
a173b41d4b Merge branch 'main' of https://github.com/johndoe6345789/metabuilder 2025-12-29 23:19:40 +00:00
38237123cf feat: add UI components for login, home, and permissions with metadata and scripts 2025-12-29 23:19:32 +00:00
0457cbcd61 Merge pull request #475 from johndoe6345789/codex/migrate-ui-to-lua-package-system
feat: load UI pages from lua packages
2025-12-29 23:19:12 +00:00
d0dbf45d24 feat: load UI pages from lua packages 2025-12-29 23:19:02 +00:00
1f88b32d0c fix: rename error variable in GET function for consistency 2025-12-29 23:04:19 +00:00
6268cbb4bd feat: add UI rendering pipeline documentation 2025-12-29 23:01:12 +00:00
5d880c6c3b feat: implement dynamic UI page rendering and database integration 2025-12-29 22:58:50 +00:00
9f10d771d2 feat: add login and level 1 pages with UI components and actions 2025-12-29 22:55:41 +00:00
bc1b8de3e0 test: update assertion for handling unknown component types in generateComponentTree tests 2025-12-29 22:50:49 +00:00
21d45bc559 test: add unit tests for generateComponentTree function 2025-12-29 22:50:36 +00:00
df40166a60 fix: remove unused LuaUIComponent type import in loadLuaUIPackage 2025-12-29 22:49:31 +00:00
3f12f2d23a feat: add normalizeLuaStructure and normalizeLuaComponent functions for Lua data handling 2025-12-29 22:49:22 +00:00
3f5f9d66cc fix: standardize import formatting across multiple files
- Adjusted import statements to ensure consistent spacing and formatting in various test files, utility files, and component files.
- Updated type imports to maintain uniformity in spacing.
- Ensured that all import statements follow the same style for better readability and maintainability.
2025-12-29 22:49:05 +00:00
3265d06737 fix: standardize import formatting across components and tests
- Adjusted import statements for consistency by adding spaces after commas in multiple files.
- Ensured proper export formatting in various components to maintain code readability.
- Updated test files to follow the same import formatting standards.
- Refactored the `createLuaFunctionWrapper` function to simplify its parameters.
2025-12-29 22:48:33 +00:00
4d46410015 Refactor action function wrapper creation in loadLuaUIPackage for clarity 2025-12-29 22:48:12 +00:00
71a2d784bd Implement loadLuaUIPackage function to load Lua UI packages and handle errors 2025-12-29 22:48:03 +00:00
f8577072cb Fix import formatting in loadLuaUIPackage tests for consistency 2025-12-29 22:47:48 +00:00
72be29b288 Add callLuaFunction utility and tests for Lua module loading 2025-12-29 22:47:16 +00:00
a6e427647c Add tests for loadLuaUIPackage functionality and structure validation 2025-12-29 22:45:32 +00:00
b7e6234c38 Refactor code structure and remove redundant sections for improved readability and maintainability 2025-12-29 22:45:07 +00:00
dbfbb32188 refactor 2025-12-29 22:42:31 +00:00
50cd5c40b2 Refactor page rendering functions for consistency and readability
- Adjusted indentation and formatting in get-pages.ts for consistency.
- Enhanced readability in build-feature-card.ts by formatting parameters and adding commas.
- Updated build-features-component.ts to maintain consistent formatting and added missing commas.
- Improved formatting in build-hero-component.ts for better readability.
- Standardized formatting in build-level1-homepage.ts by adding commas.
- Refactored initialize-default-pages.ts for consistent indentation and added commas.
- Cleaned up PageRendererUtils.ts by ensuring consistent argument handling.
- Streamlined check-permissions.ts for better readability and consistency.
- Refined execute-lua-script.ts for consistent error handling and formatting.
- Enhanced get-page.ts and get-pages-by-level.ts with consistent return formatting.
- Improved load-pages.ts for better readability and consistent formatting.
- Standardized on-page-load.ts and on-page-unload.ts for consistent formatting and readability.
2025-12-29 22:38:34 +00:00
d305b25c76 refactor 2025-12-29 22:38:23 +00:00
bccb33e2ba refactor: reorganize imports in guards test file for better clarity 2025-12-29 22:27:42 +00:00
0be0fe9301 feat: add lint command permission and implement type guard tests 2025-12-29 22:27:16 +00:00
b9f62c7b5d feat: add type guards for error handling and JSON validation 2025-12-29 22:25:51 +00:00
a9d500b940 refactor: remove native Prisma bridge implementation 2025-12-29 22:25:07 +00:00
41d24f94c9 feat: implement native Prisma bridge with SQL template handling and authorization 2025-12-29 22:24:26 +00:00
c6dc552023 feat: add timeout command for build process in settings 2025-12-29 22:23:33 +00:00
bf9bfcf843 feat: add bunx playwright command permission to settings 2025-12-29 22:23:15 +00:00
d0be4da56c feat: add error formatting tests and interfaces for consistent error handling 2025-12-29 22:21:12 +00:00
c2997c915a refactor: remove toUserMessage function for error handling 2025-12-29 22:19:33 +00:00
83d9c16094 feat: add toUserMessage function for user-friendly error handling 2025-12-29 22:19:16 +00:00
e2092d146d feat: implement centralized error handling and logging with ErrorBoundary component 2025-12-29 22:19:10 +00:00
b134f3f8d4 feat: add permission for git restore command in settings 2025-12-29 22:17:30 +00:00
977a2a9e58 feat: add permission for bun audit command in settings 2025-12-29 22:16:55 +00:00
89270e1d7e feat: add permission for git checkout command in settings 2025-12-29 22:16:48 +00:00
cb942f77e7 feat: add permission for bun run lint command in settings 2025-12-29 22:16:28 +00:00
4918627d42 feat: add permission for npm audit command in settings 2025-12-29 22:16:03 +00:00
fb38b5b304 feat: add permission for npm run lint command in settings 2025-12-29 22:15:35 +00:00
8d7d2691b0 feat: add permission for npm run typecheck command in settings 2025-12-29 22:15:20 +00:00
fbe1f7721f refactor: remove DbalIntegrationUtils class wrapper and individual function exports 2025-12-29 22:15:08 +00:00
f3f60a09a2 feat: add permission for npx prisma validate command in settings 2025-12-29 22:14:53 +00:00
fd556ad3ee feat: add permission for echo command in settings 2025-12-29 22:14:46 +00:00
65143eb904 refactor: replace 'any' with specific parameter types in class wrapper functions 2025-12-29 22:12:44 +00:00
f788ade4ab fix: update test case for hasProperty to reflect correct behavior for primitive strings 2025-12-29 22:11:50 +00:00
cdf022e9c7 feat: add permission for bun run test:unit command 2025-12-29 22:11:34 +00:00
7584253a9d test: add unit tests for type guard functions 2025-12-29 22:11:23 +00:00
d4285d10d4 feat: add permission for bun run typecheck command 2025-12-29 22:10:53 +00:00
6b31c9df6a refactor: replace 'any' with specific types in KVStore and TenantContext definitions 2025-12-29 22:10:46 +00:00
a60f5ee064 feat: add type guard utilities for runtime type checking 2025-12-29 22:10:05 +00:00
6a9762b99e feat: add utility types for type-safe replacements of any 2025-12-29 22:09:57 +00:00
ad5e86c97f feat: add Prettier and ESLint configuration files, update scripts, and enhance permissions 2025-12-29 22:09:36 +00:00
1e4f902847 Implement feature X to enhance user experience and optimize performance 2025-12-29 22:07:27 +00:00
836eb6a086 fix: add permission for 'bun add' command in settings 2025-12-29 22:07:01 +00:00
d0ffe58ef5 refactor: remove obsolete credential management functions and related test files 2025-12-29 22:06:44 +00:00
a87b1043dc refactor: remove obsolete tests and credential management functions
- Deleted tests for user authentication, component configuration, and credential operations.
- Removed credential management functions including getCredentials, setCredential, and related password reset functionalities.
- Cleaned up unused imports and mock setups across various test files.
- Streamlined the codebase by eliminating redundant files and tests that are no longer applicable.
2025-12-29 22:01:28 +00:00
7e66010928 refactor: remove obsolete test files for package-glue, seed-data, and system components 2025-12-29 21:59:52 +00:00
c27843f576 fix: correct Grid component usage in LevelsGrid for proper layout 2025-12-29 21:56:09 +00:00
e6c368bfbe fix: add permission for 'cat' command in settings 2025-12-29 21:55:01 +00:00
555589d9a8 fix: add git log permission to settings and update LevelsGrid component import 2025-12-29 21:54:29 +00:00
3d7061ca3f refactor: remove obsolete test files for primary key field, install package content, and rate limiting 2025-12-29 21:54:06 +00:00
e10feca62c fix: update test for registerLuaScript to handle async execution 2025-12-29 21:53:38 +00:00
ad9fb27e66 fix: await expect in Lua script execution test for proper async handling 2025-12-29 21:53:30 +00:00
4d1ac45b19 Refactor and add tests for various CRUD operations across components, schemas, pages, and workflows
- Updated import paths for hooks and components.
- Added unit tests for user authentication, user retrieval by email and username, and comment management.
- Implemented tests for component configuration and node management.
- Created tests for page management including adding, deleting, and updating pages.
- Added schema management tests for CRUD operations.
- Implemented workflow management tests for adding, deleting, updating, and retrieving workflows.
- Updated NotificationSummaryCard import path to reflect new structure.
2025-12-29 21:53:22 +00:00
dfb2ddf337 feat: Add 'claudeCode.allowDangerouslySkipPermissions' and 'claudeCode.initialPermissionMode' to settings.json; update peer dependencies in package-lock.json 2025-12-29 21:48:59 +00:00
35e2b02ec1 feat: Add 'Bash(DATABASE_URL="file:./dev.db" npx prisma generate:*)' permission to settings.local.json 2025-12-29 21:45:43 +00:00
d3f4d6b8d4 feat: Add 'Bash(npx prisma generate:*)' permission to settings.local.json 2025-12-29 21:45:15 +00:00
75de014884 feat: Add 'Bash(npm run db:generate:*)' permission to settings.local.json 2025-12-29 21:43:10 +00:00
c0e38f393f feat: Update Bash permissions and add peer dependencies in package-lock.json 2025-12-29 21:42:00 +00:00
3092cf5578 feat: Expand Bash permissions in settings.local.json 2025-12-29 21:39:37 +00:00
7243f29f19 feat: Add UI Standards, Refactoring Summary, and Dependency Update Reports
- Introduced UI Standards document to enforce Material-UI and SASS usage.
- Created Refactoring Summary detailing the conversion of large TypeScript files to JSON, improving maintainability and reducing code complexity.
- Added Dependency Update Summary outlining major version updates and API refactoring for Prisma and other dependencies.
- Documented Legacy Pipeline Cruft Analysis to identify redundant workflows post-implementation of gated workflows.
- Enhanced PR Summary for converting TODO items to GitHub issues with new scripts and automation workflows.
- Established Renovate Dependency Dashboard Status Report confirming successful dependency updates and compliance with UI standards.
2025-12-29 21:37:54 +00:00
b56554287b Merge branch 'main' of https://github.com/johndoe6345789/metabuilder 2025-12-29 21:28:24 +00:00
ee67f916e1 Add .vscode/claudesync.json to .gitignore 2025-12-29 21:28:17 +00:00
9dffeff73d Merge pull request #404 from johndoe6345789/copilot/refactor-large-typescript-files
Refactor forms.ts: Convert TypeScript config to JSON (244→35 lines)
2025-12-29 21:27:39 +00:00
copilot-swe-agent[bot]
5e3a913988 Add comprehensive refactoring summary documentation
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:23:45 +00:00
copilot-swe-agent[bot]
56171929b6 Polish: Improve comments and error messages per code review
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:22:26 +00:00
copilot-swe-agent[bot]
f955d0d200 Address code review: Remove eval(), improve type safety
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:21:13 +00:00
copilot-swe-agent[bot]
d2d382a765 Fix forms.ts: Use ES6 import for JSON modules
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:19:56 +00:00
copilot-swe-agent[bot]
c8593119b2 Refactor forms.ts: Convert to JSON config (244→29 lines, +3 JSON files)
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:17:53 +00:00
copilot-swe-agent[bot]
3970ef22fd Analyze refactoring tools and identify issues
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 21:11:37 +00:00
copilot-swe-agent[bot]
3ef908051c Initial plan 2025-12-29 21:03:33 +00:00
a146c74a2c Merge pull request #402 from johndoe6345789/copilot/cleanup-typescript-errors
Fix TypeScript syntax errors from bulk refactor
2025-12-29 21:02:15 +00:00
copilot-swe-agent[bot]
f89574b504 Fix TypeScript errors from bulk refactor - remove extra closing braces and fix transferSuperGodPower import
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 20:57:03 +00:00
copilot-swe-agent[bot]
b6a5d29fe6 Initial plan 2025-12-29 20:48:07 +00:00
b97de7e1e2 Merge pull request #398 from johndoe6345789/copilot/make-auto-code-extractor-3000
Add Auto Code Extractor 3000™ for automated lambda-per-file refactoring with extraction registry and live project-wide extraction
2025-12-29 20:46:49 +00:00
e42fff4ea0 Merge branch 'main' into copilot/make-auto-code-extractor-3000 2025-12-29 20:46:37 +00:00
copilot-swe-agent[bot]
efc906997f Fix extracted TypeScript files: rename JSX files to .tsx, fix reserved keyword 'delete', add TODO for incorrectly extracted class methods
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 20:06:37 +00:00
copilot-swe-agent[bot]
4db87be546 Changes before error encountered
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 19:33:03 +00:00
copilot-swe-agent[bot]
2489c2133e Add extraction registry tracking and improve logging with detailed function analysis
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:56:18 +00:00
copilot-swe-agent[bot]
4427f63c17 Add full project-wide extraction output - 52 files processed successfully
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:40:57 +00:00
copilot-swe-agent[bot]
0baaa09caf Add Auto Code Extractor 3000™ execution output to PR
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:35:19 +00:00
copilot-swe-agent[bot]
c12008bd3f Add implementation summary and final documentation
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:33:17 +00:00
copilot-swe-agent[bot]
034f8e3f51 Add comprehensive documentation for Auto Code Extractor 3000™
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:32:06 +00:00
copilot-swe-agent[bot]
29ed9d4b7e Add Auto Code Extractor 3000™ - fully automated code extraction tool
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:30:18 +00:00
3bb37764df Merge pull request #196 from johndoe6345789/copilot/improve-duplication-detection-script
Auto-detect duplicate issues without manual configuration
2025-12-29 18:29:09 +00:00
893d49a0d3 Merge branch 'main' into copilot/improve-duplication-detection-script 2025-12-29 18:29:01 +00:00
fbb9585835 Merge pull request #246 from johndoe6345789/codex/create-blockitem-and-grouping-files-nflww8
refactor: extract lua block item and grouping helpers
2025-12-29 18:26:58 +00:00
c6e6492d8b Merge pull request #397 from johndoe6345789/copilot/sub-pr-246
Extract BlockSection and BlockFields from BlockItem component
2025-12-29 18:26:16 +00:00
9eecbc45aa Merge branch 'codex/create-blockitem-and-grouping-files-nflww8' into copilot/sub-pr-246 2025-12-29 18:26:01 +00:00
copilot-swe-agent[bot]
45452e4b15 Initial plan for auto code extractor 3000™
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 18:20:45 +00:00
copilot-swe-agent[bot]
60391b36c1 Initial plan 2025-12-29 18:17:50 +00:00
copilot-swe-agent[bot]
4e7145a441 Extract BlockSection and BlockFields into separate files
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-29 17:41:44 +00:00
4461288d13 Merge branch 'main' into codex/create-blockitem-and-grouping-files-nflww8 2025-12-29 17:38:45 +00:00
copilot-swe-agent[bot]
c0d86f6d12 Initial plan 2025-12-29 17:38:18 +00:00
fb970a768a Update frontends/nextjs/src/components/editors/lua/blocks/grouping.ts
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:38:16 +00:00
cd942ef691 Merge pull request #333 from johndoe6345789/codex/add-dialog-and-dropdownmenu-components-sirirg
Add dropdown state hook and dialog sections
2025-12-29 17:14:36 +00:00
4b79be2687 Merge branch 'main' into codex/add-dialog-and-dropdownmenu-components-sirirg 2025-12-29 17:14:27 +00:00
edc6e3e448 Update frontends/nextjs/src/components/ui/molecules/overlay/DropdownMenu/MenuItem.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:13:54 +00:00
6c0d83929c Merge pull request #334 from johndoe6345789/codex/create-core,-overlay,-and-data-exports-54e96s
Add UI subset barrels and streamline UI index exports
2025-12-29 17:13:34 +00:00
f917920233 Merge branch 'main' into codex/create-core,-overlay,-and-data-exports-54e96s 2025-12-29 17:13:25 +00:00
20aec4e9a0 Merge pull request #383 from johndoe6345789/codex/create-fieldgroup-and-validationsummary-components
Add shared data form and table components
2025-12-29 17:12:51 +00:00
a1cc4415a5 Merge branch 'main' into codex/create-fieldgroup-and-validationsummary-components 2025-12-29 17:12:42 +00:00
fdb83483eb Update frontends/nextjs/src/data/table/EmptyState.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:12:17 +00:00
675c8d9b82 Update frontends/nextjs/src/data/form/ValidationSummary.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:12:03 +00:00
499c277501 Update frontends/nextjs/src/data/form/ValidationSummary.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:11:51 +00:00
6ed12f49b4 Merge pull request #335 from johndoe6345789/codex/create-fieldtypes,-propertypanels,-and-rendernode-files-ufu1c8
Refactor rendering components into modular panels
2025-12-29 17:10:52 +00:00
3034cef5f5 Merge branch 'main' into codex/create-fieldtypes,-propertypanels,-and-rendernode-files-ufu1c8 2025-12-29 17:10:44 +00:00
7227619449 Merge pull request #336 from johndoe6345789/codex/add-securitymessage-and-actionbuttons-components-bctjgn
Refactor security warning dialog components
2025-12-29 17:10:18 +00:00
048d4e93dc Merge branch 'main' into codex/add-securitymessage-and-actionbuttons-components-bctjgn 2025-12-29 17:10:10 +00:00
e0a61c9786 Merge pull request #337 from johndoe6345789/codex/create-nerdmodeide-components-and-hooks-wf8pyj
Refactor NerdModeIDE into modular components
2025-12-29 17:09:49 +00:00
a14de95795 Merge branch 'main' into codex/create-nerdmodeide-components-and-hooks-wf8pyj 2025-12-29 17:09:44 +00:00
ab44bcd782 Merge pull request #338 from johndoe6345789/codex/add-dialog-header,-body,-and-footer-components-xttcz7
Refactor dialog molecule subcomponents
2025-12-29 17:09:04 +00:00
19518e5700 Merge branch 'main' into codex/add-dialog-header,-body,-and-footer-components-xttcz7 2025-12-29 17:08:54 +00:00
eefa743cf6 Merge pull request #381 from johndoe6345789/codex/add-menuitemlist,-header,-and-navsections
Refactor sidebar navigation components into smaller files
2025-12-29 17:08:34 +00:00
8faa0fa674 Merge branch 'main' into codex/add-menuitemlist,-header,-and-navsections 2025-12-29 17:08:22 +00:00
77f83b9a4c Merge pull request #382 from johndoe6345789/codex/split-components-into-separate-files
Split dialog components into smaller modules
2025-12-29 17:07:53 +00:00
d25466217e Merge branch 'main' into codex/split-components-into-separate-files 2025-12-29 17:07:41 +00:00
fb0dff5892 Update frontends/nextjs/src/components/ui/organisms/dialogs/Sheet/Header.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:07:16 +00:00
2f8f7623c2 Merge pull request #385 from johndoe6345789/codex/create-toastcontainer-and-config-files-s3isbk
Refactor sonner toast components
2025-12-29 17:06:48 +00:00
843dbfdfe5 Merge branch 'main' into codex/create-toastcontainer-and-config-files-s3isbk 2025-12-29 17:06:31 +00:00
853daf38db Update frontends/nextjs/src/components/ui/sonner/ToastContainer.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:03:47 +00:00
f1222ac6ea Merge pull request #384 from johndoe6345789/codex/create-and-organize-test-files-pkv1td
test: reorganize hook tests
2025-12-29 17:03:14 +00:00
3f28ebbe0f Merge branch 'main' into codex/create-and-organize-test-files-pkv1td 2025-12-29 17:03:03 +00:00
4055b5cbb2 Update frontends/nextjs/src/hooks/data/__tests__/useKV.validation.test.ts
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 17:02:40 +00:00
710d53647c refactor: reorganize sonner toast components 2025-12-29 16:50:57 +00:00
fca5638dd9 test: reorganize hook tests 2025-12-29 16:50:50 +00:00
8918cca6e4 feat: add shared data form and table components 2025-12-29 16:34:04 +00:00
1d0a8c2a3d refactor: split dialog components into modules 2025-12-29 16:33:56 +00:00
9b404a10b4 refactor: extract sidebar navigation pieces 2025-12-29 16:33:48 +00:00
6ef4496e59 Merge pull request #331 from johndoe6345789/codex/split-components-into-separate-files-5h3pqi
Split dialog components into smaller modules
2025-12-29 16:12:40 +00:00
4cdfae45f2 Update frontends/nextjs/src/components/ui/organisms/dialogs/Sheet/Header.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 16:12:12 +00:00
50428bd48b Update frontends/nextjs/src/components/ui/organisms/dialogs/alert/Content.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 16:12:05 +00:00
144a01e1e0 Merge pull request #330 from johndoe6345789/codex/add-menuitemlist,-header,-and-navsections-le7c32
Refactor sidebar navigation components into smaller files
2025-12-29 16:11:42 +00:00
9f75b67bd3 Merge branch 'main' into codex/add-menuitemlist,-header,-and-navsections-le7c32 2025-12-29 16:11:36 +00:00
cb8191915e Merge pull request #329 from johndoe6345789/codex/create-toastcontainer-and-config-files-u80qjb
Refactor sonner toast components
2025-12-29 16:11:05 +00:00
b85d2e8204 Merge branch 'main' into codex/create-toastcontainer-and-config-files-u80qjb 2025-12-29 16:10:55 +00:00
5f36b15fe3 Update frontends/nextjs/src/components/ui/sonner.tsx
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 16:10:39 +00:00
b19d0d679c Merge pull request #328 from johndoe6345789/codex/create-and-organize-test-files-a02kqy
test: reorganize hook tests
2025-12-29 16:10:11 +00:00
a6f2dc10b2 Merge branch 'main' into codex/create-and-organize-test-files-a02kqy 2025-12-29 16:10:01 +00:00
0dfba0bc83 Update frontends/nextjs/src/hooks/data/__tests__/useKV.validation.test.ts
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-29 16:09:33 +00:00
4b681492b6 Merge pull request #327 from johndoe6345789/codex/create-entities-and-operations-exports-r6tst8
refactor: add db core barrel exports
2025-12-29 16:08:53 +00:00
ce3010253a Merge branch 'main' into codex/create-entities-and-operations-exports-r6tst8 2025-12-29 16:08:44 +00:00
192a42b9bf Merge branch 'main' into codex/add-dialog-header,-body,-and-footer-components-xttcz7 2025-12-28 04:12:58 +00:00
44fc726bb5 Merge pull request #339 from johndoe6345789/codex/create-new-components-for-viewers-pvcc7s
Add viewer filter and detail components
2025-12-28 04:12:40 +00:00
4f3ae3e74b Merge branch 'main' into codex/create-new-components-for-viewers-pvcc7s 2025-12-28 04:12:32 +00:00
dcf8e95475 Merge pull request #340 from johndoe6345789/codex/refactor-workflow-files-and-components-rxe21p
Refactor GitHub Actions fetcher hooks and run list layout
2025-12-28 04:12:08 +00:00
0fb122fa4a Merge pull request #341 from johndoe6345789/codex/create-dbal-and-irc-modules-and-components-mloc44
Modularize demo UIs for DBAL, IRC chat, and screenshot analyzer
2025-12-28 04:11:49 +00:00
af4f2575c9 Merge branch 'main' into codex/create-dbal-and-irc-modules-and-components-mloc44 2025-12-28 04:11:41 +00:00
c54f5415ad Merge pull request #351 from johndoe6345789/codex/add-base,-advanced,-and-experimental-templates-65ldee
Modularize template config structure
2025-12-28 04:11:20 +00:00
c8a5e83705 Merge branch 'main' into codex/add-base,-advanced,-and-experimental-templates-65ldee 2025-12-28 04:11:15 +00:00
1925cf96c2 Merge pull request #353 from johndoe6345789/codex/add-parser-and-stats-files
Refactor workflow run analysis helpers
2025-12-28 04:10:52 +00:00
b107581649 Merge pull request #354 from johndoe6345789/codex/create-builders-and-mappers-files-in-dbal/core/client
feat: add dbal client builders and integration entry
2025-12-28 02:39:58 +00:00
24f2ce59d4 Merge branch 'main' into codex/create-builders-and-mappers-files-in-dbal/core/client 2025-12-28 02:39:49 +00:00
d496779024 Merge pull request #355 from johndoe6345789/codex/add-base,-advanced,-and-experimental-css-files
Organize CSS category seed data
2025-12-28 02:39:30 +00:00
9e432c0df5 Merge branch 'main' into codex/add-base,-advanced,-and-experimental-css-files 2025-12-28 02:39:24 +00:00
19b3b6d7d2 Merge pull request #352 from johndoe6345789/codex/create-test-files-for-lua-engine
Split lua-engine tests into execution and events suites
2025-12-28 02:39:03 +00:00
611bc6ca21 chore: organize css category seeds 2025-12-27 23:35:30 +00:00
66bd336722 feat: add client builders and integration entry 2025-12-27 23:35:18 +00:00
b1d81875fc refactor: split workflow run analysis helpers 2025-12-27 23:35:08 +00:00
b6da1954d4 test: split lua-engine suites 2025-12-27 23:34:56 +00:00
840f2e3596 refactor: modularize template configs 2025-12-27 23:33:55 +00:00
a2ae7e6913 feat: modularize demo components 2025-12-27 23:00:51 +00:00
6ff0174e9b refactor: modularize github actions fetcher 2025-12-27 23:00:42 +00:00
5074a0274a feat: add viewer filter and detail components 2025-12-27 23:00:32 +00:00
2d83c95eba refactor: split dialog subcomponents 2025-12-27 23:00:25 +00:00
f747301f65 refactor: modularize nerd mode ide layout 2025-12-27 23:00:13 +00:00
fb69a894a2 refactor: extract security dialog components 2025-12-27 23:00:05 +00:00
19f6be22b8 refactor: modularize rendering components 2025-12-27 22:59:53 +00:00
e532ef69db chore: add ui subset barrels 2025-12-27 22:59:42 +00:00
ce6daf533b feat: add dropdown helpers and dialog sections 2025-12-27 22:59:34 +00:00
5384332b01 refactor: split dialog components into modules 2025-12-27 22:59:13 +00:00
e306813a87 refactor: extract sidebar navigation pieces 2025-12-27 22:59:05 +00:00
1c5c6ec8f1 refactor: reorganize sonner toast components 2025-12-27 22:58:56 +00:00
8012fe13ec test: reorganize hook tests 2025-12-27 22:58:48 +00:00
6b0c7c0242 refactor: add db core barrel exports 2025-12-27 22:58:39 +00:00
c356674ea1 feat: implement IRC Webchat component with workflows, actions, and layout schema 2025-12-27 19:18:25 +00:00
02e6780cdb feat(tests): add validation, execution, and regression tests for package-glue module 2025-12-27 19:17:57 +00:00
0dcc613843 feat(tests): add unit tests for page renderer lifecycle, layout, and permissions
- Created new test files for `page-renderer.layout.test.ts`, `page-renderer.lifecycle.test.ts`, and `page-renderer.permissions.test.ts` to cover various functionalities of the PageRenderer class.
- Implemented tests for registering pages, loading pages from the database, filtering pages by level, and checking permissions based on user roles.
- Removed the old `page-renderer.test.ts` file to streamline test organization and improve maintainability.

refactor(schema): reorganize schema utility functions and add tests

- Introduced a new structure for schema utility functions, grouping them into directories based on their functionality (e.g., `field`, `model`, `record`).
- Added tests for schema utilities, including validation, serialization, and migration functions.
- Created mock data for testing schema-related functionalities, ensuring comprehensive coverage of edge cases and expected behaviors.
- Added backward compatibility for schema utilities through a new entry point.

chore: clean up unused code and improve code organization

- Removed redundant code and improved the organization of schema utility functions for better readability and maintainability.
- Ensured all functions are properly imported and exported from their respective directories.
2025-12-27 19:17:32 +00:00
93a93b995d feat: add comprehensive tests for workflow engine errors, execution, and persistence 2025-12-27 19:16:04 +00:00
6049c28cdd feat: add test analysis CLI and report generation functionality 2025-12-27 19:15:47 +00:00
d152f822b3 feat: add class and function detectors for TypeScript/TSX source files 2025-12-27 19:15:25 +00:00
25228f3371 feat: implement test coverage report generation with improved structure and best practices 2025-12-27 19:14:17 +00:00
027320b644 Merge pull request #271 from johndoe6345789/codex/add-new-react-components-for-data
Add data UI components for generic pages, quick guides, and SMTP
2025-12-27 19:00:37 +00:00
262a00c3a9 Merge branch 'main' into codex/add-new-react-components-for-data 2025-12-27 19:00:29 +00:00
d6c6a85e5a feat: add data-specific ui components 2025-12-27 19:00:17 +00:00
ad6b8b7754 Merge pull request #270 from johndoe6345789/codex/create-auth-components-for-user-authentication
Refactor auth subcomponents for login and god credentials
2025-12-27 18:59:56 +00:00
e52aa4470d Merge branch 'main' into codex/create-auth-components-for-user-authentication 2025-12-27 18:59:51 +00:00
f5141369c7 refactor: extract auth subcomponents 2025-12-27 18:59:32 +00:00
d3595ac878 Merge pull request #269 from johndoe6345789/codex/add-dependenciestab-and-scriptstab-components
Refactor package manager dialogs
2025-12-27 18:59:11 +00:00
8f9be2fa25 Merge branch 'main' into codex/add-dependenciestab-and-scriptstab-components 2025-12-27 18:59:00 +00:00
a496ff5423 refactor: modularize package manager dialogs 2025-12-27 18:58:49 +00:00
efac7d35c4 Merge pull request #268 from johndoe6345789/codex/create-and-organize-test-files
test: reorganize hook tests
2025-12-27 18:58:31 +00:00
d7da9697fb Merge branch 'main' into codex/create-and-organize-test-files 2025-12-27 18:58:23 +00:00
d04fe3a4f0 test: reorganize hook tests 2025-12-27 18:58:11 +00:00
b4028dd6f4 Merge pull request #267 from johndoe6345789/codex/create-builders-and-mappers-files-in-dbal/core/client
feat: add dbal client builders and integration entry
2025-12-27 18:57:46 +00:00
7f0b4e073d Merge branch 'main' into codex/create-builders-and-mappers-files-in-dbal/core/client 2025-12-27 18:57:39 +00:00
166162718f feat: add client builders and integration entry 2025-12-27 18:57:30 +00:00
39687bec71 Merge pull request #266 from johndoe6345789/codex/add-parser-and-stats-files
Refactor workflow run analysis helpers
2025-12-27 18:57:13 +00:00
a0ae41ade9 Merge branch 'main' into codex/add-parser-and-stats-files 2025-12-27 18:57:05 +00:00
0656df5a0f refactor: split workflow run analysis helpers 2025-12-27 18:56:55 +00:00
4ccacaa2f4 Merge pull request #265 from johndoe6345789/codex/add-base,-advanced,-and-experimental-templates
Modularize template config structure
2025-12-27 18:56:37 +00:00
2dbcdb9f23 Merge branch 'main' into codex/add-base,-advanced,-and-experimental-templates 2025-12-27 18:56:29 +00:00
7282290d1a refactor: modularize template configs 2025-12-27 18:56:18 +00:00
4af202cdc0 Merge pull request #264 from johndoe6345789/codex/add-javascript-injection-and-xss-modules
Split JavaScript security patterns into modules
2025-12-27 18:55:59 +00:00
298d8bbcfa Merge branch 'main' into codex/add-javascript-injection-and-xss-modules 2025-12-27 18:55:50 +00:00
a37459ed62 chore: split javascript security patterns 2025-12-27 18:55:07 +00:00
f37078c207 Merge pull request #263 from johndoe6345789/codex/create-security-scanner-tests
Split security scanner tests into detection and reporting suites
2025-12-27 18:54:46 +00:00
76df9a59e6 Merge branch 'main' into codex/create-security-scanner-tests 2025-12-27 18:54:37 +00:00
33cc1322cc test: split security scanner coverage 2025-12-27 18:54:27 +00:00
9901bd7df7 Merge pull request #262 from johndoe6345789/codex/add-schema/default/forms,-components-and-validation
Refactor default schema into modular files
2025-12-27 18:53:55 +00:00
f7c891e3d3 Merge branch 'main' into codex/add-schema/default/forms,-components-and-validation 2025-12-27 18:53:47 +00:00
50f934abbb refactor: split default schema definitions 2025-12-27 18:53:35 +00:00
ae26bd4f18 Merge pull request #260 from johndoe6345789/codex/create-type-definition-files
Split theme type declarations into smaller modules
2025-12-27 18:53:17 +00:00
f0bdeb860a chore: split theme type declarations 2025-12-27 18:53:08 +00:00
99d4411a41 Merge pull request #259 from johndoe6345789/codex/create-connectionform,-schemaviewer,-and-actiontoolbar
feat: modularize database manager UI
2025-12-27 18:50:27 +00:00
87ea17056c Merge branch 'main' into codex/create-connectionform,-schemaviewer,-and-actiontoolbar 2025-12-27 18:50:18 +00:00
6797acc724 feat: modularize database manager UI 2025-12-27 18:50:05 +00:00
ac6b954585 Merge pull request #258 from johndoe6345789/codex/add-ruleeditor,-preview,-and-hooks-files
Refactor CSS class builder into modular components
2025-12-27 18:49:44 +00:00
53d84e7f84 Merge branch 'main' into codex/add-ruleeditor,-preview,-and-hooks-files 2025-12-27 18:49:35 +00:00
cb90ae91b5 refactor: modularize css class builder 2025-12-27 18:49:26 +00:00
33411e3b85 Merge pull request #256 from johndoe6345789/codex/add-user-management-components
feat: add user management subcomponents
2025-12-27 18:49:05 +00:00
4ab7aac63e Merge branch 'main' into codex/add-user-management-components 2025-12-27 18:48:59 +00:00
1f7c2e637e Merge pull request #257 from johndoe6345789/codex/create-fields-and-actions-components
Refactor component dialog fields and hierarchy tree
2025-12-27 18:48:35 +00:00
9c354fdac5 Merge branch 'main' into codex/create-fields-and-actions-components 2025-12-27 18:48:26 +00:00
f57b41f86d refactor: extract dialog fields and hierarchy tree 2025-12-27 18:48:15 +00:00
1e9a6271ea feat: add user management subcomponents 2025-12-27 18:47:43 +00:00
7989c700b9 Merge pull request #254 from johndoe6345789/codex/create-shared-powertransfer-tabs-component
Refactor power transfer tab layout
2025-12-27 18:47:21 +00:00
02e7188b20 Merge branch 'main' into codex/create-shared-powertransfer-tabs-component 2025-12-27 18:47:13 +00:00
1523cf735c refactor: extract power transfer sections 2025-12-27 18:47:02 +00:00
adedf5f70c Merge pull request #253 from johndoe6345789/codex/create-level4/tabs/config.ts-and-tabcontent.tsx
refactor: modularize level4 tabs
2025-12-27 18:46:26 +00:00
c069bd0540 Merge branch 'main' into codex/create-level4/tabs/config.ts-and-tabcontent.tsx 2025-12-27 18:46:18 +00:00
871b84ebf4 refactor: modularize level4 tabs 2025-12-27 18:46:06 +00:00
db8c01de1b Merge pull request #251 from johndoe6345789/codex/create-section-components-for-levels
Refactor level pages to share section components
2025-12-27 18:45:35 +00:00
85afb870e8 Merge branch 'main' into codex/create-section-components-for-levels 2025-12-27 18:45:26 +00:00
57a6bd32d6 refactor: share level section components 2025-12-27 18:45:14 +00:00
afacdb82cc Merge pull request #250 from johndoe6345789/codex/add-contact-form-example-components
Add contact form example config and preview
2025-12-27 18:44:55 +00:00
b9350f0da9 Merge branch 'main' into codex/add-contact-form-example-components 2025-12-27 18:44:49 +00:00
4f2bff3a47 feat: add contact form example config and preview 2025-12-27 18:44:34 +00:00
de605d4809 Merge pull request #248 from johndoe6345789/codex/create-schema-level-4-files
Refactor Level 4 schema editor into modular components
2025-12-27 18:44:14 +00:00
67c7509bb9 Merge branch 'main' into codex/create-schema-level-4-files 2025-12-27 18:44:05 +00:00
ecd04fa1a0 refactor: modularize level 4 schema editor 2025-12-27 18:43:54 +00:00
f00d345fe8 Merge pull request #247 from johndoe6345789/codex/add-selectors,-actions,-and-storage-files
Refactor Lua blocks state hook utilities
2025-12-27 18:43:37 +00:00
d161f0f9cd Merge branch 'main' into codex/add-selectors,-actions,-and-storage-files 2025-12-27 18:43:29 +00:00
a72299176c refactor: modularize lua blocks state hook 2025-12-27 18:43:12 +00:00
a26666199c Merge pull request #245 from johndoe6345789/codex/create-blockitem-and-grouping-files
refactor: extract lua block item and grouping helpers
2025-12-27 18:42:52 +00:00
7932581ec3 Merge branch 'main' into codex/create-blockitem-and-grouping-files 2025-12-27 18:42:46 +00:00
a93ec759d6 refactor: extract lua block item and grouping helpers 2025-12-27 18:42:35 +00:00
4d8394acc0 refactor: extract lua block item and grouping helpers 2025-12-27 18:42:14 +00:00
704c1bca86 Merge pull request #244 from johndoe6345789/codex/add-luasnippetlibrary-components
Refactor Lua snippet library into modular components
2025-12-27 18:41:52 +00:00
ee76be73f2 Merge branch 'main' into codex/add-luasnippetlibrary-components 2025-12-27 18:41:47 +00:00
e0c556c279 refactor: modularize lua snippet library 2025-12-27 18:41:21 +00:00
73a53c4715 Merge pull request #242 from johndoe6345789/codex/create-blocklistview,-codepreview,-and-useluablockeditorstat
Refactor Lua blocks editor composition
2025-12-27 18:41:01 +00:00
6d4b786150 Merge branch 'main' into codex/create-blocklistview,-codepreview,-and-useluablockeditorstat 2025-12-27 18:40:53 +00:00
7c061b43ca refactor: modularize lua blocks editor 2025-12-27 18:40:43 +00:00
adcd9c69de Merge pull request #240 from johndoe6345789/codex/create-header-and-sidebar-components
Refactor Codegen Studio layout
2025-12-27 18:40:23 +00:00
4bd98918cc Merge branch 'main' into codex/create-header-and-sidebar-components 2025-12-27 18:40:18 +00:00
97d461b667 refactor: modularize codegen studio layout 2025-12-27 18:40:06 +00:00
d322e425cb Merge pull request #239 from johndoe6345789/codex/add-package-operations-for-publish,-unpublish,-validate
Add package publish lifecycle helpers
2025-12-27 18:39:46 +00:00
7ae32965cf Merge branch 'main' into codex/add-package-operations-for-publish,-unpublish,-validate 2025-12-27 18:39:37 +00:00
c0f1b5af14 feat: add package lifecycle operations 2025-12-27 18:39:27 +00:00
a7fde7cd0d Merge pull request #237 from johndoe6345789/codex/create-user-operations-in-core/entities
Refactor user operations into separate modules
2025-12-27 18:39:07 +00:00
cea8211297 Merge branch 'main' into codex/create-user-operations-in-core/entities 2025-12-27 18:38:59 +00:00
66f9d2cfe6 refactor: split user operations into separate modules 2025-12-27 18:38:50 +00:00
366ffb5de9 Merge pull request #235 from johndoe6345789/codex/add-websocket-bridge-lifecycle-and-routing
Refactor websocket bridge lifecycle and routing
2025-12-27 18:38:23 +00:00
e848a7bac5 Merge branch 'main' into codex/add-websocket-bridge-lifecycle-and-routing 2025-12-27 18:38:13 +00:00
b10bef82a9 refactor: harden websocket bridge lifecycle 2025-12-27 18:38:04 +00:00
1e3dff83fa Merge pull request #221 from johndoe6345789/codex/create-tenant-context-and-audit-hooks
Refactor tenant-aware blob storage context and hooks
2025-12-27 18:37:46 +00:00
901a5438dd Merge branch 'main' into codex/create-tenant-context-and-audit-hooks 2025-12-27 18:37:39 +00:00
d84c55cfe1 Merge pull request #232 from johndoe6345789/codex/introduce-shared-helpers-and-refactor-storage
Refactor memory storage helpers into utilities and serialization
2025-12-27 18:37:13 +00:00
9331a1b7f7 Merge branch 'main' into codex/introduce-shared-helpers-and-refactor-storage 2025-12-27 18:37:04 +00:00
bcac86fce9 refactor: modularize memory storage helpers 2025-12-27 18:36:56 +00:00
824a1f4487 Merge pull request #230 from johndoe6345789/codex/refactor-acl-adapter-structure-and-imports
Refactor ACL adapter into strategies
2025-12-27 18:36:35 +00:00
af4a2246c0 Merge branch 'main' into codex/refactor-acl-adapter-structure-and-imports 2025-12-27 18:36:27 +00:00
fcd0e55125 refactor: modularize ACL adapter strategies 2025-12-27 18:36:16 +00:00
4b3d5f4043 Merge pull request #228 from johndoe6345789/codex/create-c++-build-assistant-files
Refactor cpp build assistant CLI into modular components
2025-12-27 18:35:56 +00:00
a47085dc67 Merge branch 'main' into codex/create-c++-build-assistant-files 2025-12-27 18:35:48 +00:00
756c48fc83 refactor: modularize cpp build assistant 2025-12-27 18:35:35 +00:00
ac45fb171c Merge pull request #226 from johndoe6345789/codex/add-moderatorpanel-components
Refactor moderator panel into modular components
2025-12-27 18:35:08 +00:00
7562c4184d Merge branch 'main' into codex/add-moderatorpanel-components 2025-12-27 18:34:58 +00:00
fcd7322861 refactor: modularize moderator panel components 2025-12-27 18:34:46 +00:00
7a64fa6b7e Merge pull request #224 from johndoe6345789/codex/add-dropdownconfigform-and-previewpane
Refactor dropdown config manager into modular components
2025-12-27 18:34:31 +00:00
9d3a39f6cc Merge branch 'main' into codex/add-dropdownconfigform-and-previewpane 2025-12-27 18:34:23 +00:00
d9a8e75fbf refactor: extract dropdown manager components 2025-12-27 18:34:09 +00:00
5cb1e9f63e Merge pull request #223 from johndoe6345789/codex/create-routestable,-routeeditor,-and-preview-components
Refactor page routes manager into modular components
2025-12-27 18:33:50 +00:00
53d365f07d Merge branch 'main' into codex/create-routestable,-routeeditor,-and-preview-components 2025-12-27 18:33:34 +00:00
a320a85353 refactor: split page routes manager components 2025-12-27 18:33:06 +00:00
01ae4c753f refactor: modularize tenant-aware blob storage 2025-12-27 18:32:40 +00:00
c04d8923b3 Merge pull request #219 from johndoe6345789/codex/create-types-directory-and-files
Add foundation type modules
2025-12-27 18:32:21 +00:00
658bd1e196 Merge branch 'main' into codex/create-types-directory-and-files 2025-12-27 18:32:12 +00:00
149ee90339 chore: add foundation type modules 2025-12-27 18:32:01 +00:00
eea561c225 Merge pull request #217 from johndoe6345789/codex/create-toolbar-and-schemasection-components
Extract JSON editor UI components
2025-12-27 18:31:45 +00:00
ead2acee40 Merge branch 'main' into codex/create-toolbar-and-schemasection-components 2025-12-27 18:31:40 +00:00
07efe7609a refactor: extract json editor ui components 2025-12-27 18:31:22 +00:00
daefe075b3 Merge pull request #216 from johndoe6345789/codex/add-paletteeditor-and-previewpane-components
Modularize theme editor components
2025-12-27 18:31:08 +00:00
b6b48eafb3 feat: modularize theme editor 2025-12-27 18:30:57 +00:00
cadaa8c5fe Merge pull request #211 from johndoe6345789/codex/refactor-error-as-todo-refactor.ts
Refactor error-as-todo runner into modular components
2025-12-27 18:24:06 +00:00
f4a5950c31 Merge branch 'main' into codex/refactor-error-as-todo-refactor.ts 2025-12-27 18:23:48 +00:00
d44385fc41 refactor: modularize error-as-todo runner 2025-12-27 18:23:08 +00:00
copilot-swe-agent[bot]
7ebedc2d56 Fix API error detection in fetch_all_open_issues
- Corrected error check to detect GitHub API error objects
- API errors return {message: "..."} not array with message
- All tests still passing

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:22:07 +00:00
25220fad97 Merge pull request #207 from johndoe6345789/codex/refactor-dbal-directory-into-lambda-modules
Refactor DBAL helpers into modular lambda subfolders
2025-12-27 18:21:44 +00:00
b9ac291e68 Merge branch 'main' into codex/refactor-dbal-directory-into-lambda-modules 2025-12-27 18:21:37 +00:00
880544e58d refactor: modularize dbal utilities 2025-12-27 18:21:26 +00:00
579103e916 Merge pull request #206 from johndoe6345789/codex/refactor-tools-scripts-into-lambda-files
refactor: modularize stub tooling
2025-12-27 18:21:03 +00:00
0abb48c7aa Merge branch 'main' into codex/refactor-tools-scripts-into-lambda-files 2025-12-27 18:20:54 +00:00
6447e7a203 refactor: modularize stub tooling 2025-12-27 18:20:45 +00:00
b7a721cf8d Merge pull request #203 from johndoe6345789/codex/refactor-dbaldemo-into-separate-files
Refactor DBAL demo tabs into separate components
2025-12-27 18:19:56 +00:00
c0015f45fc Merge branch 'main' into codex/refactor-dbaldemo-into-separate-files 2025-12-27 18:19:51 +00:00
219637c4c6 refactor: split dbal demo tabs 2025-12-27 18:19:38 +00:00
copilot-swe-agent[bot]
a9fc5c4773 Add dry-run mode and comprehensive documentation
- Added --dry-run flag to preview changes without closing issues
- Created comprehensive README-triage.md with usage examples
- Updated test suite to cover all new features
- Script is now production-ready with safety features

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:19:30 +00:00
copilot-swe-agent[bot]
1081dc8934 Implement smart duplicate detection for triage script
- Auto-detects ALL duplicate issue titles without requiring manual config
- Groups duplicates by title and processes each group
- Keeps most recent issue open, closes all duplicates
- Supports optional SEARCH_TITLE filter for specific titles
- All tests passing

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:16:21 +00:00
1a6d1f5f2d Merge pull request #200 from johndoe6345789/codex/refactor-command.tsx-into-multiple-files
Refactor command dialog into modular components
2025-12-27 18:12:44 +00:00
f5baf35666 Merge branch 'main' into codex/refactor-command.tsx-into-multiple-files 2025-12-27 18:12:39 +00:00
30f35ae07f refactor: split command dialog components 2025-12-27 18:12:29 +00:00
06def0d890 Merge pull request #198 from johndoe6345789/codex/refactor-runlist-into-lambda-components
Refactor run list view into reusable subcomponents
2025-12-27 18:12:13 +00:00
43f8325ad2 Merge branch 'main' into codex/refactor-runlist-into-lambda-components 2025-12-27 18:12:04 +00:00
f273de2cab refactor: extract run list components 2025-12-27 18:11:52 +00:00
76f4d131ad Merge pull request #197 from johndoe6345789/codex/refactor-tool-scripts-into-smaller-lambdas
Modularize error-as-todo refactoring tool
2025-12-27 18:11:35 +00:00
1beeeba7ff Merge branch 'main' into codex/refactor-tool-scripts-into-smaller-lambdas 2025-12-27 18:11:26 +00:00
d12b24a36b refactor: modularize error-as-todo runner 2025-12-27 18:11:17 +00:00
copilot-swe-agent[bot]
8d67fe8a49 Initial plan 2025-12-27 18:10:59 +00:00
3e0dbfd78d Merge pull request #183 from johndoe6345789/copilot/fix-issue-triage-script
Fix triage script to dynamically find duplicates via GitHub API
2025-12-27 18:09:14 +00:00
342a76bbad Merge branch 'main' into copilot/fix-issue-triage-script 2025-12-27 18:09:06 +00:00
copilot-swe-agent[bot]
21c735f126 Add before/after comparison document for triage script improvements
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:05:35 +00:00
copilot-swe-agent[bot]
99132e65ec Add comprehensive documentation for triage scripts
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:04:05 +00:00
copilot-swe-agent[bot]
6903901ec0 Fix triage script to dynamically find and close duplicates using GitHub API
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 18:03:02 +00:00
b20011a21e Merge pull request #191 from johndoe6345789/codex/refactor-database-admin-structure
Refactor default data seeding into modular builders
2025-12-27 18:01:27 +00:00
8fe11b60f1 Merge branch 'main' into codex/refactor-database-admin-structure 2025-12-27 18:01:19 +00:00
086db10f74 refactor: modularize default data seeding 2025-12-27 18:00:57 +00:00
b5e6501bbb Merge pull request #189 from johndoe6345789/codex/refactor-pagination-components-and-utilities
Refactor pagination components into dedicated files
2025-12-27 18:00:28 +00:00
566fa19031 Merge branch 'main' into codex/refactor-pagination-components-and-utilities 2025-12-27 18:00:19 +00:00
a91917fde5 refactor: split pagination components 2025-12-27 18:00:09 +00:00
b70d8649f5 Merge pull request #188 from johndoe6345789/codex/refactor-block-metadata-and-lua-helpers
Refactor Lua block metadata and serialization utilities
2025-12-27 17:59:46 +00:00
76b1ce9486 refactor: modularize lua block metadata 2025-12-27 17:59:35 +00:00
1fd72be97d Merge pull request #186 from johndoe6345789/codex/refactor-catalog-array-into-thematic-files
Refactor component catalog into thematic modules
2025-12-27 17:58:51 +00:00
2ad62be4e9 Merge branch 'main' into codex/refactor-catalog-array-into-thematic-files 2025-12-27 17:58:42 +00:00
ed704f93aa refactor: segment component catalog 2025-12-27 17:58:29 +00:00
6b033ea57c Merge pull request #184 from johndoe6345789/codex/ensure-codebase-is-mui-theme-driven
Sync theme mode attributes with MUI theme
2025-12-27 17:58:00 +00:00
046c81ec9c Merge branch 'main' into codex/ensure-codebase-is-mui-theme-driven 2025-12-27 17:57:56 +00:00
15d8fa4aff chore: sync mui theme mode with document 2025-12-27 17:57:45 +00:00
copilot-swe-agent[bot]
4f9f42f5c2 Initial plan 2025-12-27 17:55:57 +00:00
8b2f836c2c Merge pull request #180 from johndoe6345789/codex/organize-components-and-extract-logic
Refactor component hierarchy editor into modular hooks
2025-12-27 17:52:07 +00:00
64496b9549 Merge branch 'main' into codex/organize-components-and-extract-logic 2025-12-27 17:52:02 +00:00
782ac21120 refactor: modularize component hierarchy editor 2025-12-27 17:51:53 +00:00
24d50f931a Merge pull request #178 from johndoe6345789/codex/organize-import/export-helpers-into-subfolders
Organize database admin import/export helpers
2025-12-27 17:51:36 +00:00
b693eeaf24 Merge branch 'main' into codex/organize-import/export-helpers-into-subfolders 2025-12-27 17:51:28 +00:00
93092c3a21 refactor: organize database admin import/export helpers 2025-12-27 17:51:19 +00:00
c41140391f Merge pull request #177 from johndoe6345789/codex/split-packagemanager-into-multiple-files
Refactor package manager into modular components
2025-12-27 17:51:00 +00:00
df9193ffe6 refactor: split package manager components 2025-12-27 17:50:50 +00:00
4a12a6f2dd Merge pull request #140 from johndoe6345789/copilot/fix-pre-deployment-validation
Fix Prisma v7 configuration for pre-deployment validation
2025-12-27 17:45:36 +00:00
8ec13ee23d Merge branch 'main' into copilot/fix-pre-deployment-validation 2025-12-27 17:45:22 +00:00
e3a8a91051 Merge pull request #173 from johndoe6345789/codex/group-lua-snippets-by-category
Refactor Lua snippets into category modules
2025-12-27 17:40:52 +00:00
e57cf107fe Merge branch 'main' into codex/group-lua-snippets-by-category 2025-12-27 17:40:42 +00:00
5cbbf0b6b0 refactor: reorganize lua snippets 2025-12-27 17:40:30 +00:00
af286fac68 Merge pull request #170 from johndoe6345789/codex/refactor-navigation-component-structure
Refactor navigation component into modular files
2025-12-27 17:40:05 +00:00
7ce7f9a133 Merge branch 'main' into codex/refactor-navigation-component-structure 2025-12-27 17:39:55 +00:00
59efb7ea1a refactor: split navigation components 2025-12-27 17:39:45 +00:00
5dc236bd1c Merge pull request #169 from johndoe6345789/codex/refactor-workfloweditor-into-separate-modules
Refactor workflow editor into modular components
2025-12-27 17:39:27 +00:00
bb3cb93432 Merge branch 'main' into codex/refactor-workfloweditor-into-separate-modules 2025-12-27 17:39:21 +00:00
ed97047bdf refactor: modularize workflow editor 2025-12-27 17:38:58 +00:00
823c2d979f Merge pull request #165 from johndoe6345789/codex/refactor-errorlogstab-into-lambda-modules
Refactor error logs tab into modular components
2025-12-27 17:38:05 +00:00
4b4f370d53 Merge branch 'main' into codex/refactor-errorlogstab-into-lambda-modules 2025-12-27 17:37:55 +00:00
fb7c1ea5f3 refactor: modularize error logs tab 2025-12-27 17:37:10 +00:00
e4792fa1f2 Merge pull request #163 from johndoe6345789/codex/refactor-irc-webchat.ts-for-modular-exports
Refactor IRC webchat package definition
2025-12-27 17:36:49 +00:00
cda8db4a4e Merge branch 'main' into codex/refactor-irc-webchat.ts-for-modular-exports 2025-12-27 17:36:44 +00:00
9ce4031af9 refactor: modularize irc webchat package 2025-12-27 17:36:31 +00:00
b1557a65b1 Merge pull request #161 from johndoe6345789/codex/refactor-luaeditor-into-separate-modules
Refactor Lua editor into modular modules
2025-12-27 17:36:15 +00:00
7767f7fdf5 Merge branch 'main' into codex/refactor-luaeditor-into-separate-modules 2025-12-27 17:36:06 +00:00
61710f3f73 refactor: modularize lua editor concerns 2025-12-27 17:35:25 +00:00
fb0f1773aa Merge pull request #159 from johndoe6345789/codex/refactor-errorlogstab-into-smaller-components
Refactor error logs tab into modular components
2025-12-27 17:35:07 +00:00
f8721970f0 Merge branch 'main' into codex/refactor-errorlogstab-into-smaller-components 2025-12-27 17:35:00 +00:00
bd3779820a refactor: modularize error logs tab 2025-12-27 17:34:49 +00:00
fb72fb61e1 Merge pull request #158 from johndoe6345789/codex/split-large-dbal-files-into-modules
Refactor DBAL storage modules into modular subdirectories
2025-12-27 17:34:33 +00:00
18896aed7f Merge branch 'main' into codex/split-large-dbal-files-into-modules 2025-12-27 17:34:22 +00:00
b741328642 refactor: modularize dbal storage modules 2025-12-27 17:34:10 +00:00
c8a5da4971 Merge pull request #156 from johndoe6345789/codex/refactor-tool-scripts-into-single-purpose-lambdas
Refactor lambda refactoring tools into modular helpers
2025-12-27 17:33:46 +00:00
3dde857965 Merge branch 'main' into codex/refactor-tool-scripts-into-single-purpose-lambdas 2025-12-27 17:33:36 +00:00
f7f15bacb3 refactor: modularize lambda refactor tooling 2025-12-27 17:33:26 +00:00
e11b7c4bd1 Merge pull request #154 from johndoe6345789/codex/refactor-errorlogstab-into-modules
Refactor ErrorLogsTab into modular components
2025-12-27 17:32:56 +00:00
e77bc711cb Merge branch 'main' into codex/refactor-errorlogstab-into-modules 2025-12-27 17:32:51 +00:00
ade49ad0e9 refactor: modularize error logs tab 2025-12-27 17:32:41 +00:00
copilot-swe-agent[bot]
28e8ef1828 Remove deprecated @types/jszip package
- jszip provides its own type definitions
- @types/jszip is deprecated and causes conflicts
- Addresses code review feedback

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:32:16 +00:00
b17c9872a3 Merge pull request #151 from johndoe6345789/codex/refactor-packageimportexport-into-modules
Refactor PackageImportExport into modular handlers
2025-12-27 17:31:36 +00:00
9503348263 Merge branch 'main' into codex/refactor-packageimportexport-into-modules 2025-12-27 17:31:28 +00:00
79632c2913 refactor: modularize package import/export flow 2025-12-27 17:31:18 +00:00
fb7a8b8533 Merge pull request #150 from johndoe6345789/codex/decompose-luaeditor-into-modules
Refactor Lua editor into modular components
2025-12-27 17:30:58 +00:00
2778ea1daa Merge branch 'main' into codex/decompose-luaeditor-into-modules 2025-12-27 17:30:49 +00:00
5643fa5f8d refactor: modularize lua editor 2025-12-27 17:30:36 +00:00
3edcbc4416 Merge pull request #139 from johndoe6345789/copilot/update-dependencies-dashboard
Verify and document Renovate dependency updates status
2025-12-27 17:29:50 +00:00
bb19d5ed2e Merge branch 'main' into copilot/update-dependencies-dashboard 2025-12-27 17:29:43 +00:00
copilot-swe-agent[bot]
f89aaf92a4 Fix Prisma v7 configuration for pre-deployment validation
- Remove url from prisma/schema.prisma (not allowed in v7)
- Add proper prisma.config.ts with defineConfig from prisma/config
- Use process.env.DATABASE_URL with fallback for CI environments
- Generate Prisma Client successfully with v7 configuration

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:28:01 +00:00
copilot-swe-agent[bot]
86a0445cb3 Add issue comment template for Dependency Dashboard
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:26:51 +00:00
copilot-swe-agent[bot]
6bd06111af Add comprehensive Renovate Dashboard status report
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:25:56 +00:00
43b904a0ca Merge pull request #146 from johndoe6345789/codex/refactor-package-catalog-structure
Refactor package catalog into per-package definitions
2025-12-27 17:22:27 +00:00
copilot-swe-agent[bot]
5a3236a228 Verify Renovate Dashboard dependency status - all checked updates applied
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:22:24 +00:00
b835b50174 Merge branch 'main' into codex/refactor-package-catalog-structure 2025-12-27 17:22:17 +00:00
a9e34e7432 refactor: modularize package catalog definitions 2025-12-27 17:22:07 +00:00
14fba411f9 Merge pull request #144 from johndoe6345789/codex/refactor-luablockseditor-structure-and-files
Refactor Lua blocks editor into modular components
2025-12-27 17:21:47 +00:00
9cd6bcfd37 Merge branch 'main' into codex/refactor-luablockseditor-structure-and-files 2025-12-27 17:21:39 +00:00
acf0a7074e refactor: modularize lua blocks editor 2025-12-27 17:21:29 +00:00
5f48cedfa3 Merge pull request #143 from johndoe6345789/codex/refactor-github-components-and-hooks-structure
refactor: modularize github actions viewer
2025-12-27 17:21:07 +00:00
cacf567534 Merge branch 'main' into codex/refactor-github-components-and-hooks-structure 2025-12-27 17:21:05 +00:00
072506a637 refactor: modularize github actions viewer 2025-12-27 17:20:36 +00:00
8378449299 Merge pull request #141 from johndoe6345789/codex/refactor-tools/refactoring-structure
Refactor multi-language refactor tooling
2025-12-27 17:20:02 +00:00
37a53e1c65 Merge branch 'main' into codex/refactor-tools/refactoring-structure 2025-12-27 17:19:47 +00:00
4454e4d104 refactor: modularize multi-language refactor tooling 2025-12-27 17:19:34 +00:00
copilot-swe-agent[bot]
6f8dad83e8 Initial plan 2025-12-27 17:18:19 +00:00
copilot-swe-agent[bot]
79b12f9dc8 Initial plan 2025-12-27 17:14:58 +00:00
d370695498 Merge pull request #134 from johndoe6345789/copilot/update-dependencies-dashboard
Update dependencies per Renovate: framer-motion → motion v12.6.2, actions/checkout v4 → v6
2025-12-27 17:13:28 +00:00
2f37440ae4 Merge branch 'main' into copilot/update-dependencies-dashboard 2025-12-27 17:13:16 +00:00
84bc504f23 Merge pull request #131 from johndoe6345789/copilot/fix-pre-deployment-issue
Fix Prisma 7 monorepo configuration and add required SQLite adapter
2025-12-27 17:12:38 +00:00
4e1f627644 Merge branch 'main' into copilot/fix-pre-deployment-issue 2025-12-27 17:12:32 +00:00
copilot-swe-agent[bot]
ba063117b6 Fix motion package version to match Renovate requirement (12.6.2)
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:09:36 +00:00
copilot-swe-agent[bot]
2bf3e274f7 Update docs with correct Prisma 7 migration info
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:03:49 +00:00
copilot-swe-agent[bot]
a45a630a76 Update dependencies: replace framer-motion with motion, update actions/checkout to v6, remove deprecated @types/jszip
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:03:08 +00:00
copilot-swe-agent[bot]
3afbd7228b Add SQLite adapter for Prisma 7 runtime
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 17:01:37 +00:00
copilot-swe-agent[bot]
e4db8a0bdc Fix Prisma 7 monorepo setup - install at root level
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:56:34 +00:00
a0c47a8b81 Merge pull request #135 from johndoe6345789/codex/refactor-typescript-files-into-modular-structure
Refactor level 1 homepage builder into modular components
2025-12-27 16:54:56 +00:00
9a7e5bf8c8 refactor: modularize level1 homepage builder 2025-12-27 16:54:45 +00:00
copilot-swe-agent[bot]
05fac4ec16 Initial plan 2025-12-27 16:53:39 +00:00
46188f6fb9 Merge pull request #132 from johndoe6345789/codex/refactor-typescript-files-to-modular-structure
Refactor render and size analysis tools into modular lambda structure
2025-12-27 16:49:28 +00:00
94aa22828f refactor: modularize render analysis and size checks 2025-12-27 16:49:05 +00:00
copilot-swe-agent[bot]
cc7b5c78de Initial plan 2025-12-27 16:48:11 +00:00
9c2f42c298 Merge pull request #127 from johndoe6345789/copilot/rollback-production-deployment
Fix Prisma 7 monorepo config and improve deployment failure handling
2025-12-27 16:47:10 +00:00
89f0cc0855 Merge branch 'main' into copilot/rollback-production-deployment 2025-12-27 16:47:02 +00:00
60669ead49 Merge pull request #129 from johndoe6345789/codex/refactor-typescript-files-into-modules
Refactor complexity checker into modular lambda-per-file layout
2025-12-27 16:44:50 +00:00
copilot-swe-agent[bot]
23d01a0b11 Final code review improvements
- Use 'prisma/config' import (re-export from @prisma/config for better compatibility)
- Change workflow condition from always() to failure() for proper job triggering
- Fix migration rollback command syntax with proper schema path
- All changes verified and tested successfully

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:44:41 +00:00
3cab2e42e1 refactor: modularize complexity checker 2025-12-27 16:44:25 +00:00
copilot-swe-agent[bot]
bb25361c97 Address code review feedback
- Remove dotenv import attempt (not needed, DATABASE_URL set via env)
- Remove @ts-ignore directive
- Replace dangerous 'prisma migrate reset' with safer 'prisma migrate resolve' in rollback docs
- Verified Prisma generation still works without dotenv import

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:43:00 +00:00
copilot-swe-agent[bot]
f7dfa1d559 Update deployment workflow to prefer roll-forward over rollback
- Rename rollback-preparation job to deployment-failure-handler
- Add detection of pre-deployment vs production failures
- Provide clear roll-forward guidance emphasizing it as preferred approach
- Include when rollback is appropriate (only for critical production issues)
- Create more actionable issues with fix-forward checklists
- Add helpful troubleshooting for common pre-deployment failures

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:40:56 +00:00
copilot-swe-agent[bot]
def61b1da3 Fix Prisma client generation in CI/CD
- Fix import path from 'prisma/config' to '@prisma/config' in prisma.config.ts
- Add proper output path to generator in schema.prisma for monorepo structure
- Make dotenv import optional with try/catch for CI environments
- Prisma client now generates successfully in frontends/nextjs/node_modules/.prisma/client

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:39:50 +00:00
98eddc7c65 Merge pull request #128 from johndoe6345789/codex/refactor-typescript-files-into-modules
Refactor implementation completeness analyzer into modular files
2025-12-27 16:37:10 +00:00
5689e9223e refactor: modularize implementation completeness analyzer 2025-12-27 16:36:46 +00:00
copilot-swe-agent[bot]
6db635e3bc Initial plan 2025-12-27 16:30:45 +00:00
d6dd5890b2 Merge pull request #79 from johndoe6345789/copilot/ensure-molecules-import-atoms
Ensure molecules only import from atoms, not organisms
2025-12-27 16:27:33 +00:00
e4cfc2867d Merge branch 'main' into copilot/ensure-molecules-import-atoms 2025-12-27 16:26:51 +00:00
copilot-swe-agent[bot]
438628198f Mark molecule import audit as complete in TODO
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:25:02 +00:00
5753a0e244 Merge pull request #75 from johndoe6345789/copilot/convert-todo-items-to-issues
Enhance TODO-to-issues conversion with filtering, monitoring, and automation
2025-12-27 16:24:43 +00:00
b2f198dbc8 Merge branch 'main' into copilot/convert-todo-items-to-issues 2025-12-27 16:24:37 +00:00
96fe4a6ce3 Merge branch 'main' into copilot/ensure-molecules-import-atoms 2025-12-27 16:23:31 +00:00
51ed478f50 Merge pull request #77 from johndoe6345789/copilot/audit-organisms-composition
Complete organism composition audit per Atomic Design principles
2025-12-27 16:23:14 +00:00
90c090c1bd Merge branch 'main' into copilot/audit-organisms-composition 2025-12-27 16:23:04 +00:00
a17ec87fcc Merge pull request #125 from johndoe6345789/copilot/triage-issues-in-repo
Fix false-positive rollback issues from pre-deployment validation failures
2025-12-27 16:21:29 +00:00
13432be4f3 Merge branch 'main' into copilot/triage-issues-in-repo 2025-12-27 16:20:26 +00:00
copilot-swe-agent[bot]
1819dc9b17 Add comprehensive triage summary
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:16:09 +00:00
copilot-swe-agent[bot]
38fec0840e Add documentation for issue triage process
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:15:18 +00:00
copilot-swe-agent[bot]
c13c862b78 Fix gated-deployment workflow to prevent false-positive rollback issues
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:14:03 +00:00
f8f225d262 Merge pull request #109 from johndoe6345789/copilot/create-error-log-screen
Add error log screen to God and SuperGod tier panels with tenant isolation
2025-12-27 16:11:20 +00:00
21d5716471 Merge branch 'main' into copilot/create-error-log-screen 2025-12-27 16:11:08 +00:00
copilot-swe-agent[bot]
3c31dfd6f0 Initial plan 2025-12-27 16:09:47 +00:00
copilot-swe-agent[bot]
2458c021ab Merge main branch changes into error log feature branch
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:07:54 +00:00
45636747b1 Merge pull request #123 from johndoe6345789/codex/enhance-workflow-system-for-triaging
Route triage workflow through Copilot
2025-12-27 16:06:01 +00:00
9c55a9983d chore: route triage through copilot 2025-12-27 16:05:47 +00:00
copilot-swe-agent[bot]
428ccfc05c Add security features and tenancy-scoped error logs for God and SuperGod tiers
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 16:00:40 +00:00
ef7543beac Merge pull request #110 from johndoe6345789/copilot/refactor-typescript-modular-structure
Add automated lambda-per-file refactoring tools with multi-language support and error-as-TODO tracking
2025-12-27 15:55:14 +00:00
copilot-swe-agent[bot]
5b3ee91fff Changes before error encountered
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 14:59:49 +00:00
copilot-swe-agent[bot]
f5eaa18e16 Add tests for error logging functionality
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 14:56:18 +00:00
copilot-swe-agent[bot]
3db55d5870 Add ErrorLog model, database utilities, and ErrorLogsTab component
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 14:52:56 +00:00
copilot-swe-agent[bot]
3f700886c2 Initial plan 2025-12-27 14:45:34 +00:00
copilot-swe-agent[bot]
4eb334a784 Add comprehensive PR summary document
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:10:54 +00:00
copilot-swe-agent[bot]
e46c7a825d Add GitHub Action workflow and TODO monitoring script with comprehensive docs
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:09:05 +00:00
copilot-swe-agent[bot]
6b9629b304 Add audit README for quick reference and summary
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:06:53 +00:00
copilot-swe-agent[bot]
08513ab8a3 Add npm scripts and comprehensive documentation for TODO to issues conversion
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:05:57 +00:00
copilot-swe-agent[bot]
8ec09f9f0b Complete organism audit and create comprehensive documentation
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:05:40 +00:00
copilot-swe-agent[bot]
e79ea8564a Add comprehensive tests and filtering options to populate-kanban script
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
2025-12-27 04:03:22 +00:00
copilot-swe-agent[bot]
61f8f70c1e Initial plan 2025-12-27 04:00:50 +00:00
copilot-swe-agent[bot]
3cabfb983a Initial plan 2025-12-27 04:00:32 +00:00
1211d714a1 Merge branch 'main' into copilot/convert-todo-items-to-issues 2025-12-27 03:59:00 +00:00
copilot-swe-agent[bot]
0d1eab930d Initial plan 2025-12-27 03:56:23 +00:00
1681 changed files with 59206 additions and 34281 deletions

View File

@@ -0,0 +1,43 @@
{
"permissions": {
"allow": [
"Bash(git mv:*)",
"Bash(ls:*)",
"Bash(find:*)",
"Bash(npm run test:unit:*)",
"Bash(npm install:*)",
"Bash(xargs:*)",
"Bash(npm run db:generate:*)",
"Bash(npx prisma generate:*)",
"Bash(DATABASE_URL=\"file:./dev.db\" npx prisma generate:*)",
"Bash(git rm:*)",
"Bash(git log:*)",
"Bash(cat:*)",
"Bash(xargs git rm:*)",
"Bash(bun add:*)",
"Bash(bun install:*)",
"Bash(test -f:*)",
"Bash(bun run typecheck:*)",
"Bash(bun run test:unit:*)",
"Bash(echo:*)",
"Bash(npx prisma validate:*)",
"Bash(npm run typecheck:*)",
"Bash(npm run lint)",
"Bash(npm audit:*)",
"Bash(bun run lint)",
"Bash(git checkout:*)",
"Bash(bun audit:*)",
"Bash(git restore:*)",
"Bash(bunx playwright:*)",
"Bash(timeout 30 bun run build:*)",
"Bash(bun run lint:fix:*)",
"Bash(bun run format:*)",
"Bash(while read file)",
"Bash(do eslint:*)",
"Bash(done)",
"Bash(eslint:*)",
"Bash(bunx eslint:*)",
"Bash(bun test:*)"
]
}
}

View File

@@ -52,6 +52,19 @@ All workflows are designed to work seamlessly with **GitHub Copilot** to assist
### 🚦 Enterprise Gated Workflows (New)
#### Issue and PR Triage (`triage.yml`) 🆕
**Triggered on:** Issues (opened/edited/reopened) and Pull Requests (opened/reopened/synchronize/edited)
**Purpose:** Quickly categorize inbound work so reviewers know what to look at first.
- Auto-applies labels for type (bug/enhancement/docs/security/testing/performance) and area (frontend/backend/database/workflows/documentation)
- Sets a default priority and highlights beginner-friendly issues
- Flags missing information (repro steps, expected/actual results, versions) with a checklist comment
- For PRs, labels areas touched, estimates risk based on change size and critical paths, and prompts for test plans/screenshots/linked issues
- Mentions **@copilot** to sanity-check the triage with GitHub-native AI (no external Codex webhooks)
This workflow runs alongside the existing PR management jobs to keep triage lightweight while preserving the richer checks in the gated pipelines.
#### 1. Enterprise Gated CI/CD Pipeline (`gated-ci.yml`)
**Triggered on:** Push to main/master/develop branches, Pull requests

View File

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Install build dependencies
run: |

View File

@@ -28,7 +28,7 @@ jobs:
has_sources: ${{ steps.check.outputs.has_sources }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Check if C++ sources exist
id: check
@@ -56,7 +56,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4
@@ -128,7 +128,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4
@@ -181,7 +181,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4
@@ -232,7 +232,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4
@@ -273,7 +273,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4

View File

@@ -24,7 +24,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -22,7 +22,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -180,7 +180,7 @@ jobs:
contains(github.event.comment.body, '@copilot')
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Parse Copilot request
uses: actions/github-script@v7
@@ -272,7 +272,7 @@ jobs:
if: github.event_name == 'pull_request' && !github.event.pull_request.draft
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -60,7 +60,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -104,7 +104,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -153,7 +153,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -207,7 +207,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -260,7 +260,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -301,7 +301,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -342,7 +342,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -454,7 +454,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -519,7 +519,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -574,7 +574,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -696,7 +696,7 @@ jobs:
build-success: ${{ steps.build-step.outcome }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -756,7 +756,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -45,7 +45,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -79,7 +79,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -111,7 +111,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -143,7 +143,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -206,7 +206,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -248,7 +248,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -293,7 +293,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -371,7 +371,7 @@ jobs:
build-success: ${{ steps.build-step.outcome }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
@@ -414,7 +414,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -48,7 +48,7 @@ jobs:
deployment-environment: ${{ steps.determine-env.outputs.environment }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -147,7 +147,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -283,7 +283,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -400,7 +400,7 @@ jobs:
if: always() && (needs.deploy-staging.result == 'success' || needs.deploy-production.result == 'success')
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Determine deployed environment
id: env
@@ -452,66 +452,166 @@ jobs:
console.log('Note: Set up actual monitoring alerts in your observability platform');
# ============================================================================
# Rollback Procedure (Manual Trigger)
# Deployment Failure Handler - Prefer Roll Forward
# ============================================================================
rollback-preparation:
name: Prepare Rollback (if needed)
deployment-failure-handler:
name: Handle Deployment Failure
runs-on: ubuntu-latest
needs: [deploy-production]
if: failure()
needs: [pre-deployment-validation, deploy-production]
if: |
failure() &&
(needs.pre-deployment-validation.result == 'failure' || needs.deploy-production.result == 'failure')
steps:
- name: Rollback instructions
- name: Determine failure stage
id: failure-stage
run: |
echo "🔄 ROLLBACK PROCEDURE"
echo "===================="
echo ""
echo "Production deployment failed or encountered issues."
echo ""
echo "Immediate actions:"
echo " 1. Assess the severity of the failure"
echo " 2. Check application logs and error rates"
echo " 3. Determine if immediate rollback is needed"
echo ""
echo "To rollback:"
echo " 1. Re-run this workflow with previous stable commit"
echo " 2. Or use manual rollback procedure:"
echo " - Revert database migrations"
echo " - Deploy previous Docker image/build"
echo " - Restore from pre-deployment backup"
echo ""
echo "Emergency contacts:"
echo " - Check on-call rotation"
echo " - Notify engineering leads"
echo " - Update status page"
if [ "${{ needs.pre-deployment-validation.result }}" == "failure" ]; then
echo "stage=pre-deployment" >> $GITHUB_OUTPUT
echo "severity=low" >> $GITHUB_OUTPUT
else
echo "stage=production" >> $GITHUB_OUTPUT
echo "severity=high" >> $GITHUB_OUTPUT
fi
- name: Create rollback issue
- name: Display roll-forward guidance
run: |
echo "⚡ DEPLOYMENT FAILURE DETECTED"
echo "================================"
echo ""
echo "Failure Stage: ${{ steps.failure-stage.outputs.stage }}"
echo "Severity: ${{ steps.failure-stage.outputs.severity }}"
echo ""
echo "🎯 RECOMMENDED APPROACH: ROLL FORWARD"
echo "────────────────────────────────────────"
echo ""
echo "Rolling forward is preferred because it:"
echo " ✅ Fixes the root cause permanently"
echo " ✅ Maintains forward progress"
echo " ✅ Builds team capability"
echo " ✅ Prevents recurrence"
echo ""
echo "Steps to roll forward:"
echo " 1. Review failure logs (link below)"
echo " 2. Identify and fix the root cause"
echo " 3. Test the fix locally"
echo " 4. Push fix to trigger new deployment"
echo ""
echo "⚠️ ROLLBACK ONLY IF:"
echo "────────────────────────"
echo " • Production is actively broken"
echo " • Users are experiencing outages"
echo " • Critical security vulnerability"
echo " • Data integrity at risk"
echo ""
if [ "${{ steps.failure-stage.outputs.stage }}" == "pre-deployment" ]; then
echo "✅ GOOD NEWS: Failure occurred pre-deployment"
echo " → Production is NOT affected"
echo " → Safe to fix and retry"
echo " → No rollback needed"
else
echo "🚨 Production deployment failed"
echo " → Assess production impact immediately"
echo " → Check monitoring dashboards"
echo " → Verify user-facing functionality"
fi
- name: Create fix-forward issue
uses: actions/github-script@v7
with:
script: |
const stage = '${{ steps.failure-stage.outputs.stage }}';
const severity = '${{ steps.failure-stage.outputs.severity }}';
const isProd = stage === 'production';
const title = isProd
? '🚨 Production Deployment Failed - Fix Required'
: '⚠️ Pre-Deployment Validation Failed';
const body = `## Deployment Failure - ${stage === 'production' ? 'Production' : 'Pre-Deployment'}
**Time:** ${new Date().toISOString()}
**Commit:** ${context.sha.substring(0, 7)}
**Workflow Run:** [View Logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
**Failure Stage:** ${stage}
**Severity:** ${severity}
${!isProd ? '✅ **Good News:** Production is NOT affected. The failure occurred during pre-deployment checks.\n' : '🚨 **Alert:** Production deployment failed. Assess impact immediately.\n'}
### 🎯 Recommended Action: Roll Forward (Fix and Re-deploy)
Rolling forward is the preferred approach because it:
- ✅ Fixes the root cause permanently
- ✅ Maintains development momentum
- ✅ Prevents the same issue from recurring
- ✅ Builds team problem-solving skills
### 📋 Fix-Forward Checklist
- [ ] **Investigate:** Review [workflow logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
- [ ] **Diagnose:** Identify root cause of failure
- [ ] **Fix:** Implement fix in a new branch/commit
- [ ] **Test:** Verify fix locally (run relevant tests/builds)
- [ ] **Deploy:** Push fix to trigger new deployment
- [ ] **Verify:** Monitor deployment and confirm success
- [ ] **Document:** Update this issue with resolution details
${isProd ? `
### 🚨 Production Impact Assessment
**Before proceeding, verify:**
- [ ] Check monitoring dashboards for errors/alerts
- [ ] Verify critical user flows are working
- [ ] Check application logs for issues
- [ ] Assess if immediate rollback is needed
` : ''}
### ⚠️ When to Rollback Instead
**Only rollback if:**
- 🔴 Production is actively broken with user impact
- 🔴 Critical security vulnerability exposed
- 🔴 Data integrity at risk
- 🔴 Cannot fix forward within acceptable timeframe
${isProd ? `
### 🔄 Rollback Procedure (if absolutely necessary)
1. **Re-run workflow** with previous stable commit SHA
2. **OR use manual rollback:**
- Rollback specific migration: \`npx prisma migrate resolve --rolled-back MIGRATION_NAME --schema=prisma/schema.prisma\`
- Deploy previous Docker image/build
- Restore from pre-deployment backup if needed
- ⚠️ Avoid \`prisma migrate reset\` in production (causes data loss)
3. **Notify:** Update team and status page
4. **Document:** Create post-mortem issue
See [Rollback Procedure](docs/deployment/rollback.md) for details.
` : `
### 💡 Common Pre-Deployment Failures
- **Prisma Generate:** Check schema.prisma syntax and DATABASE_URL
- **Build Failure:** Review TypeScript errors or missing dependencies
- **Test Failure:** Fix failing tests or update test snapshots
- **Lint Errors:** Run \`npm run lint:fix\` locally
`}
### 📚 Resources
- [Workflow Run Logs](${context.payload.repository.html_url}/actions/runs/${context.runId})
- [Commit Details](${context.payload.repository.html_url}/commit/${context.sha})
- [Deployment Documentation](docs/deployment/)
`;
const labels = isProd
? ['deployment', 'production', 'incident', 'high-priority', 'fix-forward']
: ['deployment', 'pre-deployment', 'ci-failure', 'fix-forward'];
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: '🚨 Production Deployment Failed - Rollback Required',
body: `## Production Deployment Failure
**Time:** ${new Date().toISOString()}
**Commit:** ${context.sha.substring(0, 7)}
**Workflow:** ${context.runId}
### Actions Required
- [ ] Assess impact and severity
- [ ] Determine rollback necessity
- [ ] Execute rollback procedure if needed
- [ ] Investigate root cause
- [ ] Document incident
### Rollback Options
1. Re-deploy previous stable version
2. Revert problematic commits
3. Restore from backup
See [Rollback Procedure](docs/deployment/rollback.md) for details.
`,
labels: ['deployment', 'production', 'incident', 'high-priority']
title: title,
body: body,
labels: labels
});

View File

@@ -109,7 +109,7 @@ jobs:
(github.event.action == 'labeled' && github.event.label.name == 'auto-fix')
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Analyze issue and suggest fix
uses: actions/github-script@v7
@@ -147,7 +147,7 @@ jobs:
if: github.event.action == 'labeled' && github.event.label.name == 'create-pr'
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4

View File

@@ -24,7 +24,7 @@ jobs:
}}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Check PR status and merge
uses: actions/github-script@v7

View File

@@ -18,7 +18,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -18,7 +18,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -16,7 +16,7 @@ jobs:
if: github.event.action == 'opened' || github.event.action == 'synchronize'
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0

View File

@@ -17,7 +17,7 @@ jobs:
(github.event.label.name == 'enhancement' || github.event.label.name == 'feature-request')
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Review against architecture principles
uses: actions/github-script@v7
@@ -100,7 +100,7 @@ jobs:
if: github.event.action == 'labeled' && github.event.label.name == 'enhancement'
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Check PRD for similar features
uses: actions/github-script@v7
@@ -150,7 +150,7 @@ jobs:
github.event.label.name == 'ready-to-implement'
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Generate implementation suggestion
uses: actions/github-script@v7

View File

@@ -23,7 +23,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -98,7 +98,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -168,7 +168,7 @@ jobs:
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -237,7 +237,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -307,7 +307,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -379,7 +379,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -443,7 +443,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2
@@ -505,7 +505,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0
@@ -591,7 +591,7 @@ jobs:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2

View File

@@ -20,7 +20,7 @@ jobs:
working-directory: frontends/nextjs
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Bun
uses: oven-sh/setup-bun@v2

162
.github/workflows/todo-to-issues.yml vendored Normal file
View File

@@ -0,0 +1,162 @@
name: TODO to Issues Sync
# This workflow can be triggered manually to convert TODO items to GitHub issues
# or can be run on a schedule to keep issues in sync with TODO files
on:
workflow_dispatch:
inputs:
mode:
description: 'Execution mode'
required: true
type: choice
options:
- dry-run
- export-json
- create-issues
default: 'dry-run'
filter_priority:
description: 'Filter by priority (leave empty for all)'
required: false
type: choice
options:
- ''
- critical
- high
- medium
- low
filter_label:
description: 'Filter by label (e.g., security, frontend)'
required: false
type: string
exclude_checklist:
description: 'Exclude checklist items'
required: false
type: boolean
default: true
limit:
description: 'Limit number of issues (0 for no limit)'
required: false
type: number
default: 0
# Uncomment to run on a schedule (e.g., weekly)
# schedule:
# - cron: '0 0 * * 0' # Every Sunday at midnight
jobs:
convert-todos:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install GitHub CLI
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
&& sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& sudo apt update \
&& sudo apt install gh -y
- name: Authenticate GitHub CLI
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "$GH_TOKEN" | gh auth login --with-token
gh auth status
- name: Build command arguments
id: args
run: |
ARGS=""
# Add mode
if [ "${{ inputs.mode }}" = "dry-run" ]; then
ARGS="$ARGS --dry-run"
elif [ "${{ inputs.mode }}" = "export-json" ]; then
ARGS="$ARGS --output todos-export.json"
elif [ "${{ inputs.mode }}" = "create-issues" ]; then
ARGS="$ARGS --create"
fi
# Add filters
if [ -n "${{ inputs.filter_priority }}" ]; then
ARGS="$ARGS --filter-priority ${{ inputs.filter_priority }}"
fi
if [ -n "${{ inputs.filter_label }}" ]; then
ARGS="$ARGS --filter-label ${{ inputs.filter_label }}"
fi
if [ "${{ inputs.exclude_checklist }}" = "true" ]; then
ARGS="$ARGS --exclude-checklist"
fi
# Add limit if specified
if [ "${{ inputs.limit }}" != "0" ]; then
ARGS="$ARGS --limit ${{ inputs.limit }}"
fi
echo "args=$ARGS" >> $GITHUB_OUTPUT
echo "Command arguments: $ARGS"
- name: Run populate-kanban script
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python3 tools/project-management/populate-kanban.py ${{ steps.args.outputs.args }}
- name: Upload JSON export (if applicable)
if: inputs.mode == 'export-json'
uses: actions/upload-artifact@v4
with:
name: todos-export
path: todos-export.json
retention-days: 30
- name: Create summary
if: always()
run: |
echo "## TODO to Issues Conversion" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Mode:** ${{ inputs.mode }}" >> $GITHUB_STEP_SUMMARY
if [ -n "${{ inputs.filter_priority }}" ]; then
echo "**Priority Filter:** ${{ inputs.filter_priority }}" >> $GITHUB_STEP_SUMMARY
fi
if [ -n "${{ inputs.filter_label }}" ]; then
echo "**Label Filter:** ${{ inputs.filter_label }}" >> $GITHUB_STEP_SUMMARY
fi
if [ "${{ inputs.exclude_checklist }}" = "true" ]; then
echo "**Checklist Items:** Excluded" >> $GITHUB_STEP_SUMMARY
fi
if [ "${{ inputs.limit }}" != "0" ]; then
echo "**Limit:** ${{ inputs.limit }} items" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
if [ "${{ inputs.mode }}" = "export-json" ]; then
echo "✅ JSON export created successfully" >> $GITHUB_STEP_SUMMARY
echo "Download the artifact from the workflow run page" >> $GITHUB_STEP_SUMMARY
elif [ "${{ inputs.mode }}" = "create-issues" ]; then
echo "✅ GitHub issues created successfully" >> $GITHUB_STEP_SUMMARY
echo "View issues: https://github.com/${{ github.repository }}/issues" >> $GITHUB_STEP_SUMMARY
else
echo " Dry run completed - no issues created" >> $GITHUB_STEP_SUMMARY
fi

198
.github/workflows/triage.yml vendored Normal file
View File

@@ -0,0 +1,198 @@
# Automated triage for issues and pull requests: infers labels from title/body
# (issues) or changed files (PRs), applies them, and posts a summary comment.
name: Issue and PR Triage

on:
  issues:
    types: [opened, edited, reopened]
  pull_request:
    types: [opened, reopened, synchronize, edited]

# Least-privilege token: read code, write labels/comments only.
permissions:
  contents: read
  issues: write
  pull-requests: write

jobs:
  triage-issue:
    name: Triage Issues
    if: github.event_name == 'issues'
    runs-on: ubuntu-latest
    steps:
      - name: Categorize and label issue
        uses: actions/github-script@v7
        with:
          script: |
            const issue = context.payload.issue;
            const title = (issue.title || '').toLowerCase();
            const body = (issue.body || '').toLowerCase();
            const text = `${title}\n${body}`;
            const labels = new Set();
            const missing = [];
            // Keyword heuristics for issue type.
            const typeMatchers = [
              { regex: /bug|error|crash|broken|fail/, label: 'bug' },
              { regex: /feature|enhancement|add|new|implement/, label: 'enhancement' },
              { regex: /document|readme|docs|guide/, label: 'documentation' },
              { regex: /test|testing|spec|e2e/, label: 'testing' },
              { regex: /security|vulnerability|exploit|xss|sql/, label: 'security' },
              { regex: /performance|slow|optimize|speed/, label: 'performance' },
            ];
            for (const match of typeMatchers) {
              if (text.match(match.regex)) {
                labels.add(match.label);
              }
            }
            // Keyword heuristics for affected area.
            const areaMatchers = [
              { regex: /frontend|react|next|ui|component|browser/, label: 'area: frontend' },
              { regex: /api|backend|service|server/, label: 'area: backend' },
              { regex: /database|prisma|schema|sql/, label: 'area: database' },
              { regex: /workflow|github actions|ci|pipeline/, label: 'area: workflows' },
              { regex: /docs|readme|guide/, label: 'area: documentation' },
            ];
            for (const match of areaMatchers) {
              if (text.match(match.regex)) {
                labels.add(match.label);
              }
            }
            // Priority defaults to medium unless urgency/low-impact keywords appear.
            if (text.match(/critical|urgent|asap|blocker/)) {
              labels.add('priority: high');
            } else if (text.match(/minor|low|nice to have/)) {
              labels.add('priority: low');
            } else {
              labels.add('priority: medium');
            }
            if (text.match(/beginner|easy|simple|starter/) || labels.size <= 2) {
              labels.add('good first issue');
            }
            // Flag standard bug-report sections that the body does not mention.
            const reproductionHints = ['steps to reproduce', 'expected', 'actual'];
            for (const hint of reproductionHints) {
              if (!body.includes(hint)) {
                missing.push(hint);
              }
            }
            const supportInfo = body.includes('version') || body.match(/v\d+\.\d+/);
            if (!supportInfo) {
              missing.push('version information');
            }
            if (labels.size > 0) {
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issue.number,
                labels: Array.from(labels),
              }).catch(e => console.log('Some labels may not exist:', e.message));
            }
            const checklist = missing.map(item => `- [ ] Add ${item}`).join('\n') || '- [x] Description includes key details.';
            const summary = Array.from(labels).map(l => `- ${l}`).join('\n') || '- No labels inferred yet.';
            const comment = [
              '👋 Thanks for reporting an issue! I ran a quick triage:',
              '',
              '**Proposed labels:**',
              summary,
              '',
              '**Missing details:**',
              checklist,
              '',
              'Adding the missing details will help reviewers respond faster. If the proposed labels look wrong, feel free to update them.',
              '',
              '@copilot Please review this triage and refine labels or request any additional context needed—no Codex webhooks involved.'
            ].join('\n');
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: issue.number,
              body: comment,
            });
  triage-pr:
    name: Triage Pull Requests
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    steps:
      - name: Analyze PR files and label
        uses: actions/github-script@v7
        with:
          script: |
            const pr = context.payload.pull_request;
            const { data: files } = await github.rest.pulls.listFiles({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: pr.number,
            });
            const labels = new Set();
            // Path-based heuristics for affected area.
            const fileFlags = {
              workflows: files.some(f => f.filename.includes('.github/workflows')),
              docs: files.some(f => f.filename.match(/\.(md|mdx)$/) || f.filename.startsWith('docs/')),
              frontend: files.some(f => f.filename.includes('frontends/nextjs')),
              db: files.some(f => f.filename.includes('prisma/') || f.filename.includes('dbal/')),
              tests: files.some(f => f.filename.match(/(test|spec)\.[jt]sx?/)),
            };
            if (fileFlags.workflows) labels.add('area: workflows');
            if (fileFlags.docs) labels.add('area: documentation');
            if (fileFlags.frontend) labels.add('area: frontend');
            if (fileFlags.db) labels.add('area: database');
            if (fileFlags.tests) labels.add('tests');
            // Risk scoring: workflow/schema changes or large diffs are high risk.
            const totalChanges = files.reduce((sum, f) => sum + f.additions + f.deletions, 0);
            const highRiskPaths = files.filter(f => f.filename.includes('.github/workflows') || f.filename.includes('prisma/'));
            let riskLabel = 'risk: low';
            if (highRiskPaths.length > 0 || totalChanges >= 400) {
              riskLabel = 'risk: high';
            } else if (totalChanges >= 150) {
              riskLabel = 'risk: medium';
            }
            labels.add(riskLabel);
            const missing = [];
            const body = (pr.body || '').toLowerCase();
            if (!body.includes('test')) missing.push('Test plan');
            if (fileFlags.frontend && !body.includes('screenshot')) missing.push('Screenshots for UI changes');
            if (!body.match(/#\d+|https:\/\/github\.com/)) missing.push('Linked issue reference');
            if (labels.size > 0) {
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: pr.number,
                labels: Array.from(labels),
              }).catch(e => console.log('Some labels may not exist:', e.message));
            }
            const labelSummary = Array.from(labels).map(l => `- ${l}`).join('\n');
            const missingList = missing.length ? missing.map(item => `- [ ] ${item}`).join('\n') : '- [x] Description includes required context.';
            const comment = [
              '🤖 **Automated PR triage**',
              '',
              '**Proposed labels:**',
              labelSummary,
              '',
              '**Description check:**',
              missingList,
              '',
              'If any labels look incorrect, feel free to adjust them. Closing the missing items will help reviewers move faster.',
              '',
              '@copilot Please double-check this triage (no Codex webhook) and add any extra labels or questions for the author.'
            ].join('\n');
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: pr.number,
              body: comment,
            });

6
.gitignore vendored
View File

@@ -88,8 +88,14 @@ lint-output.txt
stub-patterns.json
complexity-report.json
# TODO management
todos-baseline.json
todos-export.json
todos*.json
# Project-specific
**/agent-eval-report*
vite.config.ts.bak*
.cache/
dist-old/
.vscode/claudesync.json

View File

@@ -57,5 +57,7 @@
"https://docs.github.com/*": true,
"https://www.npmjs.com/*": true,
"https://registry.npmjs.org/*": true
}
},
"claudeCode.allowDangerouslySkipPermissions": true,
"claudeCode.initialPermissionMode": "bypassPermissions"
}

View File

@@ -80,6 +80,29 @@ MetaBuilder is a **data-driven, multi-tenant platform** with these core features
## Refactor Plan
### 🚀 Auto Code Extractor 3000™ - Automated File Splitting
**NEW: One-command solution to split large files (>150 LOC) into modular structure!**
We have 62 files exceeding 150 lines. The Auto Code Extractor 3000™ automatically extracts functions into individual files following the lambda-per-file pattern.
#### Quick Commands
```bash
# Preview what will be extracted
npm run extract:preview
# Extract 5 files
npm run extract:quick
# Extract all high-priority files (automated)
npm run extract:auto
```
**📖 [Quick Start Guide](./tools/refactoring/QUICK_START.md)** | **📚 [Full Documentation](./tools/refactoring/AUTO_CODE_EXTRACTOR_3000.md)**
---
### Next.js to Lua Conversion TODO
#### Table of Contents

View File

@@ -1,258 +1,3 @@
/**
* @file acl-adapter.ts
* @description ACL adapter that wraps a base adapter with access control
*/
import type { DBALAdapter, AdapterCapabilities } from './adapter'
import type { ListOptions, ListResult } from '../core/foundation/types'
import type { User, ACLRule } from './acl/types'
import { resolvePermissionOperation } from './acl/resolve-permission-operation'
import { checkPermission } from './acl/check-permission'
import { checkRowLevelAccess } from './acl/check-row-level-access'
import { logAudit } from './acl/audit-logger'
import { defaultACLRules } from './acl/default-rules'
/**
 * Access-control wrapper around a base DBAL adapter.
 *
 * Every operation is permission-checked via `checkPermission`; single-record
 * reads and mutations additionally pass the fetched record through
 * `checkRowLevelAccess`; and each call's outcome is (optionally) audit-logged.
 */
export class ACLAdapter implements DBALAdapter {
  private base: DBALAdapter
  private currentUser: User
  private activeRules: ACLRule[]
  private auditEnabled: boolean

  constructor(
    baseAdapter: DBALAdapter,
    user: User,
    options?: {
      rules?: ACLRule[]
      auditLog?: boolean
    }
  ) {
    this.base = baseAdapter
    this.currentUser = user
    this.activeRules = options?.rules || defaultACLRules
    this.auditEnabled = options?.auditLog ?? true
  }

  /** Emit an audit entry unless audit logging was disabled at construction. */
  private log(entity: string, operation: string, success: boolean, message?: string): void {
    if (this.auditEnabled) {
      logAudit(entity, operation, success, this.currentUser, message)
    }
  }

  /** Require `operation` permission on `entity` for the current user. */
  private authorize(entity: string, operation: string): void {
    checkPermission(entity, operation, this.currentUser, this.activeRules, this.log.bind(this))
  }

  /** Require row-level access to `record` for the current user. */
  private authorizeRow(entity: string, operation: string, record: Record<string, unknown>): void {
    checkRowLevelAccess(entity, operation, record, this.currentUser, this.activeRules, this.log.bind(this))
  }

  /**
   * Run `action`, audit-logging success or failure under `operation`.
   * Failures are logged with the error message and re-thrown.
   */
  private async audited<T>(entity: string, operation: string, action: () => Promise<T>): Promise<T> {
    try {
      const result = await action()
      this.log(entity, operation, true)
      return result
    } catch (error) {
      this.log(entity, operation, false, (error as Error).message)
      throw error
    }
  }

  async create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    this.authorize(entity, 'create')
    return this.audited(entity, 'create', () => this.base.create(entity, data))
  }

  async read(entity: string, id: string): Promise<unknown | null> {
    this.authorize(entity, 'read')
    return this.audited(entity, 'read', async () => {
      const record = await this.base.read(entity, id)
      if (record) {
        this.authorizeRow(entity, 'read', record as Record<string, unknown>)
      }
      return record
    })
  }

  async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    this.authorize(entity, 'update')
    // Row-level check runs on the pre-image, outside the audited scope, so a
    // failed pre-read is not recorded as an 'update' failure (same as before).
    const existing = await this.base.read(entity, id)
    if (existing) {
      this.authorizeRow(entity, 'update', existing as Record<string, unknown>)
    }
    return this.audited(entity, 'update', () => this.base.update(entity, id, data))
  }

  async delete(entity: string, id: string): Promise<boolean> {
    this.authorize(entity, 'delete')
    const existing = await this.base.read(entity, id)
    if (existing) {
      this.authorizeRow(entity, 'delete', existing as Record<string, unknown>)
    }
    return this.audited(entity, 'delete', () => this.base.delete(entity, id))
  }

  async list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    this.authorize(entity, 'list')
    return this.audited(entity, 'list', () => this.base.list(entity, options))
  }

  async findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    // Permission is checked under the resolved operation; the audit entry
    // keeps the method name, mirroring the original behavior.
    const resolved = resolvePermissionOperation('findFirst')
    this.authorize(entity, resolved)
    return this.audited(entity, 'findFirst', async () => {
      const record = await this.base.findFirst(entity, filter)
      if (record) {
        this.authorizeRow(entity, resolved, record as Record<string, unknown>)
      }
      return record
    })
  }

  async findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    const resolved = resolvePermissionOperation('findByField')
    this.authorize(entity, resolved)
    return this.audited(entity, 'findByField', async () => {
      const record = await this.base.findByField(entity, field, value)
      if (record) {
        this.authorizeRow(entity, resolved, record as Record<string, unknown>)
      }
      return record
    })
  }

  async upsert(
    entity: string,
    filter: Record<string, unknown>,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>
  ): Promise<unknown> {
    // An upsert may create or update, so both permissions are required.
    this.authorize(entity, 'create')
    this.authorize(entity, 'update')
    return this.audited(entity, 'upsert', () => this.base.upsert(entity, filter, createData, updateData))
  }

  async updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
    this.authorize(entity, resolvePermissionOperation('updateByField'))
    return this.audited(entity, 'updateByField', () => this.base.updateByField(entity, field, value, data))
  }

  async deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    this.authorize(entity, resolvePermissionOperation('deleteByField'))
    return this.audited(entity, 'deleteByField', () => this.base.deleteByField(entity, field, value))
  }

  async createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    this.authorize(entity, resolvePermissionOperation('createMany'))
    return this.audited(entity, 'createMany', () => this.base.createMany(entity, data))
  }

  async updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
    this.authorize(entity, resolvePermissionOperation('updateMany'))
    return this.audited(entity, 'updateMany', () => this.base.updateMany(entity, filter, data))
  }

  async deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    this.authorize(entity, resolvePermissionOperation('deleteMany'))
    return this.audited(entity, 'deleteMany', () => this.base.deleteMany(entity, filter))
  }

  async getCapabilities(): Promise<AdapterCapabilities> {
    return this.base.getCapabilities()
  }

  async close(): Promise<void> {
    await this.base.close()
  }
}
// Re-export types for convenience
export type { User, ACLRule } from './acl/types'
export { ACLAdapter } from './acl-adapter'
export type { ACLAdapterOptions, ACLContext, ACLRule, User } from './acl-adapter/types'
export { defaultACLRules } from './acl/default-rules'

View File

@@ -0,0 +1,86 @@
import type { AdapterCapabilities, DBALAdapter } from '../adapter'
import type { ListOptions, ListResult } from '../../core/foundation/types'
import { createContext } from './context'
import { createReadStrategy } from './read-strategy'
import { createWriteStrategy } from './write-strategy'
import type { ACLAdapterOptions, ACLContext, ACLRule, User } from './types'
/**
 * ACL facade over a base DBAL adapter.
 *
 * Construction wires a shared `ACLContext` (user, rules, audit logger) into a
 * read strategy and a write strategy; every public method is a thin delegation
 * to one of those strategies.
 */
export class ACLAdapter implements DBALAdapter {
  private readonly ctx: ACLContext
  private readonly reads: ReturnType<typeof createReadStrategy>
  private readonly writes: ReturnType<typeof createWriteStrategy>

  constructor(baseAdapter: DBALAdapter, user: User, options?: ACLAdapterOptions) {
    this.ctx = createContext(baseAdapter, user, options)
    this.reads = createReadStrategy(this.ctx)
    this.writes = createWriteStrategy(this.ctx)
  }

  // --- reads ---------------------------------------------------------------

  read(entity: string, id: string): Promise<unknown | null> {
    return this.reads.read(entity, id)
  }

  list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    return this.reads.list(entity, options)
  }

  findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    return this.reads.findFirst(entity, filter)
  }

  findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    return this.reads.findByField(entity, field, value)
  }

  // --- writes --------------------------------------------------------------

  create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    return this.writes.create(entity, data)
  }

  update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    return this.writes.update(entity, id, data)
  }

  delete(entity: string, id: string): Promise<boolean> {
    return this.writes.delete(entity, id)
  }

  upsert(
    entity: string,
    filter: Record<string, unknown>,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>,
  ): Promise<unknown> {
    return this.writes.upsert(entity, filter, createData, updateData)
  }

  updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
    return this.writes.updateByField(entity, field, value, data)
  }

  deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    return this.writes.deleteByField(entity, field, value)
  }

  createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    return this.writes.createMany(entity, data)
  }

  updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
    return this.writes.updateMany(entity, filter, data)
  }

  deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    return this.writes.deleteMany(entity, filter)
  }

  // --- passthrough ---------------------------------------------------------

  getCapabilities(): Promise<AdapterCapabilities> {
    return this.ctx.baseAdapter.getCapabilities()
  }

  close(): Promise<void> {
    return this.ctx.baseAdapter.close()
  }
}
export type { ACLAdapterOptions, ACLContext, ACLRule, User }
export { defaultACLRules } from '../acl/default-rules'

View File

@@ -0,0 +1,67 @@
import type { ACLContext } from './context'
import { enforcePermission, enforceRowAccess, resolveOperation, withAudit } from './guards'

/** Find the first record matching `filter`, with row-level ACL on the result. */
export const findFirst = (context: ACLContext) => async (entity: string, filter?: Record<string, unknown>) => {
  const operation = resolveOperation('findFirst')
  return withAudit(context, entity, operation, async () => {
    const result = await context.baseAdapter.findFirst(entity, filter)
    if (result) {
      enforceRowAccess(context, entity, operation, result as Record<string, unknown>)
    }
    return result
  })
}

/** Find one record by `field === value`, with row-level ACL on the result. */
export const findByField = (context: ACLContext) => async (entity: string, field: string, value: unknown) => {
  const operation = resolveOperation('findByField')
  return withAudit(context, entity, operation, async () => {
    const result = await context.baseAdapter.findByField(entity, field, value)
    if (result) {
      enforceRowAccess(context, entity, operation, result as Record<string, unknown>)
    }
    return result
  })
}

/**
 * Upsert a record. An upsert may create or update, so — matching the legacy
 * ACLAdapter, which checked both 'create' and 'update' — BOTH permissions are
 * required up front rather than a dedicated 'upsert' rule. The audit entry is
 * still written under 'upsert'.
 */
export const upsert = (context: ACLContext) => async (
  entity: string,
  filter: Record<string, unknown>,
  createData: Record<string, unknown>,
  updateData: Record<string, unknown>,
) => {
  enforcePermission(context, entity, 'create')
  enforcePermission(context, entity, 'update')
  try {
    const result = await context.baseAdapter.upsert(entity, filter, createData, updateData)
    context.logger(entity, 'upsert', true)
    return result
  } catch (error) {
    context.logger(entity, 'upsert', false, (error as Error).message)
    throw error
  }
}

/** Update one record selected by `field === value`. */
export const updateByField = (context: ACLContext) => async (
  entity: string,
  field: string,
  value: unknown,
  data: Record<string, unknown>,
) => {
  const operation = resolveOperation('updateByField')
  return withAudit(context, entity, operation, () => context.baseAdapter.updateByField(entity, field, value, data))
}

/** Delete one record selected by `field === value`. */
export const deleteByField = (context: ACLContext) => async (entity: string, field: string, value: unknown) => {
  const operation = resolveOperation('deleteByField')
  return withAudit(context, entity, operation, () => context.baseAdapter.deleteByField(entity, field, value))
}

/** Bulk-create records; returns the created count. */
export const createMany = (context: ACLContext) => async (entity: string, data: Record<string, unknown>[]) => {
  const operation = resolveOperation('createMany')
  return withAudit(context, entity, operation, () => context.baseAdapter.createMany(entity, data))
}

/** Bulk-update records matching `filter`; returns the updated count. */
export const updateMany = (context: ACLContext) => async (
  entity: string,
  filter: Record<string, unknown>,
  data: Record<string, unknown>,
) => {
  const operation = resolveOperation('updateMany')
  return withAudit(context, entity, operation, () => context.baseAdapter.updateMany(entity, filter, data))
}

/** Bulk-delete records matching `filter`; returns the deleted count. */
export const deleteMany = (context: ACLContext) => async (entity: string, filter?: Record<string, unknown>) => {
  const operation = resolveOperation('deleteMany')
  return withAudit(context, entity, operation, () => context.baseAdapter.deleteMany(entity, filter))
}

View File

@@ -0,0 +1,26 @@
import type { DBALAdapter } from '../adapter'
import type { ACLAdapterOptions, ACLContext, ACLRule, User } from './types'
import { logAudit } from '../acl/audit-logger'
import { defaultACLRules } from '../acl/default-rules'
/**
 * Build the shared ACL context handed to every strategy: the wrapped base
 * adapter, the acting user, the effective rule set, and an audit logger that
 * becomes a no-op when auditing is disabled.
 */
export const createContext = (
  baseAdapter: DBALAdapter,
  user: User,
  options?: ACLAdapterOptions,
): ACLContext => {
  const auditEnabled = options?.auditLog ?? true
  const effectiveRules = options?.rules || defaultACLRules

  const emit = (entity: string, operation: string, success: boolean, message?: string) => {
    if (!auditEnabled) return
    logAudit(entity, operation, success, user, message)
  }

  return {
    baseAdapter,
    user,
    rules: effectiveRules,
    auditLog: auditEnabled,
    logger: emit,
  }
}

View File

@@ -0,0 +1,41 @@
import type { ListOptions, ListResult } from '../../core/foundation/types'
import type { ACLContext } from './context'
import { enforceRowAccess, withAudit } from './guards'
/** Create a record after an audited 'create' permission check. */
export const createEntity = (context: ACLContext) => async (entity: string, data: Record<string, unknown>) => {
  return withAudit(context, entity, 'create', () => context.baseAdapter.create(entity, data))
}

/** Read a record by id; applies row-level ACL to the fetched record if found. */
export const readEntity = (context: ACLContext) => async (entity: string, id: string) => {
  return withAudit(context, entity, 'read', async () => {
    const result = await context.baseAdapter.read(entity, id)
    if (result) {
      enforceRowAccess(context, entity, 'read', result as Record<string, unknown>)
    }
    return result
  })
}

/** Update a record by id; row-level ACL is checked against the pre-image. */
export const updateEntity = (context: ACLContext) => async (entity: string, id: string, data: Record<string, unknown>) => {
  return withAudit(context, entity, 'update', async () => {
    // Fetch the existing record first so row-level rules see pre-update state.
    const existing = await context.baseAdapter.read(entity, id)
    if (existing) {
      enforceRowAccess(context, entity, 'update', existing as Record<string, unknown>)
    }
    return context.baseAdapter.update(entity, id, data)
  })
}

/** Delete a record by id; row-level ACL is checked against the record first. */
export const deleteEntity = (context: ACLContext) => async (entity: string, id: string) => {
  return withAudit(context, entity, 'delete', async () => {
    const existing = await context.baseAdapter.read(entity, id)
    if (existing) {
      enforceRowAccess(context, entity, 'delete', existing as Record<string, unknown>)
    }
    return context.baseAdapter.delete(entity, id)
  })
}

/** List records with optional paging/filter; no per-row ACL is applied here. */
export const listEntities = (context: ACLContext) => async (entity: string, options?: ListOptions): Promise<ListResult<unknown>> => {
  return withAudit(context, entity, 'list', () => context.baseAdapter.list(entity, options))
}

View File

@@ -0,0 +1,37 @@
import { checkPermission } from '../acl/check-permission'
import { checkRowLevelAccess } from '../acl/check-row-level-access'
import { resolvePermissionOperation } from '../acl/resolve-permission-operation'
import type { ACLContext } from './types'
/** Throw (via checkPermission) unless the context user may perform `operation` on `entity`. */
export const enforcePermission = (context: ACLContext, entity: string, operation: string): void => {
  checkPermission(entity, operation, context.user, context.rules, context.logger)
}

/** Throw unless the context user passes row-level ACL for `record`. */
export const enforceRowAccess = (
  context: ACLContext,
  entity: string,
  operation: string,
  record: Record<string, unknown>,
): void => {
  checkRowLevelAccess(entity, operation, record, context.user, context.rules, context.logger)
}

/**
 * Permission-check, run `action`, and audit-log the outcome.
 * Success is logged after `action` resolves; failures are logged with the
 * error message and re-thrown unchanged.
 */
export const withAudit = async <T>(
  context: ACLContext,
  entity: string,
  operation: string,
  action: () => Promise<T>,
): Promise<T> => {
  enforcePermission(context, entity, operation)
  try {
    const result = await action()
    context.logger(entity, operation, true)
    return result
  } catch (error) {
    context.logger(entity, operation, false, (error as Error).message)
    throw error
  }
}

/** Maps method names (e.g. 'findFirst') onto base permission operations. */
export const resolveOperation = resolvePermissionOperation

View File

@@ -0,0 +1,3 @@
export { ACLAdapter } from './acl-adapter'
export type { ACLAdapterOptions, ACLContext, ACLRule, User } from './types'
export { defaultACLRules } from '../acl/default-rules'

View File

@@ -0,0 +1,48 @@
import type { ListOptions, ListResult } from '../../core/foundation/types'
import { enforceRowAccess, resolveOperation, withAudit } from './guards'
import type { ACLContext } from './types'
/**
 * Build the read-side ACL strategy: every method is permission-checked and
 * audit-logged via withAudit, and single-record results additionally pass a
 * row-level ACL check before being returned.
 */
export const createReadStrategy = (context: ACLContext) => {
  // Apply row-level ACL to a fetched record (no-op for null/undefined).
  const screened = (entity: string, operation: string, record: unknown): unknown => {
    if (record) {
      enforceRowAccess(context, entity, operation, record as Record<string, unknown>)
    }
    return record
  }

  const read = (entity: string, id: string): Promise<unknown | null> =>
    withAudit(context, entity, 'read', async () =>
      screened(entity, 'read', await context.baseAdapter.read(entity, id)))

  const list = (entity: string, options?: ListOptions): Promise<ListResult<unknown>> =>
    withAudit(context, entity, 'list', () => context.baseAdapter.list(entity, options))

  const findFirst = (entity: string, filter?: Record<string, unknown>): Promise<unknown | null> => {
    const operation = resolveOperation('findFirst')
    return withAudit(context, entity, operation, async () =>
      screened(entity, operation, await context.baseAdapter.findFirst(entity, filter)))
  }

  const findByField = (entity: string, field: string, value: unknown): Promise<unknown | null> => {
    const operation = resolveOperation('findByField')
    return withAudit(context, entity, operation, async () =>
      screened(entity, operation, await context.baseAdapter.findByField(entity, field, value)))
  }

  return { read, list, findFirst, findByField }
}

View File

@@ -0,0 +1,27 @@
import type { DBALAdapter } from '../adapter'
/** Acting user identity used for all permission and row-level checks. */
export interface User {
  id: string
  username: string
  // Role ladder referenced by ACL rules; ordering semantics live in the rules.
  role: 'user' | 'admin' | 'god' | 'supergod'
}

/** A single access-control rule binding roles to operations on one entity. */
export interface ACLRule {
  entity: string
  roles: string[]
  operations: string[]
  // Optional per-record predicate; when present it gates individual rows.
  rowLevelFilter?: (user: User, data: Record<string, unknown>) => boolean
}

/** Construction options for ACLAdapter. */
export interface ACLAdapterOptions {
  // Custom rule set; defaults to defaultACLRules when omitted.
  rules?: ACLRule[]
  // Enable audit logging; defaults to true when omitted.
  auditLog?: boolean
}

/** Shared state threaded through the read/write strategies and guards. */
export interface ACLContext {
  baseAdapter: DBALAdapter
  user: User
  rules: ACLRule[]
  auditLog: boolean
  // Audit sink: (entity, operation, success, optional failure message).
  logger: (entity: string, operation: string, success: boolean, message?: string) => void
}

View File

@@ -0,0 +1,83 @@
import { enforcePermission, enforceRowAccess, resolveOperation, withAudit } from './guards'
import type { ACLContext } from './types'

/**
 * Build the write-side ACL strategy: every mutation is permission-checked and
 * audit-logged via withAudit; update/delete additionally apply row-level ACL
 * to the existing record before touching it.
 */
export const createWriteStrategy = (context: ACLContext) => {
  const create = async (entity: string, data: Record<string, unknown>): Promise<unknown> => {
    return withAudit(context, entity, 'create', () => context.baseAdapter.create(entity, data))
  }

  const update = async (entity: string, id: string, data: Record<string, unknown>): Promise<unknown> => {
    return withAudit(context, entity, 'update', async () => {
      // Row-level rules are evaluated against the pre-update record.
      const existing = await context.baseAdapter.read(entity, id)
      if (existing) {
        enforceRowAccess(context, entity, 'update', existing as Record<string, unknown>)
      }
      return context.baseAdapter.update(entity, id, data)
    })
  }

  const remove = async (entity: string, id: string): Promise<boolean> => {
    return withAudit(context, entity, 'delete', async () => {
      const existing = await context.baseAdapter.read(entity, id)
      if (existing) {
        enforceRowAccess(context, entity, 'delete', existing as Record<string, unknown>)
      }
      return context.baseAdapter.delete(entity, id)
    })
  }

  /**
   * Upsert may create or update, so — matching the legacy ACLAdapter, which
   * checked both 'create' and 'update' — BOTH permissions are required up
   * front rather than a dedicated 'upsert' rule. The audit entry is still
   * written under 'upsert'.
   */
  const upsert = async (
    entity: string,
    filter: Record<string, unknown>,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>,
  ): Promise<unknown> => {
    enforcePermission(context, entity, 'create')
    enforcePermission(context, entity, 'update')
    try {
      const result = await context.baseAdapter.upsert(entity, filter, createData, updateData)
      context.logger(entity, 'upsert', true)
      return result
    } catch (error) {
      context.logger(entity, 'upsert', false, (error as Error).message)
      throw error
    }
  }

  const updateByField = async (
    entity: string,
    field: string,
    value: unknown,
    data: Record<string, unknown>,
  ): Promise<unknown> => {
    const operation = resolveOperation('updateByField')
    return withAudit(context, entity, operation, () => context.baseAdapter.updateByField(entity, field, value, data))
  }

  const deleteByField = async (entity: string, field: string, value: unknown): Promise<boolean> => {
    const operation = resolveOperation('deleteByField')
    return withAudit(context, entity, operation, () => context.baseAdapter.deleteByField(entity, field, value))
  }

  const createMany = async (entity: string, data: Record<string, unknown>[]): Promise<number> => {
    const operation = resolveOperation('createMany')
    return withAudit(context, entity, operation, () => context.baseAdapter.createMany(entity, data))
  }

  const updateMany = async (
    entity: string,
    filter: Record<string, unknown>,
    data: Record<string, unknown>,
  ): Promise<number> => {
    const operation = resolveOperation('updateMany')
    return withAudit(context, entity, operation, () => context.baseAdapter.updateMany(entity, filter, data))
  }

  const deleteMany = async (entity: string, filter?: Record<string, unknown>): Promise<number> => {
    const operation = resolveOperation('deleteMany')
    return withAudit(context, entity, operation, () => context.baseAdapter.deleteMany(entity, filter))
  }

  return {
    create,
    update,
    delete: remove,
    upsert,
    updateByField,
    deleteByField,
    createMany,
    updateMany,
    deleteMany,
  }
}

View File

@@ -3,7 +3,7 @@
* @description Audit logging for ACL operations
*/
import type { User } from './types'
import type { User } from '../acl-adapter/types'
/**
* Log audit entry for ACL operation

View File

@@ -4,7 +4,7 @@
*/
import { DBALError } from '../../core/foundation/errors'
import type { User, ACLRule } from './types'
import type { ACLRule, User } from '../acl-adapter/types'
/**
* Check if user has permission to perform operation on entity

View File

@@ -4,7 +4,7 @@
*/
import { DBALError } from '../../core/foundation/errors'
import type { User, ACLRule } from './types'
import type { ACLRule, User } from '../acl-adapter/types'
/**
* Check row-level access for specific data

View File

@@ -3,7 +3,7 @@
* @description Default ACL rules for entities
*/
import type { ACLRule } from './types'
import type { ACLRule } from '../acl-adapter/types'
export const defaultACLRules: ACLRule[] = [
{

View File

@@ -1,350 +0,0 @@
import { PrismaClient } from '@prisma/client'
import type { DBALAdapter, AdapterCapabilities } from './adapter'
import type { ListOptions, ListResult } from '../core/foundation/types'
import { DBALError } from '../core/foundation/errors'
// Dialects the adapter can be configured for; 'generic' is the fallback when
// none can be inferred from the database URL.
type PrismaAdapterDialect = 'postgres' | 'mysql' | 'sqlite' | 'generic'

/** Construction options for PrismaAdapter. */
export interface PrismaAdapterOptions {
  // Per-query timeout in milliseconds; the constructor defaults this to 30000.
  queryTimeout?: number
  // Explicit dialect override; otherwise inferred from the database URL.
  dialect?: PrismaAdapterDialect
}
/**
 * Prisma-backed implementation of the DBAL adapter contract.
 *
 * Every operation resolves the Prisma model delegate for the entity name,
 * enforces a per-query timeout (default 30s), and normalizes thrown errors
 * into DBALError values.
 */
export class PrismaAdapter implements DBALAdapter {
  private prisma: PrismaClient
  private queryTimeout: number
  private dialect: PrismaAdapterDialect

  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    // An explicit dialect option wins; otherwise infer from the URL scheme.
    const inferredDialect = options?.dialect ?? PrismaAdapter.inferDialectFromUrl(databaseUrl)
    this.dialect = inferredDialect ?? 'generic'
    this.prisma = new PrismaClient({
      datasources: databaseUrl ? { db: { url: databaseUrl } } : undefined,
    })
    this.queryTimeout = options?.queryTimeout ?? 30000
  }

  /** Create a single record and return the stored row. */
  async create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(model.create({ data: data as never }))
    } catch (error) {
      throw this.handleError(error, 'create', entity)
    }
  }

  /** Fetch one record by primary key; resolves null when absent. */
  async read(entity: string, id: string): Promise<unknown | null> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(model.findUnique({ where: { id } as never }))
    } catch (error) {
      throw this.handleError(error, 'read', entity)
    }
  }

  /** Update one record by primary key and return the updated row. */
  async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(
        model.update({
          where: { id } as never,
          data: data as never,
        })
      )
    } catch (error) {
      throw this.handleError(error, 'update', entity)
    }
  }

  /** Delete one record by primary key; false when the row does not exist. */
  async delete(entity: string, id: string): Promise<boolean> {
    try {
      const model = this.getModel(entity)
      await this.withTimeout(model.delete({ where: { id } as never }))
      return true
    } catch (error) {
      if (this.isNotFoundError(error)) {
        return false
      }
      throw this.handleError(error, 'delete', entity)
    }
  }

  /** Paginated listing with optional filter and sort. */
  async list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    try {
      const model = this.getModel(entity)
      // `||` (not `??`) matches the previous behavior: page/limit of 0 fall
      // back to the defaults.
      const page = options?.page || 1
      const limit = options?.limit || 50
      const skip = (page - 1) * limit
      const where = options?.filter ? this.buildWhereClause(options.filter) : undefined
      const orderBy = options?.sort ? this.buildOrderBy(options.sort) : undefined
      // Fetch the page and the total count in parallel.
      const [data, total] = (await Promise.all([
        this.withTimeout(
          model.findMany({
            where: where as never,
            orderBy: orderBy as never,
            skip,
            take: limit,
          })
        ),
        this.withTimeout(model.count({ where: where as never })),
      ])) as [unknown[], number]
      return {
        data,
        total,
        page,
        limit,
        hasMore: skip + limit < total,
      }
    } catch (error) {
      throw this.handleError(error, 'list', entity)
    }
  }

  /** Return the first record matching the filter, or null. */
  async findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    try {
      const model = this.getModel(entity)
      const where = filter ? this.buildWhereClause(filter) : undefined
      return await this.withTimeout(model.findFirst({ where: where as never }))
    } catch (error) {
      throw this.handleError(error, 'findFirst', entity)
    }
  }

  /** Lookup by a unique field (e.g. email); resolves null when absent. */
  async findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(model.findUnique({ where: { [field]: value } as never }))
    } catch (error) {
      throw this.handleError(error, 'findByField', entity)
    }
  }

  /**
   * Insert-or-update keyed on a unique field: creates with `createData`
   * when absent, otherwise applies `updateData`.
   */
  async upsert(
    entity: string,
    uniqueField: string,
    uniqueValue: unknown,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>
  ): Promise<unknown> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(
        model.upsert({
          where: { [uniqueField]: uniqueValue } as never,
          create: createData as never,
          update: updateData as never,
        })
      )
    } catch (error) {
      throw this.handleError(error, 'upsert', entity)
    }
  }

  /** Update the single record identified by a unique field. */
  async updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
    try {
      const model = this.getModel(entity)
      return await this.withTimeout(
        model.update({
          where: { [field]: value } as never,
          data: data as never,
        })
      )
    } catch (error) {
      throw this.handleError(error, 'updateByField', entity)
    }
  }

  /** Delete the record identified by a unique field; false when absent. */
  async deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    try {
      const model = this.getModel(entity)
      await this.withTimeout(model.delete({ where: { [field]: value } as never }))
      return true
    } catch (error) {
      if (this.isNotFoundError(error)) {
        return false
      }
      throw this.handleError(error, 'deleteByField', entity)
    }
  }

  /** Delete all records matching the filter; returns the deleted count. */
  async deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    try {
      const model = this.getModel(entity)
      const where = filter ? this.buildWhereClause(filter) : undefined
      const result: { count: number } = await this.withTimeout(
        model.deleteMany({ where: where as never })
      )
      return result.count
    } catch (error) {
      throw this.handleError(error, 'deleteMany', entity)
    }
  }

  /** Apply `data` to all records matching `filter`; returns updated count. */
  async updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
    try {
      const model = this.getModel(entity)
      const where = this.buildWhereClause(filter)
      const result: { count: number } = await this.withTimeout(
        model.updateMany({ where: where as never, data: data as never })
      )
      return result.count
    } catch (error) {
      throw this.handleError(error, 'updateMany', entity)
    }
  }

  /** Bulk-insert records; returns the number created. */
  async createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    try {
      const model = this.getModel(entity)
      const result: { count: number } = await this.withTimeout(
        model.createMany({ data: data as never })
      )
      return result.count
    } catch (error) {
      throw this.handleError(error, 'createMany', entity)
    }
  }

  /** Report what this adapter supports for the configured dialect. */
  async getCapabilities(): Promise<AdapterCapabilities> {
    return this.buildCapabilities()
  }

  /** Release the underlying Prisma connection pool. */
  async close(): Promise<void> {
    await this.prisma.$disconnect()
  }

  /**
   * Resolve the Prisma model delegate for an entity. PascalCase entity
   * names map to camelCase client properties ("User" -> prisma.user).
   */
  private getModel(entity: string): any {
    const modelName = entity.charAt(0).toLowerCase() + entity.slice(1)
    const model = (this.prisma as any)[modelName]
    if (!model) {
      throw DBALError.notFound(`Entity ${entity} not found`)
    }
    return model
  }

  /**
   * Map a simple filter object to a Prisma `where` clause.
   * `undefined` values are normalized to `null`; everything else (including
   * nested operator objects such as `{ gt: 5 }`) passes through unchanged.
   * (The previous version had three branches, two of which were identical.)
   */
  private buildWhereClause(filter: Record<string, unknown>): Record<string, unknown> {
    const where: Record<string, unknown> = {}
    for (const [key, value] of Object.entries(filter)) {
      where[key] = value ?? null
    }
    return where
  }

  /** Prisma's orderBy shape matches our sort map exactly; pass it through. */
  private buildOrderBy(sort: Record<string, 'asc' | 'desc'>): Record<string, string> {
    return sort
  }

  /**
   * Race `promise` against the query timeout. The timer is now cleared once
   * the race settles; previously every fast query leaked a live timeout
   * handle for the full `queryTimeout` duration.
   */
  private async withTimeout<T>(promise: Promise<T>): Promise<T> {
    let timer: ReturnType<typeof setTimeout> | undefined
    try {
      return await Promise.race([
        promise,
        new Promise<T>((_, reject) => {
          timer = setTimeout(() => reject(DBALError.timeout()), this.queryTimeout)
        }),
      ])
    } finally {
      if (timer !== undefined) {
        clearTimeout(timer)
      }
    }
  }

  // Heuristic: Prisma "record not found" failures carry 'not found' in the message.
  private isNotFoundError(error: unknown): boolean {
    return error instanceof Error && error.message.includes('not found')
  }

  /**
   * Normalize a thrown value into a DBALError. Known Prisma message
   * fragments map to conflict / validation / not-found; anything else
   * becomes an internal error tagged with the failing operation.
   */
  private handleError(error: unknown, operation: string, entity: string): DBALError {
    if (error instanceof DBALError) {
      return error
    }
    if (error instanceof Error) {
      if (error.message.includes('Unique constraint')) {
        return DBALError.conflict(`${entity} already exists`)
      }
      if (error.message.includes('Foreign key constraint')) {
        return DBALError.validationError('Related resource not found')
      }
      if (error.message.includes('not found')) {
        return DBALError.notFound(`${entity} not found`)
      }
      return DBALError.internal(`Database error during ${operation}: ${error.message}`)
    }
    return DBALError.internal(`Unknown error during ${operation}`)
  }

  /** Capability matrix; only full-text search varies by dialect. */
  private buildCapabilities(): AdapterCapabilities {
    const fullTextSearch = this.dialect === 'postgres' || this.dialect === 'mysql'
    return {
      transactions: true,
      joins: true,
      fullTextSearch,
      ttl: false,
      jsonQueries: true,
      aggregations: true,
      relations: true,
    }
  }

  /** Best-effort dialect detection from the connection URL scheme. */
  private static inferDialectFromUrl(url?: string): PrismaAdapterDialect | undefined {
    if (!url) {
      return undefined
    }
    if (url.startsWith('postgresql://') || url.startsWith('postgres://')) {
      return 'postgres'
    }
    if (url.startsWith('mysql://')) {
      return 'mysql'
    }
    if (url.startsWith('file:') || url.startsWith('sqlite://')) {
      return 'sqlite'
    }
    return undefined
  }
}
/** PrismaAdapter preconfigured for the PostgreSQL dialect. */
export class PostgresAdapter extends PrismaAdapter {
  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    super(databaseUrl, Object.assign({}, options, { dialect: 'postgres' as const }))
  }
}
/** PrismaAdapter preconfigured for the MySQL dialect. */
export class MySQLAdapter extends PrismaAdapter {
  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    super(databaseUrl, Object.assign({}, options, { dialect: 'mysql' as const }))
  }
}

View File

@@ -0,0 +1,38 @@
import { PrismaClient } from '@prisma/client'
import { PrismaAdapterDialect, type PrismaAdapterOptions, type PrismaContext } from './types'
/**
 * Assemble the shared PrismaContext: client instance, query timeout and
 * dialect. Dialect preference: explicit option, then URL inference,
 * then 'generic'.
 */
export function createPrismaContext(
  databaseUrl?: string,
  options?: PrismaAdapterOptions
): PrismaContext {
  const dialect = options?.dialect ?? inferDialectFromUrl(databaseUrl) ?? 'generic'
  const datasources = databaseUrl ? { db: { url: databaseUrl } } : undefined
  return {
    prisma: new PrismaClient({ datasources }),
    queryTimeout: options?.queryTimeout ?? 30000,
    dialect,
  }
}
/**
 * Best-effort dialect detection from a connection URL scheme.
 * Returns undefined for missing URLs or unrecognized schemes.
 */
export function inferDialectFromUrl(url?: string): PrismaAdapterDialect | undefined {
  if (!url) {
    return undefined
  }
  const schemeTable: Array<[string[], PrismaAdapterDialect]> = [
    [['postgresql://', 'postgres://'], 'postgres'],
    [['mysql://'], 'mysql'],
    [['file:', 'sqlite://'], 'sqlite'],
  ]
  for (const [prefixes, dialect] of schemeTable) {
    if (prefixes.some((prefix) => url.startsWith(prefix))) {
      return dialect
    }
  }
  return undefined
}

View File

@@ -0,0 +1,121 @@
import type { DBALAdapter } from '../adapter'
import type { ListOptions, ListResult } from '../../core/foundation/types'
import { createPrismaContext } from './context'
import type { PrismaAdapterOptions, PrismaAdapterDialect, PrismaContext } from './types'
import {
createRecord,
deleteRecord,
readRecord,
updateRecord
} from './operations/crud'
import {
createMany,
deleteByField,
deleteMany,
updateByField,
updateMany,
upsertRecord
} from './operations/bulk'
import {
findByField,
findFirstRecord,
listRecords
} from './operations/query'
import { buildCapabilities } from './operations/capabilities'
/**
 * Thin DBALAdapter facade over the functional Prisma operation modules.
 * Every method simply forwards to the matching operation, passing the
 * shared PrismaContext (client, timeout, dialect) created at construction.
 */
export class PrismaAdapter implements DBALAdapter {
  protected context: PrismaContext

  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    this.context = createPrismaContext(databaseUrl, options)
  }

  // --- single-record CRUD -------------------------------------------------

  create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    return createRecord(this.context, entity, data)
  }

  read(entity: string, id: string): Promise<unknown | null> {
    return readRecord(this.context, entity, id)
  }

  update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    return updateRecord(this.context, entity, id, data)
  }

  delete(entity: string, id: string): Promise<boolean> {
    return deleteRecord(this.context, entity, id)
  }

  // --- queries ------------------------------------------------------------

  list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    return listRecords(this.context, entity, options)
  }

  findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    return findFirstRecord(this.context, entity, filter)
  }

  findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    return findByField(this.context, entity, field, value)
  }

  // --- bulk / keyed operations --------------------------------------------

  upsert(entity: string, uniqueField: string, uniqueValue: unknown, createData: Record<string, unknown>, updateData: Record<string, unknown>): Promise<unknown> {
    return upsertRecord(this.context, entity, uniqueField, uniqueValue, createData, updateData)
  }

  updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
    return updateByField(this.context, entity, field, value, data)
  }

  deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    return deleteByField(this.context, entity, field, value)
  }

  deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    return deleteMany(this.context, entity, filter)
  }

  updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
    return updateMany(this.context, entity, filter, data)
  }

  createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    return createMany(this.context, entity, data)
  }

  // --- lifecycle ----------------------------------------------------------

  /** Capability matrix for the configured dialect, wrapped in a promise. */
  async getCapabilities() {
    return buildCapabilities(this.context)
  }

  /** Release the underlying Prisma connection pool. */
  async close(): Promise<void> {
    await this.context.prisma.$disconnect()
  }
}
/** PrismaAdapter preconfigured for the PostgreSQL dialect. */
export class PostgresAdapter extends PrismaAdapter {
  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    super(databaseUrl, Object.assign({}, options, { dialect: 'postgres' as const }))
  }
}
/** PrismaAdapter preconfigured for the MySQL dialect. */
export class MySQLAdapter extends PrismaAdapter {
  constructor(databaseUrl?: string, options?: PrismaAdapterOptions) {
    super(databaseUrl, Object.assign({}, options, { dialect: 'mysql' as const }))
  }
}
// These names are imported with `import type` above, so they must be
// re-exported as types; a plain `export { ... }` of type-only imports is an
// error under `isolatedModules` / `verbatimModuleSyntax`.
export type { PrismaAdapterOptions, PrismaAdapterDialect }

View File

@@ -0,0 +1,121 @@
import type { PrismaContext } from '../types'
import { handlePrismaError, buildWhereClause, getModel, withTimeout, isNotFoundError } from './utils'
/**
 * Insert-or-update keyed on a unique field: creates the record with
 * `createData` when absent, otherwise applies `updateData`.
 * Prisma failures surface as DBALError via handlePrismaError.
 */
export async function upsertRecord(
  context: PrismaContext,
  entity: string,
  uniqueField: string,
  uniqueValue: unknown,
  createData: Record<string, unknown>,
  updateData: Record<string, unknown>
): Promise<unknown> {
  const selector = { [uniqueField]: uniqueValue }
  try {
    const model = getModel(context, entity)
    const result = await withTimeout(
      context,
      model.upsert({
        where: selector as never,
        create: createData as never,
        update: updateData as never,
      })
    )
    return result
  } catch (error) {
    throw handlePrismaError(error, 'upsert', entity)
  }
}
/** Update the single record identified by a unique field/value pair. */
export async function updateByField(
  context: PrismaContext,
  entity: string,
  field: string,
  value: unknown,
  data: Record<string, unknown>
): Promise<unknown> {
  const selector = { [field]: value }
  try {
    const model = getModel(context, entity)
    const updated = await withTimeout(
      context,
      model.update({ where: selector as never, data: data as never })
    )
    return updated
  } catch (error) {
    throw handlePrismaError(error, 'updateByField', entity)
  }
}
/**
 * Delete the record identified by a unique field/value pair.
 * Resolves false (instead of throwing) when no such record exists.
 */
export async function deleteByField(
  context: PrismaContext,
  entity: string,
  field: string,
  value: unknown
): Promise<boolean> {
  const selector = { [field]: value }
  try {
    const model = getModel(context, entity)
    await withTimeout(context, model.delete({ where: selector as never }))
  } catch (error) {
    if (isNotFoundError(error)) {
      return false
    }
    throw handlePrismaError(error, 'deleteByField', entity)
  }
  return true
}
/** Delete every record matching `filter`; resolves the deleted count. */
export async function deleteMany(
  context: PrismaContext,
  entity: string,
  filter?: Record<string, unknown>
): Promise<number> {
  try {
    const where = filter ? buildWhereClause(filter) : undefined
    const model = getModel(context, entity)
    const outcome: { count: number } = await withTimeout(
      context,
      model.deleteMany({ where: where as never })
    )
    return outcome.count
  } catch (error) {
    throw handlePrismaError(error, 'deleteMany', entity)
  }
}
/** Apply `data` to every record matching `filter`; resolves updated count. */
export async function updateMany(
  context: PrismaContext,
  entity: string,
  filter: Record<string, unknown>,
  data: Record<string, unknown>
): Promise<number> {
  try {
    const where = buildWhereClause(filter)
    const model = getModel(context, entity)
    const outcome: { count: number } = await withTimeout(
      context,
      model.updateMany({ where: where as never, data: data as never })
    )
    return outcome.count
  } catch (error) {
    throw handlePrismaError(error, 'updateMany', entity)
  }
}
/** Bulk-insert records; resolves the number of rows created. */
export async function createMany(
  context: PrismaContext,
  entity: string,
  data: Record<string, unknown>[]
): Promise<number> {
  try {
    const model = getModel(context, entity)
    const outcome: { count: number } = await withTimeout(
      context,
      model.createMany({ data: data as never })
    )
    return outcome.count
  } catch (error) {
    throw handlePrismaError(error, 'createMany', entity)
  }
}

View File

@@ -0,0 +1,16 @@
import type { AdapterCapabilities } from '../adapter'
import type { PrismaContext } from '../types'
/**
 * Static capability matrix for the Prisma adapter.
 * Only full-text search varies: it is available on postgres and mysql.
 */
export function buildCapabilities(context: PrismaContext): AdapterCapabilities {
  const fullTextDialects = ['postgres', 'mysql']
  return {
    transactions: true,
    joins: true,
    fullTextSearch: fullTextDialects.includes(context.dialect),
    ttl: false,
    jsonQueries: true,
    aggregations: true,
    relations: true,
  }
}

View File

@@ -0,0 +1,71 @@
import type { PrismaContext } from '../types'
import { handlePrismaError, getModel, withTimeout, isNotFoundError } from './utils'
/** Insert one record for `entity`; resolves the created row. */
export async function createRecord(
  context: PrismaContext,
  entity: string,
  data: Record<string, unknown>
): Promise<unknown> {
  try {
    const model = getModel(context, entity)
    const created = await withTimeout(context, model.create({ data: data as never }))
    return created
  } catch (error) {
    throw handlePrismaError(error, 'create', entity)
  }
}
/** Fetch one record by primary key; resolves null when absent. */
export async function readRecord(
  context: PrismaContext,
  entity: string,
  id: string
): Promise<unknown | null> {
  const selector = { id }
  try {
    const model = getModel(context, entity)
    return await withTimeout(context, model.findUnique({ where: selector as never }))
  } catch (error) {
    throw handlePrismaError(error, 'read', entity)
  }
}
/** Update one record by primary key; resolves the updated row. */
export async function updateRecord(
  context: PrismaContext,
  entity: string,
  id: string,
  data: Record<string, unknown>
): Promise<unknown> {
  const selector = { id }
  try {
    const model = getModel(context, entity)
    const updated = await withTimeout(
      context,
      model.update({ where: selector as never, data: data as never })
    )
    return updated
  } catch (error) {
    throw handlePrismaError(error, 'update', entity)
  }
}
/**
 * Delete one record by primary key.
 * Resolves false (instead of throwing) when the record does not exist.
 */
export async function deleteRecord(
  context: PrismaContext,
  entity: string,
  id: string
): Promise<boolean> {
  const selector = { id }
  try {
    const model = getModel(context, entity)
    await withTimeout(context, model.delete({ where: selector as never }))
  } catch (error) {
    if (isNotFoundError(error)) {
      return false
    }
    throw handlePrismaError(error, 'delete', entity)
  }
  return true
}

View File

@@ -0,0 +1,79 @@
import type { ListOptions, ListResult } from '../../core/foundation/types'
import type { PrismaContext } from '../types'
import { handlePrismaError, buildWhereClause, buildOrderBy, getModel, withTimeout } from './utils'
/**
 * Paginated listing with optional filter and sort.
 * Runs the page query and the total count concurrently; `hasMore` is
 * derived from the window position against the total.
 */
export async function listRecords(
  context: PrismaContext,
  entity: string,
  options?: ListOptions
): Promise<ListResult<unknown>> {
  try {
    const model = getModel(context, entity)
    // `||` keeps legacy behavior: page/limit of 0 fall back to defaults.
    const page = options?.page || 1
    const limit = options?.limit || 50
    const skip = (page - 1) * limit
    const where = options?.filter ? buildWhereClause(options.filter) : undefined
    const orderBy = options?.sort ? buildOrderBy(options.sort) : undefined
    const findArgs = {
      where: where as never,
      orderBy: orderBy as never,
      skip,
      take: limit,
    }
    const [rows, total] = (await Promise.all([
      withTimeout(context, model.findMany(findArgs)),
      withTimeout(context, model.count({ where: where as never })),
    ])) as [unknown[], number]
    return { data: rows, total, page, limit, hasMore: skip + limit < total }
  } catch (error) {
    throw handlePrismaError(error, 'list', entity)
  }
}
/** Resolve the first record matching `filter`, or null when none match. */
export async function findFirstRecord(
  context: PrismaContext,
  entity: string,
  filter?: Record<string, unknown>
): Promise<unknown | null> {
  try {
    const where = filter ? buildWhereClause(filter) : undefined
    const model = getModel(context, entity)
    const match = await withTimeout(context, model.findFirst({ where: where as never }))
    return match
  } catch (error) {
    throw handlePrismaError(error, 'findFirst', entity)
  }
}
/** Lookup by a unique field (e.g. email); resolves null when absent. */
export async function findByField(
  context: PrismaContext,
  entity: string,
  field: string,
  value: unknown
): Promise<unknown | null> {
  const selector = { [field]: value }
  try {
    const model = getModel(context, entity)
    return await withTimeout(context, model.findUnique({ where: selector as never }))
  } catch (error) {
    throw handlePrismaError(error, 'findByField', entity)
  }
}

View File

@@ -0,0 +1,71 @@
import type { PrismaContext } from '../types'
import { DBALError } from '../../core/foundation/errors'
/**
 * Resolve the Prisma model delegate for an entity. PascalCase entity names
 * map to camelCase client properties ("User" -> prisma.user).
 * Throws DBALError.notFound for entities without a delegate.
 */
export function getModel(context: PrismaContext, entity: string): any {
  const delegateName = `${entity.charAt(0).toLowerCase()}${entity.slice(1)}`
  const delegate = (context.prisma as any)[delegateName]
  if (!delegate) {
    throw DBALError.notFound(`Entity ${entity} not found`)
  }
  return delegate
}
/**
 * Map a simple filter object to a Prisma `where` clause.
 *
 * `undefined` values are normalized to `null` (so "filter on missing" means
 * "IS NULL"); everything else — scalars, arrays, and nested operator objects
 * such as `{ gt: 5 }` — is passed through unchanged. The previous version
 * spelled this as three branches, two of which had identical bodies.
 */
export function buildWhereClause(filter: Record<string, unknown>): Record<string, unknown> {
  const where: Record<string, unknown> = {}
  for (const [key, value] of Object.entries(filter)) {
    where[key] = value ?? null
  }
  return where
}
// Prisma's orderBy argument has exactly the shape of our sort map
// ({ field: 'asc' | 'desc' }), so this is an intentional pass-through.
export function buildOrderBy(sort: Record<string, 'asc' | 'desc'>): Record<string, string> {
return sort
}
/**
 * Race `promise` against the context's query timeout, rejecting with
 * DBALError.timeout() when the deadline passes first.
 *
 * The timer handle is cleared once the race settles; the previous version
 * left it pending, so every fast query kept a live timeout alive for the
 * full `queryTimeout` duration (and could keep the event loop open).
 */
export async function withTimeout<T>(context: PrismaContext, promise: Promise<T>): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined
  try {
    return await Promise.race([
      promise,
      new Promise<T>((_, reject) => {
        timer = setTimeout(() => reject(DBALError.timeout()), context.queryTimeout)
      }),
    ])
  } finally {
    if (timer !== undefined) {
      clearTimeout(timer)
    }
  }
}
// Heuristic: Prisma "record not found" failures carry 'not found' in the
// message. Non-Error throwables are never treated as not-found.
export function isNotFoundError(error: unknown): boolean {
  if (!(error instanceof Error)) {
    return false
  }
  return error.message.includes('not found')
}
/**
 * Normalize any thrown value into a DBALError for the given operation.
 * Known Prisma message fragments map to conflict / validation / not-found;
 * other Errors and non-Error throwables become internal errors.
 */
export function handlePrismaError(
  error: unknown,
  operation: string,
  entity: string
): DBALError {
  if (error instanceof DBALError) {
    return error
  }
  if (!(error instanceof Error)) {
    return DBALError.internal(`Unknown error during ${operation}`)
  }
  const message = error.message
  if (message.includes('Unique constraint')) {
    return DBALError.conflict(`${entity} already exists`)
  }
  if (message.includes('Foreign key constraint')) {
    return DBALError.validationError('Related resource not found')
  }
  if (message.includes('not found')) {
    return DBALError.notFound(`${entity} not found`)
  }
  return DBALError.internal(`Database error during ${operation}: ${message}`)
}

View File

@@ -0,0 +1,38 @@
import type { AdapterCapabilities } from '../adapter'
/** Database engines the Prisma adapter can target. */
export type PrismaAdapterDialect = 'postgres' | 'mysql' | 'sqlite' | 'generic'
/** Construction options shared by PrismaAdapter and createPrismaContext. */
export interface PrismaAdapterOptions {
// Per-query timeout in milliseconds (the factory defaults this to 30000).
queryTimeout?: number
// Explicit dialect; when omitted it is inferred from the database URL.
dialect?: PrismaAdapterDialect
}
/** Shared state threaded through every Prisma operation function. */
export interface PrismaContext {
// PrismaClient instance; typed `any` to avoid a hard @prisma/client type
// dependency here — NOTE(review): consider `unknown` + narrowing at use sites.
prisma: any
// Per-query timeout in milliseconds.
queryTimeout: number
// Resolved dialect (explicit option, URL inference, or 'generic').
dialect: PrismaAdapterDialect
}
/**
 * Operation surface the Prisma adapter exposes.
 * Mirrors the DBALAdapter contract used elsewhere in the package —
 * TODO confirm it stays in sync with that interface.
 */
export interface PrismaOperations {
create(entity: string, data: Record<string, unknown>): Promise<unknown>
read(entity: string, id: string): Promise<unknown | null>
update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown>
delete(entity: string, id: string): Promise<boolean>
list(entity: string, options?: any): Promise<any>
findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null>
findByField(entity: string, field: string, value: unknown): Promise<unknown | null>
upsert(
entity: string,
uniqueField: string,
uniqueValue: unknown,
createData: Record<string, unknown>,
updateData: Record<string, unknown>
): Promise<unknown>
updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown>
deleteByField(entity: string, field: string, value: unknown): Promise<boolean>
deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number>
createMany(entity: string, data: Record<string, unknown>[]): Promise<number>
updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number>
getCapabilities(): Promise<AdapterCapabilities>
close(): Promise<void>
}

View File

@@ -1,13 +1,13 @@
export * from './blob-storage'
export { MemoryStorage } from './providers/memory-storage'
export { S3Storage } from './providers/s3-storage'
export { FilesystemStorage } from './providers/filesystem-storage'
export { S3Storage } from './providers/s3'
export { FilesystemStorage } from './providers/filesystem'
export { TenantAwareBlobStorage } from './providers/tenant-aware-storage'
import type { BlobStorage, BlobStorageConfig } from './blob-storage'
import { MemoryStorage } from './providers/memory-storage'
import { S3Storage } from './providers/s3-storage'
import { FilesystemStorage } from './providers/filesystem-storage'
import { S3Storage } from './providers/s3'
import { FilesystemStorage } from './providers/filesystem'
/**
* Factory function to create blob storage instances

View File

@@ -1,410 +0,0 @@
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
BlobStorageConfig,
} from '../blob-storage'
import { DBALError } from '../../core/foundation/errors'
import { promises as fs } from 'fs'
import { createReadStream, createWriteStream } from 'fs'
import path from 'path'
import { createHash } from 'crypto'
import { pipeline } from 'stream/promises'
/**
* Filesystem blob storage implementation
* Compatible with local filesystem, Samba/CIFS, NFS
*/
/**
 * Filesystem blob storage implementation
 * Compatible with local filesystem, Samba/CIFS, NFS
 *
 * Each blob is stored as a file under `basePath`, with a sibling
 * `<key>.meta.json` file carrying its BlobMetadata.
 */
export class FilesystemStorage implements BlobStorage {
  private basePath: string

  constructor(config: BlobStorageConfig) {
    if (!config.filesystem) {
      throw new Error('Filesystem configuration required')
    }
    this.basePath = config.filesystem.basePath
    if (config.filesystem.createIfNotExists) {
      // Constructors cannot await; attach a catch so a mkdir failure does
      // not become an unhandled rejection (previously it could). Any real
      // problem resurfaces on the first read/write against basePath.
      void this.ensureBasePath().catch(() => {
        /* surfaced by subsequent operations */
      })
    }
  }

  /** Create the storage root (and parents) if it does not yet exist. */
  private async ensureBasePath() {
    try {
      await fs.mkdir(this.basePath, { recursive: true })
    } catch (error: any) {
      throw new Error(`Failed to create base path: ${error.message}`)
    }
  }

  /** Resolve a blob key to an absolute path, stripping traversal prefixes. */
  private getFullPath(key: string): string {
    // Prevent directory traversal attacks
    const normalized = path.normalize(key).replace(/^(\.\.(\/|\\|$))+/, '')
    return path.join(this.basePath, normalized)
  }

  /** Sidecar path that stores the blob's metadata JSON. */
  private getMetadataPath(key: string): string {
    return this.getFullPath(key) + '.meta.json'
  }

  /**
   * Write a buffer as a blob and persist its metadata sidecar.
   * Throws conflict when the key exists and `overwrite` is not set.
   */
  async upload(
    key: string,
    data: Buffer | Uint8Array,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    const filePath = this.getFullPath(key)
    const metaPath = this.getMetadataPath(key)
    try {
      // Create directory if needed
      await fs.mkdir(path.dirname(filePath), { recursive: true })
      // Check if file exists and overwrite is false
      if (!options.overwrite) {
        try {
          await fs.access(filePath)
          throw DBALError.conflict(`Blob already exists: ${key}`)
        } catch (error: any) {
          // ENOENT means "does not exist" — the happy path here.
          if (error.code !== 'ENOENT') {
            throw error
          }
        }
      }
      // Write file
      await fs.writeFile(filePath, data)
      // Generate metadata
      const buffer = Buffer.from(data)
      const etag = this.generateEtag(buffer)
      const metadata: BlobMetadata = {
        key,
        size: buffer.length,
        contentType: options.contentType || 'application/octet-stream',
        etag,
        lastModified: new Date(),
        customMetadata: options.metadata,
      }
      // Write metadata
      await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))
      return metadata
    } catch (error: any) {
      if (error instanceof DBALError) {
        throw error
      }
      throw DBALError.internal(`Filesystem upload failed: ${error.message}`)
    }
  }

  /**
   * Stream a blob to disk and persist its metadata sidecar.
   * `size` is advisory; the actual size is taken from the written file.
   */
  async uploadStream(
    key: string,
    stream: ReadableStream | NodeJS.ReadableStream,
    size: number,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    const filePath = this.getFullPath(key)
    const metaPath = this.getMetadataPath(key)
    try {
      // Create directory if needed
      await fs.mkdir(path.dirname(filePath), { recursive: true })
      // Check if file exists and overwrite is false
      if (!options.overwrite) {
        try {
          await fs.access(filePath)
          throw DBALError.conflict(`Blob already exists: ${key}`)
        } catch (error: any) {
          if (error.code !== 'ENOENT') {
            throw error
          }
        }
      }
      // Write stream to file
      const writeStream = createWriteStream(filePath)
      if ('getReader' in stream) {
        // Web ReadableStream: pump chunks manually and WAIT for the write
        // stream to flush. The previous version called end() without
        // awaiting 'finish', then stat'ed a possibly partially written file.
        const reader = stream.getReader()
        await new Promise<void>((resolve, reject) => {
          writeStream.on('error', reject)
          writeStream.on('finish', resolve)
          const pump = async () => {
            while (true) {
              const { done, value } = await reader.read()
              if (done) break
              writeStream.write(Buffer.from(value))
            }
            writeStream.end()
          }
          pump().catch(reject)
        })
      } else {
        // Node.js ReadableStream — pipeline awaits completion and
        // propagates errors from either side.
        await pipeline(stream, writeStream)
      }
      // Get file stats for actual size
      const stats = await fs.stat(filePath)
      // Generate etag from file
      const buffer = await fs.readFile(filePath)
      const etag = this.generateEtag(buffer)
      const metadata: BlobMetadata = {
        key,
        size: stats.size,
        contentType: options.contentType || 'application/octet-stream',
        etag,
        lastModified: stats.mtime,
        customMetadata: options.metadata,
      }
      // Write metadata
      await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))
      return metadata
    } catch (error: any) {
      if (error instanceof DBALError) {
        throw error
      }
      throw DBALError.internal(`Filesystem stream upload failed: ${error.message}`)
    }
  }

  /** Read a blob (optionally a byte range) into a Buffer. */
  async download(
    key: string,
    options: DownloadOptions = {}
  ): Promise<Buffer> {
    const filePath = this.getFullPath(key)
    try {
      let data = await fs.readFile(filePath)
      if (options.offset !== undefined || options.length !== undefined) {
        const offset = options.offset || 0
        const length = options.length || (data.length - offset)
        if (offset >= data.length) {
          throw DBALError.validationError('Offset exceeds blob size')
        }
        data = data.subarray(offset, offset + length)
      }
      return data
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        throw DBALError.notFound(`Blob not found: ${key}`)
      }
      if (error instanceof DBALError) {
        throw error
      }
      throw DBALError.internal(`Filesystem download failed: ${error.message}`)
    }
  }

  /** Open a read stream over a blob (optionally a byte range). */
  async downloadStream(
    key: string,
    options: DownloadOptions = {}
  ): Promise<NodeJS.ReadableStream> {
    const filePath = this.getFullPath(key)
    try {
      await fs.access(filePath)
      const streamOptions: any = {}
      if (options.offset !== undefined) {
        streamOptions.start = options.offset
      }
      if (options.length !== undefined) {
        // createReadStream's `end` is inclusive, hence the -1.
        streamOptions.end = (options.offset || 0) + options.length - 1
      }
      return createReadStream(filePath, streamOptions)
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        throw DBALError.notFound(`Blob not found: ${key}`)
      }
      throw DBALError.internal(`Filesystem download stream failed: ${error.message}`)
    }
  }

  /** Delete a blob and its metadata sidecar; throws not-found if absent. */
  async delete(key: string): Promise<boolean> {
    const filePath = this.getFullPath(key)
    const metaPath = this.getMetadataPath(key)
    try {
      await fs.unlink(filePath)
      // Try to delete metadata (ignore if doesn't exist)
      try {
        await fs.unlink(metaPath)
      } catch {
        // Ignore if metadata doesn't exist
      }
      return true
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        throw DBALError.notFound(`Blob not found: ${key}`)
      }
      throw DBALError.internal(`Filesystem delete failed: ${error.message}`)
    }
  }

  /** True when the blob file exists and is accessible. */
  async exists(key: string): Promise<boolean> {
    const filePath = this.getFullPath(key)
    try {
      await fs.access(filePath)
      return true
    } catch {
      return false
    }
  }

  /**
   * Read a blob's metadata, preferring the sidecar JSON and falling back
   * to regenerating it from the file itself.
   */
  async getMetadata(key: string): Promise<BlobMetadata> {
    const filePath = this.getFullPath(key)
    const metaPath = this.getMetadataPath(key)
    try {
      // Check if file exists
      const stats = await fs.stat(filePath)
      // Try to read metadata file
      try {
        const metaContent = await fs.readFile(metaPath, 'utf-8')
        return JSON.parse(metaContent)
      } catch {
        // Generate metadata from file if meta file doesn't exist
        const data = await fs.readFile(filePath)
        return {
          key,
          size: stats.size,
          contentType: 'application/octet-stream',
          etag: this.generateEtag(data),
          lastModified: stats.mtime,
        }
      }
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        throw DBALError.notFound(`Blob not found: ${key}`)
      }
      throw DBALError.internal(`Filesystem get metadata failed: ${error.message}`)
    }
  }

  /**
   * List blobs under an optional key prefix, up to `maxKeys`.
   * Walks one extra entry so truncation can actually be detected — the
   * previous version capped the walk at `maxKeys`, which made
   * `isTruncated` always false and `nextToken` never set.
   */
  async list(options: BlobListOptions = {}): Promise<BlobListResult> {
    const prefix = options.prefix || ''
    const maxKeys = options.maxKeys || 1000
    try {
      const items: BlobMetadata[] = []
      await this.walkDirectory(this.basePath, prefix, maxKeys + 1, items)
      return {
        items: items.slice(0, maxKeys),
        isTruncated: items.length > maxKeys,
        nextToken: items.length > maxKeys ? items[maxKeys].key : undefined,
      }
    } catch (error: any) {
      throw DBALError.internal(`Filesystem list failed: ${error.message}`)
    }
  }

  /** Depth-first walk collecting metadata until `cap` entries are gathered. */
  private async walkDirectory(
    dir: string,
    prefix: string,
    cap: number,
    items: BlobMetadata[]
  ) {
    if (items.length >= cap) return
    const entries = await fs.readdir(dir, { withFileTypes: true })
    for (const entry of entries) {
      if (items.length >= cap) break
      const fullPath = path.join(dir, entry.name)
      if (entry.isDirectory()) {
        await this.walkDirectory(fullPath, prefix, cap, items)
      } else if (!entry.name.endsWith('.meta.json')) {
        // Keys always use forward slashes, regardless of platform separator.
        const relativePath = path.relative(this.basePath, fullPath)
        const normalizedKey = relativePath.split(path.sep).join('/')
        if (!prefix || normalizedKey.startsWith(prefix)) {
          try {
            const metadata = await this.getMetadata(normalizedKey)
            items.push(metadata)
          } catch {
            // Skip files that can't be read
          }
        }
      }
    }
  }

  /**
   * Presigned URLs are not supported on plain filesystems; the empty
   * string preserves the historical contract for callers.
   */
  async generatePresignedUrl(
    key: string,
    expirationSeconds: number = 3600
  ): Promise<string> {
    // Filesystem storage doesn't support presigned URLs
    return ''
  }

  /** Copy a blob (and its metadata, refreshed) to a new key. */
  async copy(
    sourceKey: string,
    destKey: string
  ): Promise<BlobMetadata> {
    const sourcePath = this.getFullPath(sourceKey)
    const destPath = this.getFullPath(destKey)
    const sourceMetaPath = this.getMetadataPath(sourceKey)
    const destMetaPath = this.getMetadataPath(destKey)
    try {
      // Create destination directory if needed
      await fs.mkdir(path.dirname(destPath), { recursive: true })
      // Copy file
      await fs.copyFile(sourcePath, destPath)
      // Copy or regenerate metadata
      try {
        await fs.copyFile(sourceMetaPath, destMetaPath)
        // Update lastModified in metadata
        const metadata = JSON.parse(await fs.readFile(destMetaPath, 'utf-8'))
        metadata.lastModified = new Date()
        metadata.key = destKey
        await fs.writeFile(destMetaPath, JSON.stringify(metadata, null, 2))
        return metadata
      } catch {
        // Regenerate metadata if copy fails
        return await this.getMetadata(destKey)
      }
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        throw DBALError.notFound(`Source blob not found: ${sourceKey}`)
      }
      throw DBALError.internal(`Filesystem copy failed: ${error.message}`)
    }
  }

  /** Sum of all blob sizes (full scan; O(number of blobs)). */
  async getTotalSize(): Promise<number> {
    const items = await this.list({ maxKeys: Number.MAX_SAFE_INTEGER })
    return items.items.reduce((sum, item) => sum + item.size, 0)
  }

  /** Number of stored blobs (full scan; O(number of blobs)). */
  async getObjectCount(): Promise<number> {
    const items = await this.list({ maxKeys: Number.MAX_SAFE_INTEGER })
    return items.items.length
  }

  // MD5-based etag, quoted for S3-style compatibility (not a security hash).
  private generateEtag(data: Buffer): string {
    const hash = createHash('md5').update(data).digest('hex')
    return `"${hash}"`
  }
}

View File

@@ -0,0 +1,28 @@
import type { BlobStorageConfig } from '../../blob-storage'
import { promises as fs } from 'fs'
/** Shared state handed to every filesystem blob-storage operation module. */
export interface FilesystemContext {
  // Directory under which all blobs (and metadata sidecars) are stored.
  basePath: string
}
/**
 * Build a FilesystemContext from the storage config.
 *
 * Throws when no `filesystem` section is configured. When
 * createIfNotExists is set, base-path creation is kicked off in the
 * background (synchronous constructor-time callers cannot await it).
 */
export function createFilesystemContext(config: BlobStorageConfig): FilesystemContext {
  if (!config.filesystem) {
    throw new Error('Filesystem configuration required')
  }
  const basePath = config.filesystem.basePath
  if (config.filesystem.createIfNotExists) {
    // Fire-and-forget, but with a rejection handler: the previous bare
    // `void ensureBasePath(...)` turned a mkdir failure into an unhandled
    // promise rejection, which is fatal in modern Node. Any persistent
    // problem resurfaces on the first storage operation anyway.
    ensureBasePath(basePath).catch((error) => {
      console.error(
        `FilesystemStorage: ${error instanceof Error ? error.message : String(error)}`
      )
    })
  }
  return { basePath }
}
/** Create the storage root (and any parents) if it does not exist yet. */
async function ensureBasePath(basePath: string) {
  // Recursive mkdir is a no-op when the directory is already present.
  await fs.mkdir(basePath, { recursive: true }).catch((error: any) => {
    throw new Error(`Failed to create base path: ${error.message}`)
  })
}

View File

@@ -0,0 +1,98 @@
import { promises as fs } from 'fs'
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
BlobStorageConfig,
} from '../../blob-storage'
import { createFilesystemContext, type FilesystemContext } from './context'
import { buildFullPath } from './paths'
import { copyBlob, deleteBlob, objectCount, totalSize } from './operations/maintenance'
import { downloadBuffer, downloadStream } from './operations/downloads'
import { readMetadata } from './operations/metadata'
import { listBlobs } from './operations/listing'
import { uploadBuffer, uploadStream } from './operations/uploads'
/**
 * Filesystem-backed BlobStorage implementation.
 *
 * All real work lives in the per-operation modules; this class only wires
 * them to the shared FilesystemContext created from the storage config.
 */
export class FilesystemStorage implements BlobStorage {
  private readonly context: FilesystemContext

  constructor(config: BlobStorageConfig) {
    this.context = createFilesystemContext(config)
  }

  /** Store a buffer under `key`. */
  upload(key: string, data: Buffer | Uint8Array, options: UploadOptions = {}): Promise<BlobMetadata> {
    return uploadBuffer(this.context, key, data, options)
  }

  /** Store stream contents under `key`; `size` is advisory. */
  uploadStream(
    key: string,
    stream: ReadableStream | NodeJS.ReadableStream,
    size: number,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    return uploadStream(this.context, key, stream, size, options)
  }

  /** Read the blob at `key` into memory. */
  download(key: string, options: DownloadOptions = {}): Promise<Buffer> {
    return downloadBuffer(this.context, key, options)
  }

  /** Open a readable stream over the blob at `key`. */
  downloadStream(key: string, options: DownloadOptions = {}): Promise<NodeJS.ReadableStream> {
    return downloadStream(this.context, key, options)
  }

  /** Remove the blob (and its metadata sidecar) at `key`. */
  delete(key: string): Promise<boolean> {
    return deleteBlob(this.context, key)
  }

  /** True when a file exists for `key`. */
  exists(key: string): Promise<boolean> {
    const filePath = buildFullPath(this.context.basePath, key)
    return fs.access(filePath).then(
      () => true,
      () => false
    )
  }

  /** Metadata for the blob at `key` (sidecar file, or regenerated). */
  getMetadata(key: string): Promise<BlobMetadata> {
    return readMetadata(this.context, key)
  }

  /** List blobs, optionally filtered by key prefix. */
  list(options: BlobListOptions = {}): Promise<BlobListResult> {
    return listBlobs(this.context, options)
  }

  /**
   * Presigned URLs are an object-store concept with no filesystem
   * equivalent; this always resolves to an empty string.
   */
  async generatePresignedUrl(key: string, expirationSeconds: number = 3600): Promise<string> {
    return ''
  }

  /** Copy a blob to a new key. */
  copy(sourceKey: string, destKey: string): Promise<BlobMetadata> {
    return copyBlob(this.context, sourceKey, destKey)
  }

  /** Total bytes stored across all blobs. */
  getTotalSize(): Promise<number> {
    return totalSize(this.context)
  }

  /** Number of stored blobs. */
  getObjectCount(): Promise<number> {
    return objectCount(this.context)
  }
}

View File

@@ -0,0 +1,65 @@
import { promises as fs, createReadStream } from 'fs'
import type { DownloadOptions } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { FilesystemContext } from '../context'
import { buildFullPath } from '../paths'
/**
 * Read a blob into memory, optionally slicing a byte range.
 *
 * Defaults use `??` (not `||`) so an explicit `length: 0` yields an empty
 * buffer instead of silently meaning "rest of the file".
 *
 * Throws DBALError.notFound for missing keys and
 * DBALError.validationError when the offset is past the end of the data.
 */
export async function downloadBuffer(
  context: FilesystemContext,
  key: string,
  options: DownloadOptions
): Promise<Buffer> {
  const filePath = buildFullPath(context.basePath, key)
  try {
    let data = await fs.readFile(filePath)
    if (options.offset !== undefined || options.length !== undefined) {
      const offset = options.offset ?? 0
      const length = options.length ?? (data.length - offset)
      if (offset >= data.length) {
        throw DBALError.validationError('Offset exceeds blob size')
      }
      data = data.subarray(offset, offset + length)
    }
    return data
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    if (error instanceof DBALError) {
      throw error
    }
    throw DBALError.internal(`Filesystem download failed: ${error.message}`)
  }
}
/**
 * Open a read stream over a blob, honouring optional offset/length.
 *
 * The file is probed with fs.access first so a missing blob surfaces as
 * DBALError.notFound instead of a raw stream 'error' event. The stream
 * options are now precisely typed instead of `any`.
 */
export async function downloadStream(
  context: FilesystemContext,
  key: string,
  options: DownloadOptions
): Promise<NodeJS.ReadableStream> {
  const filePath = buildFullPath(context.basePath, key)
  try {
    await fs.access(filePath)
    const streamOptions: { start?: number; end?: number } = {}
    if (options.offset !== undefined) {
      streamOptions.start = options.offset
    }
    if (options.length !== undefined) {
      // createReadStream's `end` is inclusive, hence the -1.
      streamOptions.end = (options.offset || 0) + options.length - 1
    }
    return createReadStream(filePath, streamOptions)
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`Filesystem download stream failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,62 @@
import { promises as fs } from 'fs'
import path from 'path'
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { FilesystemContext } from '../context'
import { buildFullPath } from '../paths'
import { readMetadata } from './metadata'
/**
 * List blob metadata under an optional key prefix.
 *
 * The walk now collects up to maxKeys + 1 entries so truncation can
 * actually be detected: previously the walk stopped exactly at maxKeys,
 * which made `isTruncated` always false and `nextToken` unreachable.
 */
export async function listBlobs(
  context: FilesystemContext,
  options: BlobListOptions
): Promise<BlobListResult> {
  const prefix = options.prefix || ''
  const maxKeys = options.maxKeys || 1000
  try {
    const items: BlobMetadata[] = []
    // The one extra entry (if present) signals that the listing was cut off.
    await walkDirectory(context, context.basePath, prefix, maxKeys + 1, items)
    const isTruncated = items.length > maxKeys
    return {
      items: items.slice(0, maxKeys),
      isTruncated,
      nextToken: isTruncated ? items[maxKeys].key : undefined,
    }
  } catch (error: any) {
    throw DBALError.internal(`Filesystem list failed: ${error.message}`)
  }
}
async function walkDirectory(
context: FilesystemContext,
dir: string,
prefix: string,
maxKeys: number,
items: BlobMetadata[]
) {
if (items.length >= maxKeys) return
const entries = await fs.readdir(dir, { withFileTypes: true })
for (const entry of entries) {
if (items.length >= maxKeys) break
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
await walkDirectory(context, fullPath, prefix, maxKeys, items)
} else if (!entry.name.endsWith('.meta.json')) {
const relativePath = path.relative(context.basePath, fullPath)
const normalizedKey = relativePath.split(path.sep).join('/')
if (!prefix || normalizedKey.startsWith(prefix)) {
try {
const metadata = await readMetadata(context, normalizedKey)
items.push(metadata)
} catch {
// Skip files that can't be read
}
}
}
}
}

View File

@@ -0,0 +1,75 @@
import { promises as fs } from 'fs'
import path from 'path'
import type { BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { FilesystemContext } from '../context'
import { buildFullPath, buildMetadataPath } from '../paths'
import { readMetadata } from './metadata'
import { listBlobs } from './listing'
/**
 * Delete a blob and its metadata sidecar.
 *
 * A missing sidecar is tolerated silently; a missing blob raises
 * DBALError.notFound, any other failure DBALError.internal.
 */
export async function deleteBlob(
  context: FilesystemContext,
  key: string
): Promise<boolean> {
  const filePath = buildFullPath(context.basePath, key)
  const metaPath = buildMetadataPath(context.basePath, key)
  try {
    await fs.unlink(filePath)
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`Filesystem delete failed: ${error.message}`)
  }
  // Best-effort sidecar removal; the blob itself is already gone.
  await fs.unlink(metaPath).catch(() => undefined)
  return true
}
/**
 * Copy a blob and its metadata sidecar to a new key.
 *
 * On success the copied sidecar receives a fresh lastModified and the
 * destination key; if the sidecar copy or rewrite fails for any reason,
 * metadata is regenerated from the destination file instead.
 *
 * Throws DBALError.notFound when the source blob is missing and
 * DBALError.internal for other filesystem failures.
 */
export async function copyBlob(
  context: FilesystemContext,
  sourceKey: string,
  destKey: string
): Promise<BlobMetadata> {
  const sourcePath = buildFullPath(context.basePath, sourceKey)
  const destPath = buildFullPath(context.basePath, destKey)
  const sourceMetaPath = buildMetadataPath(context.basePath, sourceKey)
  const destMetaPath = buildMetadataPath(context.basePath, destKey)
  try {
    // Parent directories of the destination may not exist yet.
    await fs.mkdir(path.dirname(destPath), { recursive: true })
    await fs.copyFile(sourcePath, destPath)
    try {
      await fs.copyFile(sourceMetaPath, destMetaPath)
      // NOTE(review): lastModified is a Date on the returned object but is
      // serialized to an ISO string in the sidecar — confirm callers
      // tolerate both representations.
      const metadata = JSON.parse(await fs.readFile(destMetaPath, 'utf-8'))
      metadata.lastModified = new Date()
      metadata.key = destKey
      await fs.writeFile(destMetaPath, JSON.stringify(metadata, null, 2))
      return metadata
    } catch {
      // No usable source sidecar: rebuild metadata from the copied file.
      return await readMetadata(context, destKey)
    }
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw DBALError.notFound(`Source blob not found: ${sourceKey}`)
    }
    throw DBALError.internal(`Filesystem copy failed: ${error.message}`)
  }
}
/** Total bytes across all blobs (walks the full tree via listBlobs). */
export async function totalSize(context: FilesystemContext): Promise<number> {
  const { items } = await listBlobs(context, { maxKeys: Number.MAX_SAFE_INTEGER })
  let sum = 0
  for (const item of items) sum += item.size
  return sum
}
/** Number of stored blobs (walks the full tree via listBlobs). */
export async function objectCount(context: FilesystemContext): Promise<number> {
  const { items } = await listBlobs(context, { maxKeys: Number.MAX_SAFE_INTEGER })
  return items.length
}

View File

@@ -0,0 +1,51 @@
import { promises as fs } from 'fs'
import { createHash } from 'crypto'
import type { BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { FilesystemContext } from '../context'
import { buildFullPath, buildMetadataPath } from '../paths'
/**
 * Load metadata for a blob: prefer the JSON sidecar, fall back to fs stats
 * plus a freshly computed MD5 etag when no sidecar exists.
 *
 * The sidecar stores lastModified as an ISO string (JSON has no Date), so
 * it is revived into a real Date here — previously the raw string was
 * returned, violating the declared BlobMetadata shape for sidecar reads.
 *
 * Throws DBALError.notFound for missing blobs, DBALError.internal otherwise.
 */
export async function readMetadata(
  context: FilesystemContext,
  key: string
): Promise<BlobMetadata> {
  const filePath = buildFullPath(context.basePath, key)
  const metaPath = buildMetadataPath(context.basePath, key)
  try {
    const stats = await fs.stat(filePath)
    try {
      const metaContent = await fs.readFile(metaPath, 'utf-8')
      const parsed = JSON.parse(metaContent)
      return { ...parsed, lastModified: new Date(parsed.lastModified) }
    } catch {
      // No sidecar (or unreadable): synthesize metadata from the file itself.
      const data = await fs.readFile(filePath)
      return {
        key,
        size: stats.size,
        contentType: 'application/octet-stream',
        etag: generateEtag(data),
        lastModified: stats.mtime,
      }
    }
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`Filesystem get metadata failed: ${error.message}`)
  }
}
/** Persist a blob's metadata sidecar as pretty-printed JSON. */
export async function writeMetadata(
  context: FilesystemContext,
  key: string,
  metadata: BlobMetadata
) {
  const serialized = JSON.stringify(metadata, null, 2)
  await fs.writeFile(buildMetadataPath(context.basePath, key), serialized)
}
/** Quoted hex MD5 of the payload, mirroring the S3 ETag convention. */
export function generateEtag(data: Buffer): string {
  const digest = createHash('md5').update(data).digest('hex')
  return '"' + digest + '"'
}

View File

@@ -0,0 +1,109 @@
import { promises as fs, createWriteStream } from 'fs'
import path from 'path'
import { pipeline } from 'stream/promises'
import type { BlobMetadata, UploadOptions } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { FilesystemContext } from '../context'
import { buildFullPath, buildMetadataPath } from '../paths'
import { generateEtag, writeMetadata } from './metadata'
/**
 * Make sure the parent directory exists and, unless overwrite is allowed,
 * that no file already occupies `filePath` (conflict error when one does).
 */
async function ensureWritableDestination(
  filePath: string,
  overwrite?: boolean
) {
  await fs.mkdir(path.dirname(filePath), { recursive: true })
  if (overwrite) return
  let occupied = true
  try {
    await fs.access(filePath)
  } catch (error: any) {
    // ENOENT means the slot is free; anything else is a real failure.
    if (error.code !== 'ENOENT') throw error
    occupied = false
  }
  if (occupied) {
    throw DBALError.conflict(`Blob already exists: ${filePath}`)
  }
}
export async function uploadBuffer(
context: FilesystemContext,
key: string,
data: Buffer | Uint8Array,
options: UploadOptions
): Promise<BlobMetadata> {
const filePath = buildFullPath(context.basePath, key)
const metaPath = buildMetadataPath(context.basePath, key)
try {
await ensureWritableDestination(filePath, options.overwrite)
await fs.writeFile(filePath, data)
const buffer = Buffer.from(data)
const metadata: BlobMetadata = {
key,
size: buffer.length,
contentType: options.contentType || 'application/octet-stream',
etag: generateEtag(buffer),
lastModified: new Date(),
customMetadata: options.metadata,
}
await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))
return metadata
} catch (error: any) {
if (error instanceof DBALError) {
throw error
}
throw DBALError.internal(`Filesystem upload failed: ${error.message}`)
}
}
/**
 * Stream data to disk and persist its metadata sidecar.
 *
 * Fixes a flush race in the Web-stream branch: chunks were written with no
 * backpressure handling and fs.stat could run before the file was fully
 * flushed, producing a wrong size/etag. The write stream is now drained on
 * backpressure and awaited to finish before the file is re-read.
 *
 * `size` is advisory; the real size comes from fs.stat after the write.
 */
export async function uploadStream(
  context: FilesystemContext,
  key: string,
  stream: ReadableStream | NodeJS.ReadableStream,
  size: number,
  options: UploadOptions
): Promise<BlobMetadata> {
  const filePath = buildFullPath(context.basePath, key)
  try {
    await ensureWritableDestination(filePath, options.overwrite)
    const writeStream = createWriteStream(filePath)
    if ('getReader' in stream) {
      const reader = stream.getReader()
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        // Respect backpressure: wait for 'drain' when the buffer is full.
        if (!writeStream.write(Buffer.from(value))) {
          await new Promise<void>((resolve, reject) => {
            writeStream.once('drain', resolve)
            writeStream.once('error', reject)
          })
        }
      }
      // Wait until all buffered data hits disk before touching the file.
      await new Promise<void>((resolve, reject) => {
        writeStream.once('error', reject)
        writeStream.end(resolve)
      })
    } else {
      await pipeline(stream, writeStream)
    }
    const stats = await fs.stat(filePath)
    const buffer = await fs.readFile(filePath)
    const metadata: BlobMetadata = {
      key,
      size: stats.size,
      contentType: options.contentType || 'application/octet-stream',
      etag: generateEtag(buffer),
      lastModified: stats.mtime,
      customMetadata: options.metadata,
    }
    await writeMetadata(context, key, metadata)
    return metadata
  } catch (error: any) {
    if (error instanceof DBALError) {
      throw error
    }
    throw DBALError.internal(`Filesystem stream upload failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,11 @@
import path from 'path'
import { sanitizeKey } from './sanitize-key'
/**
 * Resolve a blob key to a path under basePath.
 *
 * sanitizeKey only strips a LEADING "../" run, so a key such as
 * "a/../../secret" could still escape the storage root after path.join.
 * The joined path is therefore verified to stay inside basePath as a
 * defense-in-depth containment check.
 */
export function buildFullPath(basePath: string, key: string): string {
  const normalized = sanitizeKey(key)
  const fullPath = path.join(basePath, normalized)
  const resolvedBase = path.resolve(basePath)
  const resolved = path.resolve(fullPath)
  if (resolved !== resolvedBase && !resolved.startsWith(resolvedBase + path.sep)) {
    throw new Error(`Invalid blob key: ${key}`)
  }
  return fullPath
}
/** Path of the JSON metadata sidecar that accompanies a blob. */
export function buildMetadataPath(basePath: string, key: string): string {
  return `${buildFullPath(basePath, key)}.meta.json`
}

View File

@@ -0,0 +1,3 @@
/**
 * Normalize a blob key for safe use in filesystem paths.
 *
 * The previous regex only removed a LEADING "../" run, leaving embedded
 * traversal ("a/../../b") intact. Keys are now split on both separators
 * and every "" / "." / ".." segment is dropped, so no traversal can
 * survive; segments are rejoined with "/", the canonical key separator.
 */
export function sanitizeKey(key: string): string {
  return key
    .split(/[\\/]+/)
    .filter((segment) => segment !== '' && segment !== '.' && segment !== '..')
    .join('/')
}

View File

@@ -1,230 +1 @@
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
} from '../blob-storage'
import { DBALError } from '../../core/foundation/errors'
import { createHash } from 'crypto'
/** Internal record stored per key in the in-memory blob store. */
interface BlobData {
  // Raw blob bytes.
  data: Buffer
  // MIME type supplied at upload (defaults to application/octet-stream).
  contentType: string
  // Quoted MD5 of `data`, S3-style.
  etag: string
  // Time of the upload/copy that produced this record.
  lastModified: Date
  // Caller-supplied custom metadata (defaults to an empty object).
  metadata: Record<string, string>
}
/**
 * In-memory blob storage implementation
 * Useful for testing and development
 *
 * Blobs live in a Map keyed by the exact key string (no normalization);
 * list() iterates in insertion order. Methods are async only to satisfy
 * the BlobStorage interface — the underlying work is synchronous.
 */
export class MemoryStorage implements BlobStorage {
  private store: Map<string, BlobData> = new Map()
  /**
   * Store a buffer under `key`.
   * Throws DBALError.conflict when the key exists and overwrite is unset.
   */
  async upload(
    key: string,
    data: Buffer | Uint8Array,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    const buffer = Buffer.from(data)
    if (!options.overwrite && this.store.has(key)) {
      throw DBALError.conflict(`Blob already exists: ${key}`)
    }
    const blob: BlobData = {
      data: buffer,
      contentType: options.contentType || 'application/octet-stream',
      etag: this.generateEtag(buffer),
      lastModified: new Date(),
      metadata: options.metadata || {},
    }
    this.store.set(key, blob)
    return this.makeBlobMetadata(key, blob)
  }
  /**
   * Collect the whole stream into memory, then delegate to upload().
   * `size` is ignored — the actual collected length is used.
   */
  async uploadStream(
    key: string,
    stream: ReadableStream | NodeJS.ReadableStream,
    size: number,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    // Collect stream data into buffer
    const chunks: Buffer[] = []
    if ('getReader' in stream) {
      // Web ReadableStream
      const reader = stream.getReader()
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        chunks.push(Buffer.from(value))
      }
    } else {
      // Node.js ReadableStream
      for await (const chunk of stream) {
        chunks.push(Buffer.from(chunk))
      }
    }
    const buffer = Buffer.concat(chunks)
    return this.upload(key, buffer, options)
  }
  /**
   * Read a blob's bytes, optionally slicing a byte range.
   * Throws notFound for unknown keys, validationError when offset >= size.
   */
  async download(
    key: string,
    options: DownloadOptions = {}
  ): Promise<Buffer> {
    const blob = this.store.get(key)
    if (!blob) {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    let data = blob.data
    if (options.offset !== undefined || options.length !== undefined) {
      const offset = options.offset || 0
      const length = options.length || (data.length - offset)
      if (offset >= data.length) {
        throw DBALError.validationError('Offset exceeds blob size')
      }
      data = data.subarray(offset, offset + length)
    }
    return data
  }
  /**
   * Wrap the (optionally ranged) bytes in a single-chunk stream: a Web
   * ReadableStream when the global exists, a Node Readable otherwise.
   */
  async downloadStream(
    key: string,
    options: DownloadOptions = {}
  ): Promise<ReadableStream | NodeJS.ReadableStream> {
    const data = await this.download(key, options)
    // Return a readable stream
    if (typeof ReadableStream !== 'undefined') {
      // Web ReadableStream
      return new ReadableStream({
        start(controller) {
          controller.enqueue(data)
          controller.close()
        },
      })
    } else {
      // Node.js ReadableStream
      const { Readable } = await import('stream')
      return Readable.from(data)
    }
  }
  /** Remove a blob; throws notFound for unknown keys. */
  async delete(key: string): Promise<boolean> {
    if (!this.store.has(key)) {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    this.store.delete(key)
    return true
  }
  /** True when the exact key is present. */
  async exists(key: string): Promise<boolean> {
    return this.store.has(key)
  }
  /** Metadata snapshot for a stored blob (notFound when absent). */
  async getMetadata(key: string): Promise<BlobMetadata> {
    const blob = this.store.get(key)
    if (!blob) {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    return this.makeBlobMetadata(key, blob)
  }
  /**
   * List blobs in insertion order, filtered by prefix.
   * nextToken is the first matching key NOT included in this page.
   */
  async list(options: BlobListOptions = {}): Promise<BlobListResult> {
    const prefix = options.prefix || ''
    const maxKeys = options.maxKeys || 1000
    const items: BlobMetadata[] = []
    let nextToken: string | undefined
    for (const [key, blob] of this.store.entries()) {
      if (!prefix || key.startsWith(prefix)) {
        if (items.length >= maxKeys) {
          nextToken = key
          break
        }
        items.push(this.makeBlobMetadata(key, blob))
      }
    }
    return {
      items,
      nextToken,
      isTruncated: nextToken !== undefined,
    }
  }
  /** Presigned URLs are meaningless in-memory; always an empty string. */
  async generatePresignedUrl(
    key: string,
    expirationSeconds: number = 3600
  ): Promise<string> {
    // Memory storage doesn't support presigned URLs
    return ''
  }
  /**
   * Duplicate a blob under a new key with deep-copied bytes and a fresh
   * lastModified. Throws notFound when the source is missing.
   */
  async copy(
    sourceKey: string,
    destKey: string
  ): Promise<BlobMetadata> {
    const sourceBlob = this.store.get(sourceKey)
    if (!sourceBlob) {
      throw DBALError.notFound(`Source blob not found: ${sourceKey}`)
    }
    const destBlob: BlobData = {
      ...sourceBlob,
      data: Buffer.from(sourceBlob.data),
      lastModified: new Date(),
    }
    this.store.set(destKey, destBlob)
    return this.makeBlobMetadata(destKey, destBlob)
  }
  /** Sum of all stored blob sizes in bytes. */
  async getTotalSize(): Promise<number> {
    let total = 0
    for (const blob of this.store.values()) {
      total += blob.data.length
    }
    return total
  }
  /** Number of blobs currently stored. */
  async getObjectCount(): Promise<number> {
    return this.store.size
  }
  // Quoted hex MD5 of the payload, mirroring the S3 ETag convention.
  private generateEtag(data: Buffer): string {
    const hash = createHash('md5').update(data).digest('hex')
    return `"${hash}"`
  }
  // Public metadata view of an internal BlobData record.
  private makeBlobMetadata(key: string, blob: BlobData): BlobMetadata {
    return {
      key,
      size: blob.data.length,
      contentType: blob.contentType,
      etag: blob.etag,
      lastModified: blob.lastModified,
      customMetadata: blob.metadata,
    }
  }
}
export { MemoryStorage } from './memory-storage/index'

View File

@@ -0,0 +1,48 @@
import { DBALError } from '../../core/foundation/errors'
import type { DownloadOptions } from '../blob-storage'
import type { MemoryStore } from './store'
import { getBlobOrThrow, normalizeKey } from './utils'
/**
 * Read a blob's bytes, optionally slicing a byte range.
 *
 * Defaults use `??` (not `||`) so an explicit `length: 0` returns an empty
 * buffer instead of silently meaning "rest of blob". Throws
 * DBALError.notFound for unknown keys and validationError when the offset
 * is past the end of the data.
 */
export const downloadBuffer = (
  store: MemoryStore,
  key: string,
  options: DownloadOptions = {},
): Buffer => {
  const normalizedKey = normalizeKey(key)
  const blob = getBlobOrThrow(store, normalizedKey)
  let data = blob.data
  if (options.offset !== undefined || options.length !== undefined) {
    const offset = options.offset ?? 0
    const length = options.length ?? (data.length - offset)
    if (offset >= data.length) {
      throw DBALError.validationError('Offset exceeds blob size')
    }
    data = data.subarray(offset, offset + length)
  }
  return data
}
/**
 * Wrap a blob's (optionally ranged) bytes in a single-chunk stream: a Web
 * ReadableStream when the global exists, a Node Readable otherwise.
 */
export const downloadStream = async (
  store: MemoryStore,
  key: string,
  options?: DownloadOptions,
) => {
  const data = downloadBuffer(store, key, options)
  if (typeof ReadableStream === 'undefined') {
    const { Readable } = await import('stream')
    return Readable.from(data)
  }
  return new ReadableStream({
    start(controller) {
      controller.enqueue(data)
      controller.close()
    },
  })
}

View File

@@ -0,0 +1,73 @@
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
} from '../blob-storage'
import { createStore } from './store'
import { uploadBuffer, uploadFromStream } from './uploads'
import { downloadBuffer, downloadStream } from './downloads'
import { copyBlob, deleteBlob, getMetadata, listBlobs, getObjectCount, getTotalSize } from './management'
import { normalizeKey } from './utils'
/**
 * In-memory BlobStorage backed by a Map — intended for tests and
 * development. All logic lives in the functional operation modules; this
 * class only binds them to a private store instance.
 */
export class MemoryStorage implements BlobStorage {
  private store = createStore()

  /** Store a buffer under `key`. */
  async upload(
    key: string,
    data: Buffer | Uint8Array,
    options: UploadOptions = {},
  ): Promise<BlobMetadata> {
    return uploadBuffer(this.store, key, data, options)
  }

  /** Collect a stream into memory and store it; `_size` is ignored. */
  async uploadStream(
    key: string,
    stream: ReadableStream | NodeJS.ReadableStream,
    _size: number,
    options: UploadOptions = {},
  ): Promise<BlobMetadata> {
    return uploadFromStream(this.store, key, stream, options)
  }

  /** Read a blob (optionally a byte range) into a Buffer. */
  async download(key: string, options: DownloadOptions = {}): Promise<Buffer> {
    return downloadBuffer(this.store, key, options)
  }

  /** Read a blob as a stream. */
  async downloadStream(
    key: string,
    options: DownloadOptions = {},
  ): Promise<ReadableStream | NodeJS.ReadableStream> {
    return downloadStream(this.store, key, options)
  }

  /** Remove a blob; rejects with notFound for unknown keys. */
  async delete(key: string): Promise<boolean> {
    return deleteBlob(this.store, key)
  }

  /** True when the (normalized) key is present. */
  async exists(key: string): Promise<boolean> {
    return this.store.has(normalizeKey(key))
  }

  /** Metadata snapshot for a stored blob. */
  async getMetadata(key: string): Promise<BlobMetadata> {
    return getMetadata(this.store, key)
  }

  /** List blobs, optionally filtered by prefix. */
  async list(options: BlobListOptions = {}): Promise<BlobListResult> {
    return listBlobs(this.store, options)
  }

  /** Presigned URLs are meaningless in-memory; always an empty string. */
  async generatePresignedUrl(_key: string, _expirationSeconds: number = 3600): Promise<string> {
    return ''
  }

  /** Duplicate a blob under a new key. */
  async copy(sourceKey: string, destKey: string): Promise<BlobMetadata> {
    return copyBlob(this.store, sourceKey, destKey)
  }

  /** Sum of stored blob sizes in bytes. */
  async getTotalSize(): Promise<number> {
    return getTotalSize(this.store)
  }

  /** Number of stored blobs. */
  async getObjectCount(): Promise<number> {
    return getObjectCount(this.store)
  }
}

View File

@@ -0,0 +1,72 @@
import { DBALError } from '../../core/foundation/errors'
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../blob-storage'
import type { MemoryStore } from './store'
import { toBlobMetadata } from './serialization'
import { cleanupStoreEntry, getBlobOrThrow, normalizeKey } from './utils'
export const deleteBlob = async (store: MemoryStore, key: string): Promise<boolean> => {
const normalizedKey = normalizeKey(key)
if (!store.has(normalizedKey)) {
throw DBALError.notFound(`Blob not found: ${normalizedKey}`)
}
cleanupStoreEntry(store, normalizedKey)
return true
}
export const getMetadata = (store: MemoryStore, key: string): BlobMetadata => {
const normalizedKey = normalizeKey(key)
const blob = getBlobOrThrow(store, normalizedKey)
return toBlobMetadata(normalizedKey, blob)
}
/**
 * List blobs in insertion order, filtered by (normalized) prefix.
 *
 * Pagination now actually works: nextToken is the first matching key NOT
 * returned, and passing it back as options.continuationToken resumes the
 * listing from exactly that key. Previously the token was produced but
 * ignored on the way in, so a truncated listing could never be continued.
 */
export const listBlobs = (store: MemoryStore, options: BlobListOptions = {}): BlobListResult => {
  const prefix = options.prefix ? normalizeKey(options.prefix) : ''
  const maxKeys = options.maxKeys || 1000
  const items: BlobMetadata[] = []
  let nextToken: string | undefined
  let resumeFrom = options.continuationToken
  for (const [key, blob] of store.entries()) {
    if (resumeFrom !== undefined) {
      if (key !== resumeFrom) continue
      // The token key itself is the first candidate of this page.
      resumeFrom = undefined
    }
    if (!prefix || key.startsWith(prefix)) {
      if (items.length >= maxKeys) {
        nextToken = key
        break
      }
      items.push(toBlobMetadata(key, blob))
    }
  }
  return {
    items,
    nextToken,
    isTruncated: nextToken !== undefined,
  }
}
/** Duplicate a blob under a new key with a fresh lastModified timestamp. */
export const copyBlob = (store: MemoryStore, sourceKey: string, destKey: string): BlobMetadata => {
  const from = normalizeKey(sourceKey)
  const to = normalizeKey(destKey)
  const source = getBlobOrThrow(store, from)
  // Deep-copy the bytes so later writes to one blob never leak into the other.
  const duplicate = {
    ...source,
    data: Buffer.from(source.data),
    lastModified: new Date(),
  }
  store.set(to, duplicate)
  return toBlobMetadata(to, duplicate)
}
/** Sum of all stored blob sizes in bytes. */
export const getTotalSize = (store: MemoryStore): number => {
  let bytes = 0
  store.forEach((blob) => {
    bytes += blob.data.length
  })
  return bytes
}
/** Number of blobs currently stored. */
export const getObjectCount = (store: MemoryStore): number => store.size

View File

@@ -0,0 +1,43 @@
import { createHash } from 'crypto'
import type { UploadOptions, BlobMetadata } from '../blob-storage'
import type { BlobData } from './store'
export const generateEtag = (data: Buffer): string => `"${createHash('md5').update(data).digest('hex')}"`
/** Build the internal BlobData record for a freshly uploaded buffer. */
export const toBlobData = (data: Buffer, options: UploadOptions = {}): BlobData => {
  return {
    data,
    contentType: options.contentType || 'application/octet-stream',
    etag: generateEtag(data),
    lastModified: new Date(),
    metadata: options.metadata || {},
  }
}
/** Public metadata view of an internal BlobData record. */
export const toBlobMetadata = (key: string, blob: BlobData): BlobMetadata => {
  return {
    key,
    size: blob.data.length,
    contentType: blob.contentType,
    etag: blob.etag,
    lastModified: blob.lastModified,
    customMetadata: blob.metadata,
  }
}
/**
 * Drain a Web ReadableStream or a Node readable into a single Buffer.
 * The branch is chosen by duck-typing on the `getReader` method.
 */
export const collectStream = async (
  stream: ReadableStream | NodeJS.ReadableStream,
): Promise<Buffer> => {
  const chunks: Buffer[] = []
  if ('getReader' in stream) {
    // Web ReadableStream: drain via the reader protocol.
    const reader = stream.getReader()
    for (let next = await reader.read(); !next.done; next = await reader.read()) {
      chunks.push(Buffer.from(next.value))
    }
  } else {
    // Node.js stream: async-iterable.
    for await (const piece of stream) {
      chunks.push(Buffer.from(piece))
    }
  }
  return Buffer.concat(chunks)
}

View File

@@ -0,0 +1,11 @@
/** Internal record stored per key in the in-memory blob store. */
export interface BlobData {
  data: Buffer
  contentType: string
  etag: string
  lastModified: Date
  metadata: Record<string, string>
}
/** Key → blob map; keys are normalized by the operation modules. */
export type MemoryStore = Map<string, BlobData>
/** Fresh, empty store. */
export const createStore = (): MemoryStore => new Map<string, BlobData>()

View File

@@ -0,0 +1,34 @@
import { DBALError } from '../../core/foundation/errors'
import type { UploadOptions } from '../blob-storage'
import type { MemoryStore } from './store'
import { collectStream, toBlobData, toBlobMetadata } from './serialization'
import { normalizeKey } from './utils'
export const uploadBuffer = (
store: MemoryStore,
key: string,
data: Buffer | Uint8Array,
options: UploadOptions = {},
) => {
const normalizedKey = normalizeKey(key)
const buffer = Buffer.from(data)
if (!options.overwrite && store.has(normalizedKey)) {
throw DBALError.conflict(`Blob already exists: ${normalizedKey}`)
}
const blob = toBlobData(buffer, options)
store.set(normalizedKey, blob)
return toBlobMetadata(normalizedKey, blob)
}
export const uploadFromStream = async (
store: MemoryStore,
key: string,
stream: ReadableStream | NodeJS.ReadableStream,
options?: UploadOptions,
) => {
const buffer = await collectStream(stream)
return uploadBuffer(store, key, buffer, options)
}

View File

@@ -0,0 +1,18 @@
import { DBALError } from '../../core/foundation/errors'
import type { BlobData, MemoryStore } from './store'
export const normalizeKey = (key: string): string => key.replace(/^\/+/, '').trim()
/** Look up a blob by (already normalized) key; notFound when absent. */
export const getBlobOrThrow = (store: MemoryStore, key: string): BlobData => {
  const found = store.get(key)
  if (found === undefined) {
    throw DBALError.notFound(`Blob not found: ${key}`)
  }
  return found
}
/** Remove a store entry; silently a no-op for unknown keys. */
export const cleanupStoreEntry = (store: MemoryStore, key: string): void =>
  void store.delete(key)

View File

@@ -1,361 +0,0 @@
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
BlobStorageConfig,
} from '../blob-storage'
import { DBALError } from '../../core/foundation/errors'
/**
* S3-compatible blob storage implementation
* Uses AWS SDK v3 for S3 operations
* Compatible with MinIO and other S3-compatible services
*/
export class S3Storage implements BlobStorage {
private s3Client: any
private bucket: string
  /**
   * @param config must contain an `s3` section; throws synchronously when
   * it is missing.
   *
   * NOTE(review): initializeS3Client is async but is neither awaited nor
   * stored — this.s3Client may still be undefined when the first operation
   * runs, and a rejection here becomes an unhandled promise rejection.
   * Confirm callers tolerate this, or hold the promise and await it in
   * each method before use.
   */
  constructor(config: BlobStorageConfig) {
    if (!config.s3) {
      throw new Error('S3 configuration required')
    }
    this.bucket = config.s3.bucket
    // Lazy-load AWS SDK to avoid bundling if not used
    this.initializeS3Client(config.s3)
  }
private async initializeS3Client(s3Config: NonNullable<BlobStorageConfig['s3']>) {
try {
// Dynamic import to avoid bundling AWS SDK if not installed
// @ts-ignore - Optional dependency
const s3Module = await import('@aws-sdk/client-s3').catch(() => null)
if (!s3Module) {
throw new Error('@aws-sdk/client-s3 is not installed. Install it with: npm install @aws-sdk/client-s3')
}
const { S3Client } = s3Module
this.s3Client = new S3Client({
region: s3Config.region,
credentials: s3Config.accessKeyId && s3Config.secretAccessKey ? {
accessKeyId: s3Config.accessKeyId,
secretAccessKey: s3Config.secretAccessKey,
} : undefined,
endpoint: s3Config.endpoint,
forcePathStyle: s3Config.forcePathStyle,
})
} catch (error) {
throw new Error('AWS SDK @aws-sdk/client-s3 not installed. Install with: npm install @aws-sdk/client-s3')
}
}
async upload(
key: string,
data: Buffer | Uint8Array,
options: UploadOptions = {}
): Promise<BlobMetadata> {
try {
const { PutObjectCommand } = await import('@aws-sdk/client-s3')
const command = new PutObjectCommand({
Bucket: this.bucket,
Key: key,
Body: data,
ContentType: options.contentType,
Metadata: options.metadata,
})
const response = await this.s3Client.send(command)
return {
key,
size: data.length,
contentType: options.contentType || 'application/octet-stream',
etag: response.ETag || '',
lastModified: new Date(),
customMetadata: options.metadata,
}
} catch (error: any) {
if (error.name === 'NoSuchBucket') {
throw DBALError.notFound(`Bucket not found: ${this.bucket}`)
}
throw DBALError.internal(`S3 upload failed: ${error.message}`)
}
}
async uploadStream(
key: string,
stream: ReadableStream | NodeJS.ReadableStream,
size: number,
options: UploadOptions = {}
): Promise<BlobMetadata> {
try {
const { Upload } = await import('@aws-sdk/lib-storage')
const upload = new Upload({
client: this.s3Client,
params: {
Bucket: this.bucket,
Key: key,
Body: stream as any, // Type compatibility between Node.js and Web streams
ContentType: options.contentType,
Metadata: options.metadata,
},
})
const response = await upload.done()
return {
key,
size,
contentType: options.contentType || 'application/octet-stream',
etag: response.ETag || '',
lastModified: new Date(),
customMetadata: options.metadata,
}
} catch (error: any) {
throw DBALError.internal(`S3 stream upload failed: ${error.message}`)
}
}
async download(
key: string,
options: DownloadOptions = {}
): Promise<Buffer> {
try {
const { GetObjectCommand } = await import('@aws-sdk/client-s3')
const range = this.buildRangeHeader(options)
const command = new GetObjectCommand({
Bucket: this.bucket,
Key: key,
Range: range,
})
const response = await this.s3Client.send(command)
// Convert stream to buffer
const chunks: Uint8Array[] = []
for await (const chunk of response.Body as any) {
chunks.push(chunk)
}
return Buffer.concat(chunks)
} catch (error: any) {
if (error.name === 'NoSuchKey') {
throw DBALError.notFound(`Blob not found: ${key}`)
}
throw DBALError.internal(`S3 download failed: ${error.message}`)
}
}
async downloadStream(
key: string,
options: DownloadOptions = {}
): Promise<ReadableStream | NodeJS.ReadableStream> {
try {
const { GetObjectCommand } = await import('@aws-sdk/client-s3')
const range = this.buildRangeHeader(options)
const command = new GetObjectCommand({
Bucket: this.bucket,
Key: key,
Range: range,
})
const response = await this.s3Client.send(command)
return response.Body as any
} catch (error: any) {
if (error.name === 'NoSuchKey') {
throw DBALError.notFound(`Blob not found: ${key}`)
}
throw DBALError.internal(`S3 download stream failed: ${error.message}`)
}
}
async delete(key: string): Promise<boolean> {
try {
const { DeleteObjectCommand } = await import('@aws-sdk/client-s3')
const command = new DeleteObjectCommand({
Bucket: this.bucket,
Key: key,
})
await this.s3Client.send(command)
return true
} catch (error: any) {
throw DBALError.internal(`S3 delete failed: ${error.message}`)
}
}
async exists(key: string): Promise<boolean> {
try {
await this.getMetadata(key)
return true
} catch (error) {
if (error instanceof DBALError && error.code === 404) {
return false
}
throw error
}
}
async getMetadata(key: string): Promise<BlobMetadata> {
try {
const { HeadObjectCommand } = await import('@aws-sdk/client-s3')
const command = new HeadObjectCommand({
Bucket: this.bucket,
Key: key,
})
const response = await this.s3Client.send(command)
return {
key,
size: response.ContentLength || 0,
contentType: response.ContentType || 'application/octet-stream',
etag: response.ETag || '',
lastModified: response.LastModified || new Date(),
customMetadata: response.Metadata,
}
} catch (error: any) {
if (error.name === 'NotFound') {
throw DBALError.notFound(`Blob not found: ${key}`)
}
throw DBALError.internal(`S3 head object failed: ${error.message}`)
}
}
/** Lists one page of objects (max 1000 by default) under an optional prefix. */
async list(options: BlobListOptions = {}): Promise<BlobListResult> {
  try {
    const { ListObjectsV2Command } = await import('@aws-sdk/client-s3')
    const response = await this.s3Client.send(
      new ListObjectsV2Command({
        Bucket: this.bucket,
        Prefix: options.prefix,
        ContinuationToken: options.continuationToken,
        MaxKeys: options.maxKeys || 1000,
      })
    )
    const contents = response.Contents || []
    const items: BlobMetadata[] = contents.map((obj: any) => ({
      key: obj.Key || '',
      size: obj.Size || 0,
      // ListObjectsV2 does not report content type; callers needing it must HEAD the key.
      contentType: 'application/octet-stream',
      etag: obj.ETag || '',
      lastModified: obj.LastModified || new Date(),
    }))
    return {
      items,
      nextToken: response.NextContinuationToken,
      isTruncated: response.IsTruncated || false,
    }
  } catch (error: any) {
    throw DBALError.internal(`S3 list failed: ${error.message}`)
  }
}
/** Builds a time-limited GET URL for the key (default expiry: 1 hour). */
async generatePresignedUrl(
  key: string,
  expirationSeconds: number = 3600
): Promise<string> {
  try {
    const [{ GetObjectCommand }, { getSignedUrl }] = await Promise.all([
      import('@aws-sdk/client-s3'),
      import('@aws-sdk/s3-request-presigner'),
    ])
    const command = new GetObjectCommand({ Bucket: this.bucket, Key: key })
    return await getSignedUrl(this.s3Client, command, {
      expiresIn: expirationSeconds,
    })
  } catch (error: any) {
    throw DBALError.internal(`S3 presigned URL generation failed: ${error.message}`)
  }
}
/**
 * Server-side copy within the bucket; fresh metadata is re-read via HEAD.
 * @throws DBALError.notFound when the source key does not exist.
 */
async copy(
  sourceKey: string,
  destKey: string
): Promise<BlobMetadata> {
  try {
    const { CopyObjectCommand } = await import('@aws-sdk/client-s3')
    const command = new CopyObjectCommand({
      Bucket: this.bucket,
      CopySource: `${this.bucket}/${sourceKey}`,
      Key: destKey,
    })
    // The copy result itself is unused (was an unused `response` local);
    // authoritative metadata comes from getMetadata below.
    await this.s3Client.send(command)
    return await this.getMetadata(destKey)
  } catch (error: any) {
    if (error.name === 'NoSuchKey') {
      throw DBALError.notFound(`Source blob not found: ${sourceKey}`)
    }
    throw DBALError.internal(`S3 copy failed: ${error.message}`)
  }
}
async getTotalSize(): Promise<number> {
// Note: This requires listing all objects and summing sizes
// For large buckets, this can be expensive
const result = await this.list({ maxKeys: 1000 })
let total = result.items.reduce((sum, item) => sum + item.size, 0)
// Handle pagination if needed
let nextToken = result.nextToken
while (nextToken) {
const pageResult = await this.list({
maxKeys: 1000,
continuationToken: nextToken
})
total += pageResult.items.reduce((sum, item) => sum + item.size, 0)
nextToken = pageResult.nextToken
}
return total
}
/** Number of stored objects, computed by paging through the full listing. */
async getObjectCount(): Promise<number> {
  let count = 0
  let token: string | undefined
  do {
    const page = await this.list({ maxKeys: 1000, continuationToken: token })
    count += page.items.length
    token = page.nextToken
  } while (token)
  return count
}
/**
 * Translates offset/length into an HTTP Range header value,
 * or undefined when no partial read was requested.
 */
private buildRangeHeader(options: DownloadOptions): string | undefined {
  const { offset, length } = options
  if (offset === undefined && length === undefined) {
    return undefined
  }
  const start = offset ?? 0
  if (length === undefined) {
    return `bytes=${start}-`
  }
  // HTTP ranges are inclusive, hence the -1.
  return `bytes=${start}-${start + length - 1}`
}
}

View File

@@ -0,0 +1,39 @@
import type { BlobStorageConfig } from '../../blob-storage'
/** Connection state shared by the stateless S3 operation helpers. */
export interface S3Context {
// Target bucket name, taken from BlobStorageConfig.s3.bucket.
bucket: string
// AWS SDK v3 S3Client instance; typed `any` because the SDK is an optional dependency.
s3Client: any
}
/**
 * Builds the shared S3 context from config.
 *
 * Fix: the previous version wrapped everything in a try/catch that replaced
 * EVERY failure — including its own "not installed" throw and any S3Client
 * constructor error — with a generic "SDK not installed" message. Now only a
 * genuinely missing SDK yields that message; other errors propagate unchanged.
 *
 * @throws Error when config.s3 is absent or @aws-sdk/client-s3 is not installed.
 */
export async function createS3Context(config: BlobStorageConfig): Promise<S3Context> {
  if (!config.s3) {
    throw new Error('S3 configuration required')
  }
  const { bucket, ...s3Config } = config.s3
  // @ts-ignore - optional dependency
  const s3Module = await import('@aws-sdk/client-s3').catch(() => null)
  if (!s3Module) {
    throw new Error('@aws-sdk/client-s3 is not installed. Install it with: npm install @aws-sdk/client-s3')
  }
  const { S3Client } = s3Module
  return {
    bucket,
    s3Client: new S3Client({
      region: s3Config.region,
      // Explicit credentials only when both halves are supplied; otherwise
      // fall back to the SDK's default provider chain.
      credentials: s3Config.accessKeyId && s3Config.secretAccessKey ? {
        accessKeyId: s3Config.accessKeyId,
        secretAccessKey: s3Config.secretAccessKey,
      } : undefined,
      endpoint: s3Config.endpoint,
      forcePathStyle: s3Config.forcePathStyle,
    })
  }
}

View File

@@ -0,0 +1,114 @@
import type {
BlobStorage,
BlobMetadata,
BlobListResult,
UploadOptions,
DownloadOptions,
BlobListOptions,
BlobStorageConfig,
} from '../../blob-storage'
import { DBALError } from '../../core/foundation/errors'
import type { S3Context } from './client'
import { createS3Context } from './client'
import { downloadBuffer, downloadStream } from './operations/downloads'
import { listBlobs, sumSizes, countObjects } from './operations/listing'
import { getMetadata, generatePresignedUrl } from './operations/metadata'
import { uploadBuffer, uploadStream } from './operations/uploads'
import { copyObject, deleteObject } from './operations/maintenance'
/**
 * S3-backed BlobStorage that delegates each operation to a stateless helper,
 * all sharing a single lazily-created S3Context.
 *
 * Fix: the context promise was created eagerly in the constructor, so a bad
 * config (or missing SDK) produced an unhandled promise rejection whenever the
 * instance was constructed but never used. Creation is now deferred to first use.
 */
export class S3Storage implements BlobStorage {
  private readonly config: BlobStorageConfig
  private contextPromise: Promise<S3Context> | null = null
  constructor(config: BlobStorageConfig) {
    this.config = config
  }
  /** Memoized context: created on first call, then reused (a rejection is cached too, matching prior semantics). */
  private context(): Promise<S3Context> {
    if (!this.contextPromise) {
      this.contextPromise = createS3Context(this.config)
    }
    return this.contextPromise
  }
  async upload(
    key: string,
    data: Buffer | Uint8Array,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    return uploadBuffer(await this.context(), key, data, options)
  }
  async uploadStream(
    key: string,
    stream: ReadableStream | NodeJS.ReadableStream,
    size: number,
    options: UploadOptions = {}
  ): Promise<BlobMetadata> {
    return uploadStream(await this.context(), key, stream, size, options)
  }
  async download(
    key: string,
    options: DownloadOptions = {}
  ): Promise<Buffer> {
    return downloadBuffer(await this.context(), key, options)
  }
  async downloadStream(
    key: string,
    options: DownloadOptions = {}
  ): Promise<ReadableStream | NodeJS.ReadableStream> {
    return downloadStream(await this.context(), key, options)
  }
  async delete(key: string): Promise<boolean> {
    return deleteObject(await this.context(), key)
  }
  /** Existence probe via HEAD: a 404 means "no", anything else propagates. */
  async exists(key: string): Promise<boolean> {
    try {
      await this.getMetadata(key)
      return true
    } catch (error) {
      if (error instanceof DBALError && error.code === 404) {
        return false
      }
      throw error
    }
  }
  async getMetadata(key: string): Promise<BlobMetadata> {
    return getMetadata(await this.context(), key)
  }
  async list(options: BlobListOptions = {}): Promise<BlobListResult> {
    return listBlobs(await this.context(), options)
  }
  async generatePresignedUrl(
    key: string,
    expirationSeconds: number = 3600
  ): Promise<string> {
    return generatePresignedUrl(await this.context(), key, expirationSeconds)
  }
  async copy(sourceKey: string, destKey: string): Promise<BlobMetadata> {
    return copyObject(await this.context(), sourceKey, destKey)
  }
  async getTotalSize(): Promise<number> {
    return sumSizes(await this.context())
  }
  async getObjectCount(): Promise<number> {
    return countObjects(await this.context())
  }
}

View File

@@ -0,0 +1,58 @@
import type { DownloadOptions } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import { buildRangeHeader } from '../range'
import type { S3Context } from '../client'
/**
 * Downloads the (optionally ranged) object fully into memory.
 * @throws DBALError.notFound when the key does not exist.
 */
export async function downloadBuffer(
  context: S3Context,
  key: string,
  options: DownloadOptions
): Promise<Buffer> {
  try {
    const { GetObjectCommand } = await import('@aws-sdk/client-s3')
    const response = await context.s3Client.send(
      new GetObjectCommand({
        Bucket: context.bucket,
        Key: key,
        Range: buildRangeHeader(options),
      })
    )
    const parts: Uint8Array[] = []
    for await (const part of response.Body as any) {
      parts.push(part)
    }
    return Buffer.concat(parts)
  } catch (error: any) {
    if (error.name === 'NoSuchKey') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`S3 download failed: ${error.message}`)
  }
}
/** Returns the raw S3 response body stream for an (optionally ranged) object. */
export async function downloadStream(
  context: S3Context,
  key: string,
  options: DownloadOptions
): Promise<ReadableStream | NodeJS.ReadableStream> {
  try {
    const { GetObjectCommand } = await import('@aws-sdk/client-s3')
    const response = await context.s3Client.send(
      new GetObjectCommand({
        Bucket: context.bucket,
        Key: key,
        Range: buildRangeHeader(options),
      })
    )
    return response.Body as any
  } catch (error: any) {
    if (error.name === 'NoSuchKey') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`S3 download stream failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,71 @@
import type { BlobListOptions, BlobListResult, BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { S3Context } from '../client'
/** Lists one page (max 1000 by default) of objects, mapped to BlobMetadata. */
export async function listBlobs(
  context: S3Context,
  options: BlobListOptions
): Promise<BlobListResult> {
  try {
    const { ListObjectsV2Command } = await import('@aws-sdk/client-s3')
    const response = await context.s3Client.send(
      new ListObjectsV2Command({
        Bucket: context.bucket,
        Prefix: options.prefix,
        ContinuationToken: options.continuationToken,
        MaxKeys: options.maxKeys || 1000,
      })
    )
    const contents = response.Contents || []
    const items: BlobMetadata[] = contents.map((obj: any) => ({
      key: obj.Key || '',
      size: obj.Size || 0,
      // ListObjectsV2 does not report content type.
      contentType: 'application/octet-stream',
      etag: obj.ETag || '',
      lastModified: obj.LastModified || new Date(),
    }))
    return {
      items,
      nextToken: response.NextContinuationToken,
      isTruncated: response.IsTruncated || false,
    }
  } catch (error: any) {
    throw DBALError.internal(`S3 list failed: ${error.message}`)
  }
}
/** Total stored bytes: pages through the entire listing (expensive on large buckets). */
export async function sumSizes(context: S3Context): Promise<number> {
  let total = 0
  let token: string | undefined
  do {
    const page = await listBlobs(context, { maxKeys: 1000, continuationToken: token })
    for (const item of page.items) {
      total += item.size
    }
    token = page.nextToken
  } while (token)
  return total
}
/** Total object count: pages through the entire listing. */
export async function countObjects(context: S3Context): Promise<number> {
  let count = 0
  let token: string | undefined
  do {
    const page = await listBlobs(context, { maxKeys: 1000, continuationToken: token })
    count += page.items.length
    token = page.nextToken
  } while (token)
  return count
}

View File

@@ -0,0 +1,48 @@
import type { BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { S3Context } from '../client'
import { getMetadata } from './metadata'
/** Deletes one object; S3 DeleteObject is idempotent, so this resolves true even for absent keys. */
export async function deleteObject(
  context: S3Context,
  key: string
): Promise<boolean> {
  try {
    const { DeleteObjectCommand } = await import('@aws-sdk/client-s3')
    await context.s3Client.send(
      new DeleteObjectCommand({ Bucket: context.bucket, Key: key })
    )
    return true
  } catch (error: any) {
    throw DBALError.internal(`S3 delete failed: ${error.message}`)
  }
}
/**
 * Server-side copy within the bucket.
 *
 * Fix: CopySource must be URL-encoded (AWS CopyObject requirement); keys with
 * spaces or reserved characters previously failed. Each path segment is
 * encoded while '/' separators are preserved.
 *
 * @throws DBALError.notFound when the source key does not exist.
 */
export async function copyObject(
  context: S3Context,
  sourceKey: string,
  destKey: string
): Promise<BlobMetadata> {
  try {
    const { CopyObjectCommand } = await import('@aws-sdk/client-s3')
    const encodedSource = sourceKey.split('/').map(encodeURIComponent).join('/')
    const command = new CopyObjectCommand({
      Bucket: context.bucket,
      CopySource: `${context.bucket}/${encodedSource}`,
      Key: destKey,
    })
    await context.s3Client.send(command)
    return await getMetadata(context, destKey)
  } catch (error: any) {
    if (error.name === 'NoSuchKey') {
      throw DBALError.notFound(`Source blob not found: ${sourceKey}`)
    }
    throw DBALError.internal(`S3 copy failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,55 @@
import type { BlobMetadata } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { S3Context } from '../client'
/**
 * HEADs the object and maps the S3 response onto BlobMetadata.
 * @throws DBALError.notFound when the key does not exist (HEAD reports 'NotFound').
 */
export async function getMetadata(
  context: S3Context,
  key: string
): Promise<BlobMetadata> {
  try {
    const { HeadObjectCommand } = await import('@aws-sdk/client-s3')
    const head = await context.s3Client.send(
      new HeadObjectCommand({ Bucket: context.bucket, Key: key })
    )
    const metadata: BlobMetadata = {
      key,
      size: head.ContentLength || 0,
      contentType: head.ContentType || 'application/octet-stream',
      etag: head.ETag || '',
      lastModified: head.LastModified || new Date(),
      customMetadata: head.Metadata,
    }
    return metadata
  } catch (error: any) {
    if (error.name === 'NotFound') {
      throw DBALError.notFound(`Blob not found: ${key}`)
    }
    throw DBALError.internal(`S3 head object failed: ${error.message}`)
  }
}
/** Builds a time-limited GET URL for the key. */
export async function generatePresignedUrl(
  context: S3Context,
  key: string,
  expirationSeconds: number
): Promise<string> {
  try {
    const [{ GetObjectCommand }, { getSignedUrl }] = await Promise.all([
      import('@aws-sdk/client-s3'),
      import('@aws-sdk/s3-request-presigner'),
    ])
    const command = new GetObjectCommand({ Bucket: context.bucket, Key: key })
    return await getSignedUrl(context.s3Client, command, {
      expiresIn: expirationSeconds,
    })
  } catch (error: any) {
    throw DBALError.internal(`S3 presigned URL generation failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,74 @@
import type { BlobMetadata, UploadOptions } from '../../../blob-storage'
import { DBALError } from '../../../core/foundation/errors'
import type { S3Context } from '../client'
/**
 * Uploads an in-memory payload via PutObject and synthesizes the resulting metadata
 * (size from the buffer, lastModified from local clock).
 * @throws DBALError.notFound when the target bucket is missing.
 */
export async function uploadBuffer(
  context: S3Context,
  key: string,
  data: Buffer | Uint8Array,
  options: UploadOptions
): Promise<BlobMetadata> {
  try {
    const { PutObjectCommand } = await import('@aws-sdk/client-s3')
    const result = await context.s3Client.send(
      new PutObjectCommand({
        Bucket: context.bucket,
        Key: key,
        Body: data,
        ContentType: options.contentType,
        Metadata: options.metadata,
      })
    )
    return {
      key,
      size: data.length,
      contentType: options.contentType || 'application/octet-stream',
      etag: result.ETag || '',
      lastModified: new Date(),
      customMetadata: options.metadata,
    }
  } catch (error: any) {
    if (error.name === 'NoSuchBucket') {
      throw DBALError.notFound(`Bucket not found: ${context.bucket}`)
    }
    throw DBALError.internal(`S3 upload failed: ${error.message}`)
  }
}
/**
 * Streams a payload to S3 using the multipart-capable Upload helper from
 * @aws-sdk/lib-storage. The caller-supplied size is trusted for the result.
 */
export async function uploadStream(
  context: S3Context,
  key: string,
  stream: ReadableStream | NodeJS.ReadableStream,
  size: number,
  options: UploadOptions
): Promise<BlobMetadata> {
  try {
    const { Upload } = await import('@aws-sdk/lib-storage')
    const managedUpload = new Upload({
      client: context.s3Client,
      params: {
        Bucket: context.bucket,
        Key: key,
        Body: stream as any,
        ContentType: options.contentType,
        Metadata: options.metadata,
      },
    })
    const result = await managedUpload.done()
    return {
      key,
      size,
      contentType: options.contentType || 'application/octet-stream',
      etag: result.ETag || '',
      lastModified: new Date(),
      customMetadata: options.metadata,
    }
  } catch (error: any) {
    throw DBALError.internal(`S3 stream upload failed: ${error.message}`)
  }
}

View File

@@ -0,0 +1,12 @@
import type { DownloadOptions } from '../../blob-storage'
/**
 * Translates offset/length into an HTTP Range header value,
 * or undefined when no partial read was requested.
 */
export function buildRangeHeader(options: DownloadOptions): string | undefined {
  const { offset, length } = options
  if (offset === undefined && length === undefined) {
    return undefined
  }
  const start = offset ?? 0
  if (length === undefined) {
    return `bytes=${start}-`
  }
  // HTTP byte ranges are inclusive, hence the -1.
  return `bytes=${start}-${start + length - 1}`
}

View File

@@ -1,260 +1,5 @@
/**
* Tenant-Aware Blob Storage
*
* Wraps BlobStorage with multi-tenant support including:
* - Namespace isolation
* - Access control
* - Quota management
* - Virtual root directories
*/
import { BlobStorage, BlobMetadata, UploadOptions, DownloadOptions, BlobListOptions, BlobListResult } from '../blob-storage'
import { TenantContext, TenantManager } from '../core/tenant-context'
import { DBALError } from '../../core/foundation/errors'
import { Readable } from 'stream'
/**
 * Decorator over a BlobStorage that enforces tenant isolation:
 * every key is prefixed with the tenant namespace, each operation is
 * permission-checked, and uploads/deletes/copies adjust quota usage.
 * NOTE(review): quota updates are not transactional with storage calls —
 * a failure between them can leave counters skewed; confirm acceptable.
 */
export class TenantAwareBlobStorage implements BlobStorage {
constructor(
private readonly baseStorage: BlobStorage,
private readonly tenantManager: TenantManager,
private readonly tenantId: string,
private readonly userId: string
) {}
// Fetches the tenant context (permissions + quota) for every call; no caching here.
private async getContext(): Promise<TenantContext> {
return this.tenantManager.getTenantContext(this.tenantId, this.userId)
}
// Prefixes a caller-visible key with the tenant namespace.
private getScopedKey(key: string, namespace: string): string {
// Remove leading slash if present
const cleanKey = key.startsWith('/') ? key.substring(1) : key
return `${namespace}${cleanKey}`
}
// Strips the namespace prefix to recover the caller-visible key.
private unscopeKey(scopedKey: string, namespace: string): string {
if (scopedKey.startsWith(namespace)) {
return scopedKey.substring(namespace.length)
}
return scopedKey
}
/** Upload after write-permission and quota checks; increments usage on success. */
async upload(key: string, data: Buffer, options?: UploadOptions): Promise<BlobMetadata> {
const context = await this.getContext()
// Check permissions
if (!context.canWrite('blob')) {
throw DBALError.forbidden('Permission denied: cannot upload blobs')
}
// Check quota
const size = data.length
if (!context.canUploadBlob(size)) {
throw DBALError.rateLimitExceeded()
}
const scopedKey = this.getScopedKey(key, context.namespace)
const metadata = await this.baseStorage.upload(scopedKey, data, options)
// Update quota
await this.tenantManager.updateBlobUsage(this.tenantId, size, 1)
// Return metadata with unscoped key
return {
...metadata,
key
}
}
/** Streaming upload; quota is checked against the caller-declared size. */
async uploadStream(key: string, stream: Readable, size: number, options?: UploadOptions): Promise<BlobMetadata> {
const context = await this.getContext()
// Check permissions
if (!context.canWrite('blob')) {
throw DBALError.forbidden('Permission denied: cannot upload blobs')
}
// Check quota
if (!context.canUploadBlob(size)) {
throw DBALError.rateLimitExceeded()
}
const scopedKey = this.getScopedKey(key, context.namespace)
const metadata = await this.baseStorage.uploadStream(scopedKey, stream, size, options)
// Update quota
await this.tenantManager.updateBlobUsage(this.tenantId, size, 1)
// Return metadata with unscoped key
return {
...metadata,
key
}
}
/** Full download; read permission required. NOTE(review): DownloadOptions are not forwarded here. */
async download(key: string): Promise<Buffer> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot download blobs')
}
const scopedKey = this.getScopedKey(key, context.namespace)
return this.baseStorage.download(scopedKey)
}
/** Streaming download; read permission required. */
async downloadStream(key: string, options?: DownloadOptions): Promise<ReadableStream | NodeJS.ReadableStream> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot download blobs')
}
const scopedKey = this.getScopedKey(key, context.namespace)
return this.baseStorage.downloadStream(scopedKey, options)
}
/** Delete with quota decrement; falls back to a best-effort delete if metadata is unavailable. */
async delete(key: string): Promise<boolean> {
const context = await this.getContext()
// Check permissions
if (!context.canDelete('blob')) {
throw DBALError.forbidden('Permission denied: cannot delete blobs')
}
const scopedKey = this.getScopedKey(key, context.namespace)
// Get metadata before deletion to update quota
try {
const metadata = await this.baseStorage.getMetadata(scopedKey)
const deleted = await this.baseStorage.delete(scopedKey)
if (deleted) {
// Update quota
await this.tenantManager.updateBlobUsage(this.tenantId, -metadata.size, -1)
}
return deleted
} catch (error) {
// If metadata fetch fails, try delete anyway
return this.baseStorage.delete(scopedKey)
}
}
/** Existence check within the tenant namespace; read permission required. */
async exists(key: string): Promise<boolean> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot check blob existence')
}
const scopedKey = this.getScopedKey(key, context.namespace)
return this.baseStorage.exists(scopedKey)
}
/** Copy within the namespace; needs read+write, counts the copy as a new upload. */
async copy(sourceKey: string, destKey: string): Promise<BlobMetadata> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob') || !context.canWrite('blob')) {
throw DBALError.forbidden('Permission denied: cannot copy blobs')
}
// Get source metadata to check quota
const sourceScoped = this.getScopedKey(sourceKey, context.namespace)
const sourceMetadata = await this.baseStorage.getMetadata(sourceScoped)
// Check quota for destination
if (!context.canUploadBlob(sourceMetadata.size)) {
throw DBALError.rateLimitExceeded()
}
const destScoped = this.getScopedKey(destKey, context.namespace)
const metadata = await this.baseStorage.copy(sourceScoped, destScoped)
// Update quota
await this.tenantManager.updateBlobUsage(this.tenantId, sourceMetadata.size, 1)
return {
...metadata,
key: destKey
}
}
/** Namespace-scoped listing: prefixes are scoped on the way in, keys unscoped on the way out. */
async list(options?: BlobListOptions): Promise<BlobListResult> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot list blobs')
}
// Add namespace prefix to options
const scopedOptions: BlobListOptions = {
...options,
prefix: options?.prefix
? this.getScopedKey(options.prefix, context.namespace)
: context.namespace
}
const result = await this.baseStorage.list(scopedOptions)
// Unscope keys in results
return {
...result,
items: result.items.map(item => ({
...item,
key: this.unscopeKey(item.key, context.namespace)
}))
}
}
/** Metadata lookup; the returned key is the caller-visible (unscoped) one. */
async getMetadata(key: string): Promise<BlobMetadata> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot get blob metadata')
}
const scopedKey = this.getScopedKey(key, context.namespace)
const metadata = await this.baseStorage.getMetadata(scopedKey)
return {
...metadata,
key
}
}
/** Tenant usage snapshot taken from quota counters, not from a live listing. */
async getStats(): Promise<{ count: number; totalSize: number }> {
const context = await this.getContext()
// Return tenant's current usage from quota
return {
count: context.quota.currentBlobCount,
totalSize: context.quota.currentBlobStorageBytes
}
}
/** Presigned URL for a namespaced key; read permission required. */
async generatePresignedUrl(key: string, expiresIn: number): Promise<string> {
const context = await this.getContext()
// Check permissions
if (!context.canRead('blob')) {
throw DBALError.forbidden('Permission denied: cannot generate presigned URL')
}
const scopedKey = this.getScopedKey(key, context.namespace)
return this.baseStorage.generatePresignedUrl(scopedKey, expiresIn)
}
// NOTE(review): the two totals below are bucket-wide (no permission check,
// no namespace filter) — confirm they are meant to span all tenants.
async getTotalSize(): Promise<number> {
return this.baseStorage.getTotalSize()
}
async getObjectCount(): Promise<number> {
return this.baseStorage.getObjectCount()
}
}
// Barrel re-exports for the decomposed tenant-aware storage modules.
export { TenantAwareBlobStorage } from './tenant-aware-storage/index'
export type { TenantAwareDeps } from './tenant-aware-storage/context'
export { scopeKey, unscopeKey } from './tenant-aware-storage/context'
export { ensurePermission, resolveTenantContext } from './tenant-aware-storage/tenant-context'
export { auditCopy, auditDeletion, auditUpload } from './tenant-aware-storage/audit-hooks'

View File

@@ -0,0 +1,17 @@
import type { TenantAwareDeps } from './context'
/** Applies a blob-usage delta (bytes, object count) to the tenant's quota accounting. */
async function applyUsageDelta(deps: TenantAwareDeps, bytesDelta: number, countDelta: number): Promise<void> {
  await deps.tenantManager.updateBlobUsage(deps.tenantId, bytesDelta, countDelta)
}
/** Record a completed upload: +bytes, +1 object. */
export const auditUpload = (deps: TenantAwareDeps, sizeBytes: number): Promise<void> =>
  applyUsageDelta(deps, sizeBytes, 1)
/** Record a completed deletion: -bytes, -1 object. */
export const auditDeletion = (deps: TenantAwareDeps, sizeBytes: number): Promise<void> =>
  applyUsageDelta(deps, -sizeBytes, -1)
/** Record a completed copy: the destination counts as a new upload. */
export const auditCopy = (deps: TenantAwareDeps, sizeBytes: number): Promise<void> =>
  applyUsageDelta(deps, sizeBytes, 1)

View File

@@ -0,0 +1,21 @@
import type { TenantManager } from '../../core/foundation/tenant-context'
import type { BlobStorage } from '../blob-storage'
/** Dependencies shared by all tenant-aware storage helper functions. */
export interface TenantAwareDeps {
// Underlying, tenant-agnostic blob store that actually holds the data.
baseStorage: BlobStorage
// Source of tenant contexts (permissions/quota) and quota-usage updates.
tenantManager: TenantManager
tenantId: string
userId: string
}
/** Prefixes a caller-visible key with the tenant namespace, dropping a single leading '/'. */
export const scopeKey = (key: string, namespace: string): string => {
  const normalized = key.replace(/^\//, '')
  return namespace + normalized
}
/** Strips the namespace prefix to recover the caller-visible key; passes through unprefixed keys. */
export const unscopeKey = (scopedKey: string, namespace: string): string =>
  scopedKey.startsWith(namespace) ? scopedKey.slice(namespace.length) : scopedKey

View File

@@ -0,0 +1,66 @@
import type { BlobListOptions, BlobListResult, BlobMetadata, BlobStorage, DownloadOptions, UploadOptions } from '../blob-storage'
import type { TenantManager } from '../../core/foundation/tenant-context'
import type { TenantAwareDeps } from './context'
import { deleteBlob, exists, copyBlob, getStats } from './mutations'
import { downloadBuffer, downloadStream, generatePresignedUrl, getMetadata, listBlobs } from './reads'
import { uploadBuffer, uploadStream } from './uploads'
/**
 * Thin facade implementing BlobStorage by delegating every operation to the
 * tenant-aware helper functions, each of which performs its own permission
 * check, key scoping, and quota accounting against the shared deps bundle.
 */
export class TenantAwareBlobStorage implements BlobStorage {
private readonly deps: TenantAwareDeps
constructor(baseStorage: BlobStorage, tenantManager: TenantManager, tenantId: string, userId: string) {
this.deps = { baseStorage, tenantManager, tenantId, userId }
}
async upload(key: string, data: Buffer, options?: UploadOptions): Promise<BlobMetadata> {
return uploadBuffer(this.deps, key, data, options)
}
async uploadStream(key: string, stream: NodeJS.ReadableStream, size: number, options?: UploadOptions): Promise<BlobMetadata> {
return uploadStream(this.deps, key, stream, size, options)
}
// NOTE(review): DownloadOptions are not accepted here, so ranged downloads
// cannot pass through this wrapper — confirm intentional.
async download(key: string): Promise<Buffer> {
return downloadBuffer(this.deps, key)
}
async downloadStream(key: string, options?: DownloadOptions): Promise<ReadableStream | NodeJS.ReadableStream> {
return downloadStream(this.deps, key, options)
}
async delete(key: string): Promise<boolean> {
return deleteBlob(this.deps, key)
}
async exists(key: string): Promise<boolean> {
return exists(this.deps, key)
}
async copy(sourceKey: string, destKey: string): Promise<BlobMetadata> {
return copyBlob(this.deps, sourceKey, destKey)
}
async list(options?: BlobListOptions): Promise<BlobListResult> {
return listBlobs(this.deps, options)
}
async getMetadata(key: string): Promise<BlobMetadata> {
return getMetadata(this.deps, key)
}
// Usage snapshot from quota counters, not a live listing.
async getStats(): Promise<{ count: number; totalSize: number }> {
return getStats(this.deps)
}
async generatePresignedUrl(key: string, expiresIn: number): Promise<string> {
return generatePresignedUrl(this.deps, key, expiresIn)
}
// NOTE(review): the two totals below are bucket-wide (no tenant scoping) — confirm intended.
async getTotalSize(): Promise<number> {
return this.deps.baseStorage.getTotalSize()
}
async getObjectCount(): Promise<number> {
return this.deps.baseStorage.getObjectCount()
}
}

View File

@@ -0,0 +1,69 @@
import { DBALError } from '../../core/foundation/errors'
import type { BlobMetadata } from '../blob-storage'
import { auditCopy, auditDeletion } from './audit-hooks'
import type { TenantAwareDeps } from './context'
import { scopeKey } from './context'
import { ensurePermission, resolveTenantContext } from './tenant-context'
/**
 * Permission-checked delete with quota decrement.
 * If anything in the metadata/delete/audit sequence fails, falls back to a
 * best-effort delete without adjusting quota accounting.
 */
export const deleteBlob = async (deps: TenantAwareDeps, key: string): Promise<boolean> => {
  const context = await resolveTenantContext(deps)
  ensurePermission(context, 'delete')
  const scopedKey = scopeKey(key, context.namespace)
  try {
    const { size } = await deps.baseStorage.getMetadata(scopedKey)
    const deleted = await deps.baseStorage.delete(scopedKey)
    if (deleted) {
      await auditDeletion(deps, size)
    }
    return deleted
  } catch {
    return deps.baseStorage.delete(scopedKey)
  }
}
export const exists = async (deps: TenantAwareDeps, key: string): Promise<boolean> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'read')
const scopedKey = scopeKey(key, context.namespace)
return deps.baseStorage.exists(scopedKey)
}
/**
 * Copy within the tenant namespace. Requires read+write, validates quota
 * against the source size, and records the copy as a new upload.
 */
export const copyBlob = async (
  deps: TenantAwareDeps,
  sourceKey: string,
  destKey: string,
): Promise<BlobMetadata> => {
  const context = await resolveTenantContext(deps)
  ensurePermission(context, 'read')
  ensurePermission(context, 'write')
  const scopedSource = scopeKey(sourceKey, context.namespace)
  const { size: sourceSize } = await deps.baseStorage.getMetadata(scopedSource)
  if (!context.canUploadBlob(sourceSize)) {
    throw DBALError.rateLimitExceeded()
  }
  const copied = await deps.baseStorage.copy(scopedSource, scopeKey(destKey, context.namespace))
  await auditCopy(deps, sourceSize)
  // Surface the caller-visible (unscoped) destination key.
  return { ...copied, key: destKey }
}
/** Tenant usage snapshot read from quota counters (no live listing). */
export const getStats = async (deps: TenantAwareDeps) => {
  const { quota } = await resolveTenantContext(deps)
  return {
    count: quota.currentBlobCount,
    totalSize: quota.currentBlobStorageBytes,
  }
}

View File

@@ -0,0 +1,72 @@
import type { DownloadOptions, BlobMetadata, BlobListOptions, BlobListResult } from '../blob-storage'
import type { TenantAwareDeps } from './context'
import { scopeKey, unscopeKey } from './context'
import { ensurePermission, resolveTenantContext } from './tenant-context'
export const downloadBuffer = async (deps: TenantAwareDeps, key: string): Promise<Buffer> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'read')
const scopedKey = scopeKey(key, context.namespace)
return deps.baseStorage.download(scopedKey)
}
export const downloadStream = async (
deps: TenantAwareDeps,
key: string,
options?: DownloadOptions,
): Promise<ReadableStream | NodeJS.ReadableStream> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'read')
const scopedKey = scopeKey(key, context.namespace)
return deps.baseStorage.downloadStream(scopedKey, options)
}
/**
 * Namespace-scoped listing: the prefix is scoped before the call and every
 * returned key is unscoped so callers never see the tenant namespace.
 */
export const listBlobs = async (
  deps: TenantAwareDeps,
  options: BlobListOptions = {},
): Promise<BlobListResult> => {
  const context = await resolveTenantContext(deps)
  ensurePermission(context, 'read')
  const prefix = options.prefix
    ? scopeKey(options.prefix, context.namespace)
    : context.namespace
  const page = await deps.baseStorage.list({ ...options, prefix })
  const items = page.items.map(item => ({
    ...item,
    key: unscopeKey(item.key, context.namespace),
  }))
  return { ...page, items }
}
export const getMetadata = async (deps: TenantAwareDeps, key: string): Promise<BlobMetadata> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'read')
const scopedKey = scopeKey(key, context.namespace)
const metadata = await deps.baseStorage.getMetadata(scopedKey)
return {
...metadata,
key,
}
}
export const generatePresignedUrl = async (
deps: TenantAwareDeps,
key: string,
expiresIn: number,
): Promise<string> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'read')
const scopedKey = scopeKey(key, context.namespace)
return deps.baseStorage.generatePresignedUrl(scopedKey, expiresIn)
}

View File

@@ -0,0 +1,21 @@
import { DBALError } from '../../core/foundation/errors'
import type { TenantContext } from '../../core/foundation/tenant-context'
import type { TenantAwareDeps } from './context'
/** Fetches the tenant context (permissions + quota) for the deps' tenant/user pair. */
export const resolveTenantContext = async (deps: TenantAwareDeps): Promise<TenantContext> => {
  const { tenantManager, tenantId, userId } = deps
  return tenantManager.getTenantContext(tenantId, userId)
}
/**
 * Throws DBALError.forbidden unless the context grants the given blob action.
 * (The previous identity `verbs` map is folded away; messages are unchanged
 * because the verb always equaled the action name.)
 */
export const ensurePermission = (context: TenantContext, action: 'read' | 'write' | 'delete'): void => {
  let allowed: boolean
  switch (action) {
    case 'read':
      allowed = context.canRead('blob')
      break
    case 'write':
      allowed = context.canWrite('blob')
      break
    default:
      allowed = context.canDelete('blob')
      break
  }
  if (!allowed) {
    throw DBALError.forbidden(`Permission denied: cannot ${action} blobs`)
  }
}

View File

@@ -0,0 +1,53 @@
import { DBALError } from '../../core/foundation/errors'
import { auditUpload } from './audit-hooks'
import type { TenantAwareDeps } from './context'
import { scopeKey } from './context'
import { ensurePermission, resolveTenantContext } from './tenant-context'
import type { UploadOptions, BlobMetadata } from '../blob-storage'
export const uploadBuffer = async (
deps: TenantAwareDeps,
key: string,
data: Buffer,
options?: UploadOptions,
): Promise<BlobMetadata> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'write')
if (!context.canUploadBlob(data.length)) {
throw DBALError.rateLimitExceeded()
}
const scopedKey = scopeKey(key, context.namespace)
const metadata = await deps.baseStorage.upload(scopedKey, data, options)
await auditUpload(deps, data.length)
return {
...metadata,
key,
}
}
export const uploadStream = async (
deps: TenantAwareDeps,
key: string,
stream: NodeJS.ReadableStream,
size: number,
options?: UploadOptions,
): Promise<BlobMetadata> => {
const context = await resolveTenantContext(deps)
ensurePermission(context, 'write')
if (!context.canUploadBlob(size)) {
throw DBALError.rateLimitExceeded()
}
const scopedKey = scopeKey(key, context.namespace)
const metadata = await deps.baseStorage.uploadStream(scopedKey, stream, size, options)
await auditUpload(deps, size)
return {
...metadata,
key,
}
}

View File

@@ -1,168 +1 @@
/**
* @file websocket-bridge.ts
* @description WebSocket bridge adapter for remote DBAL daemon
*/
import type { DBALAdapter, AdapterCapabilities } from '../adapters/adapter'
import type { ListOptions, ListResult } from '../core/types'
import { DBALError } from '../core/foundation/errors'
import { generateRequestId } from './utils/generate-request-id'
import type { RPCMessage, RPCResponse, PendingRequest } from './utils/rpc-types'
/**
 * DBALAdapter implementation that proxies every operation over a WebSocket
 * JSON-RPC connection to a remote DBAL daemon.
 *
 * Lifecycle: the socket is opened lazily on the first call, responses are
 * matched to their requests by message id, and each request is abandoned
 * after a 30 second timeout.
 */
export class WebSocketBridge implements DBALAdapter {
  private ws: WebSocket | null = null
  private endpoint: string
  // NOTE(review): auth is stored but never attached to outgoing messages in
  // this file — presumably consumed elsewhere or not yet wired up; confirm.
  private auth?: { user: unknown, session: unknown }
  // Requests awaiting a response, keyed by RPC message id.
  private pendingRequests = new Map<string, PendingRequest>()
  constructor(endpoint: string, auth?: { user: unknown, session: unknown }) {
    this.endpoint = endpoint
    this.auth = auth
  }
  // Open the socket if it is not already open; resolves once connected.
  private async connect(): Promise<void> {
    if (this.ws?.readyState === WebSocket.OPEN) {
      return
    }
    return new Promise((resolve, reject) => {
      this.ws = new WebSocket(this.endpoint)
      this.ws.onopen = () => {
        resolve()
      }
      this.ws.onerror = (error) => {
        reject(DBALError.internal(`WebSocket connection failed: ${error}`))
      }
      this.ws.onmessage = (event) => {
        this.handleMessage(event.data)
      }
      this.ws.onclose = () => {
        // Drop the stale socket so the next call reconnects.
        this.ws = null
      }
    })
  }
  // Route an incoming RPC response to its pending request, if any.
  private handleMessage(data: string): void {
    try {
      const response: RPCResponse = JSON.parse(data)
      const pending = this.pendingRequests.get(response.id)
      if (!pending) {
        // Late or unknown response (e.g. after a timeout) — ignore it.
        return
      }
      this.pendingRequests.delete(response.id)
      if (response.error) {
        const error = new DBALError(
          response.error.message,
          response.error.code,
          response.error.details
        )
        pending.reject(error)
      } else {
        pending.resolve(response.result)
      }
    } catch (error) {
      console.error('Failed to parse WebSocket message:', error)
    }
  }
  // Send one RPC request and wait for its response.
  private async call(method: string, ...params: unknown[]): Promise<unknown> {
    await this.connect()
    const id = generateRequestId()
    const message: RPCMessage = { id, method, params }
    return new Promise((resolve, reject) => {
      this.pendingRequests.set(id, { resolve, reject })
      if (this.ws?.readyState === WebSocket.OPEN) {
        this.ws.send(JSON.stringify(message))
      } else {
        this.pendingRequests.delete(id)
        reject(DBALError.internal('WebSocket connection not open'))
      }
      // NOTE(review): this timer is never cleared on completion, so every
      // call keeps a 30 s timer alive even after it resolves.
      setTimeout(() => {
        if (this.pendingRequests.has(id)) {
          this.pendingRequests.delete(id)
          reject(DBALError.timeout('Request timed out'))
        }
      }, 30000)
    })
  }
  // --- DBALAdapter surface: each method is a thin RPC passthrough. ---
  async create(entity: string, data: Record<string, unknown>): Promise<unknown> {
    return this.call('create', entity, data)
  }
  async read(entity: string, id: string): Promise<unknown | null> {
    return this.call('read', entity, id)
  }
  async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
    return this.call('update', entity, id, data)
  }
  async delete(entity: string, id: string): Promise<boolean> {
    return this.call('delete', entity, id) as Promise<boolean>
  }
  async list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
    return this.call('list', entity, options) as Promise<ListResult<unknown>>
  }
  async findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
    return this.call('findFirst', entity, filter)
  }
  async findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
    return this.call('findByField', entity, field, value)
  }
  async upsert(
    entity: string,
    filter: Record<string, unknown>,
    createData: Record<string, unknown>,
    updateData: Record<string, unknown>
  ): Promise<unknown> {
    return this.call('upsert', entity, filter, createData, updateData)
  }
  async updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
    return this.call('updateByField', entity, field, value, data)
  }
  async deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
    return this.call('deleteByField', entity, field, value) as Promise<boolean>
  }
  async deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
    return this.call('deleteMany', entity, filter) as Promise<number>
  }
  async createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
    return this.call('createMany', entity, data) as Promise<number>
  }
  async updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
    return this.call('updateMany', entity, filter, data) as Promise<number>
  }
  async getCapabilities(): Promise<AdapterCapabilities> {
    return this.call('getCapabilities') as Promise<AdapterCapabilities>
  }
  // Close the socket and forget all pending requests (they are NOT
  // rejected here — their 30 s timers will eventually fire).
  async close(): Promise<void> {
    if (this.ws) {
      this.ws.close()
      this.ws = null
    }
    this.pendingRequests.clear()
  }
}
export { WebSocketBridge } from './websocket-bridge/index'

View File

@@ -0,0 +1,90 @@
import { DBALError } from '../../core/foundation/errors'
import type { RPCMessage } from '../utils/rpc-types'
import type { BridgeState } from './state'
import type { MessageRouter } from './message-router'
/** Lifecycle operations for the bridge's underlying WebSocket. */
export interface ConnectionManager {
  ensureConnection: () => Promise<void>
  send: (message: RPCMessage) => Promise<void>
  close: () => Promise<void>
}
/**
 * Create a connection manager bound to the shared bridge state.
 *
 * Concurrent connection attempts are deduplicated through a single shared
 * `connectionPromise`; any connection error or close rejects every pending
 * request so callers never hang on a dead socket.
 */
export const createConnectionManager = (
  state: BridgeState,
  messageRouter: MessageRouter,
): ConnectionManager => {
  // In-flight connection attempt, shared so concurrent callers wait on it.
  let connectionPromise: Promise<void> | null = null
  const resetConnection = () => {
    connectionPromise = null
    state.ws = null
  }
  // Fail all requests still waiting on a response with the same error.
  const rejectPendingRequests = (error: DBALError) => {
    state.pendingRequests.forEach(({ reject }) => reject(error))
    state.pendingRequests.clear()
  }
  const ensureConnection = async (): Promise<void> => {
    if (state.ws?.readyState === WebSocket.OPEN) {
      return
    }
    if (connectionPromise) {
      // Another caller is already connecting — piggyback on its promise.
      return connectionPromise
    }
    connectionPromise = new Promise((resolve, reject) => {
      try {
        const ws = new WebSocket(state.endpoint)
        state.ws = ws
        ws.onopen = () => resolve()
        ws.onerror = error => {
          const connectionError = DBALError.internal(`WebSocket connection failed: ${error}`)
          rejectPendingRequests(connectionError)
          resetConnection()
          reject(connectionError)
        }
        ws.onclose = () => {
          rejectPendingRequests(DBALError.internal('WebSocket connection closed'))
          resetConnection()
        }
        ws.onmessage = event => messageRouter.handle(event.data)
      } catch (error) {
        // The WebSocket constructor can throw synchronously (e.g. bad URL).
        resetConnection()
        const connectionError =
          error instanceof DBALError ? error : DBALError.internal('Failed to establish WebSocket connection')
        reject(connectionError)
      }
    })
    return connectionPromise
  }
  // Send a JSON-encoded RPC message, connecting first if needed.
  const send = async (message: RPCMessage): Promise<void> => {
    await ensureConnection()
    if (!state.ws || state.ws.readyState !== WebSocket.OPEN) {
      throw DBALError.internal('WebSocket connection not open')
    }
    state.ws.send(JSON.stringify(message))
  }
  // Tear down the socket and fail anything still pending.
  const close = async (): Promise<void> => {
    rejectPendingRequests(DBALError.internal('WebSocket connection closed'))
    if (state.ws) {
      state.ws.close()
    }
    resetConnection()
  }
  return {
    ensureConnection,
    send,
    close,
  }
}

View File

@@ -0,0 +1,84 @@
import type { DBALAdapter, AdapterCapabilities } from '../../adapters/adapter'
import type { ListOptions, ListResult } from '../../core/types'
import { createConnectionManager } from './connection-manager'
import { createMessageRouter } from './message-router'
import { createOperations } from './operations'
import { createBridgeState } from './state'
export class WebSocketBridge implements DBALAdapter {
private readonly state: ReturnType<typeof createBridgeState>
private readonly connectionManager: ReturnType<typeof createConnectionManager>
private readonly operations: ReturnType<typeof createOperations>
constructor(endpoint: string, auth?: { user: unknown; session: unknown }) {
this.state = createBridgeState(endpoint, auth)
const messageRouter = createMessageRouter(this.state)
this.connectionManager = createConnectionManager(this.state, messageRouter)
this.operations = createOperations(this.state, this.connectionManager)
}
create(entity: string, data: Record<string, unknown>): Promise<unknown> {
return this.operations.create(entity, data)
}
read(entity: string, id: string): Promise<unknown | null> {
return this.operations.read(entity, id) as Promise<unknown | null>
}
update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
return this.operations.update(entity, id, data)
}
delete(entity: string, id: string): Promise<boolean> {
return this.operations.delete(entity, id)
}
list(entity: string, options?: ListOptions): Promise<ListResult<unknown>> {
return this.operations.list(entity, options)
}
findFirst(entity: string, filter?: Record<string, unknown>): Promise<unknown | null> {
return this.operations.findFirst(entity, filter) as Promise<unknown | null>
}
findByField(entity: string, field: string, value: unknown): Promise<unknown | null> {
return this.operations.findByField(entity, field, value) as Promise<unknown | null>
}
upsert(
entity: string,
filter: Record<string, unknown>,
createData: Record<string, unknown>,
updateData: Record<string, unknown>,
): Promise<unknown> {
return this.operations.upsert(entity, filter, createData, updateData)
}
updateByField(entity: string, field: string, value: unknown, data: Record<string, unknown>): Promise<unknown> {
return this.operations.updateByField(entity, field, value, data)
}
deleteByField(entity: string, field: string, value: unknown): Promise<boolean> {
return this.operations.deleteByField(entity, field, value)
}
deleteMany(entity: string, filter?: Record<string, unknown>): Promise<number> {
return this.operations.deleteMany(entity, filter)
}
createMany(entity: string, data: Record<string, unknown>[]): Promise<number> {
return this.operations.createMany(entity, data)
}
updateMany(entity: string, filter: Record<string, unknown>, data: Record<string, unknown>): Promise<number> {
return this.operations.updateMany(entity, filter, data)
}
getCapabilities(): Promise<AdapterCapabilities> {
return this.operations.getCapabilities()
}
async close(): Promise<void> {
await this.connectionManager.close()
}
}

View File

@@ -0,0 +1,68 @@
import { DBALError } from '../../core/foundation/errors'
import type { RPCResponse } from '../utils/rpc-types'
import type { BridgeState } from './state'
/** Routes raw WebSocket payloads to their pending RPC requests. */
export interface MessageRouter {
  handle: (rawMessage: unknown) => void
}
/** True when the value is a plain (non-null, non-array) object. */
const isRecord = (value: unknown): value is Record<string, unknown> => {
  if (typeof value !== 'object') return false
  if (value === null) return false
  return !Array.isArray(value)
}
/** Structural check for the `error` member of an RPC response. */
const isRPCError = (value: unknown): value is NonNullable<RPCResponse['error']> => {
  if (!isRecord(value)) return false
  if (typeof value.code !== 'number') return false
  if (typeof value.message !== 'string') return false
  return value.details === undefined || isRecord(value.details)
}
/**
 * Structural check for a whole RPC response envelope: a string id plus
 * either a `result` member or a well-formed `error`.
 */
const isRPCResponse = (value: unknown): value is RPCResponse => {
  if (!isRecord(value)) {
    return false
  }
  if (typeof value.id !== 'string') {
    return false
  }
  const errorIsValid = isRPCError(value.error)
  const errorAbsentOrValid = errorIsValid || value.error === undefined
  const carriesResult = Object.prototype.hasOwnProperty.call(value, 'result')
  return (carriesResult || errorIsValid) && errorAbsentOrValid
}
/** Parse and validate one raw JSON payload; throws on malformed input. */
const parseResponse = (rawMessage: string): RPCResponse => {
  const candidate = JSON.parse(rawMessage) as unknown
  if (!isRPCResponse(candidate)) {
    throw new Error('Invalid RPC response shape')
  }
  return candidate
}
/**
 * Build a MessageRouter that settles pending requests held in the shared
 * bridge state, matching responses to requests by RPC id. Malformed or
 * unexpected payloads are logged and otherwise ignored.
 */
export const createMessageRouter = (state: BridgeState): MessageRouter => {
  const dispatch = (rawMessage: string): void => {
    const response = parseResponse(rawMessage)
    const pending = state.pendingRequests.get(response.id)
    if (!pending) {
      console.warn(`No pending request for response ${response.id}`)
      return
    }
    state.pendingRequests.delete(response.id)
    if (response.error) {
      pending.reject(new DBALError(response.error.message, response.error.code, response.error.details))
      return
    }
    pending.resolve(response.result)
  }
  return {
    handle: (rawMessage: unknown) => {
      if (typeof rawMessage !== 'string') {
        console.warn('Ignoring non-string WebSocket message')
        return
      }
      try {
        dispatch(rawMessage)
      } catch (error) {
        console.error('Failed to process WebSocket message', error)
      }
    },
  }
}

View File

@@ -0,0 +1,36 @@
import type { AdapterCapabilities } from '../../adapters/adapter'
import type { ListOptions, ListResult } from '../../core/types'
import type { ConnectionManager } from './connection-manager'
import type { BridgeState } from './state'
import { rpcCall } from './rpc'
/**
 * RPC-backed implementations of every adapter operation.
 *
 * Each member forwards its arguments to `rpcCall` under the matching RPC
 * method name; the casts only narrow the untyped RPC result.
 */
export const createOperations = (state: BridgeState, connectionManager: ConnectionManager) => {
  const call = (method: string, ...args: unknown[]) => rpcCall(state, connectionManager, method, ...args)
  return {
    create: (entity: string, data: Record<string, unknown>) => call('create', entity, data),
    read: (entity: string, id: string) => call('read', entity, id),
    update: (entity: string, id: string, data: Record<string, unknown>) => call('update', entity, id, data),
    delete: (entity: string, id: string) => call('delete', entity, id) as Promise<boolean>,
    list: (entity: string, options?: ListOptions) => call('list', entity, options) as Promise<ListResult<unknown>>,
    findFirst: (entity: string, filter?: Record<string, unknown>) => call('findFirst', entity, filter),
    findByField: (entity: string, field: string, value: unknown) => call('findByField', entity, field, value),
    upsert: (
      entity: string,
      filter: Record<string, unknown>,
      createData: Record<string, unknown>,
      updateData: Record<string, unknown>,
    ) => call('upsert', entity, filter, createData, updateData),
    updateByField: (entity: string, field: string, value: unknown, data: Record<string, unknown>) =>
      call('updateByField', entity, field, value, data),
    deleteByField: (entity: string, field: string, value: unknown) =>
      call('deleteByField', entity, field, value) as Promise<boolean>,
    deleteMany: (entity: string, filter?: Record<string, unknown>) =>
      call('deleteMany', entity, filter) as Promise<number>,
    createMany: (entity: string, data: Record<string, unknown>[]) =>
      call('createMany', entity, data) as Promise<number>,
    updateMany: (entity: string, filter: Record<string, unknown>, data: Record<string, unknown>) =>
      call('updateMany', entity, filter, data) as Promise<number>,
    getCapabilities: () => call('getCapabilities') as Promise<AdapterCapabilities>,
  }
}

View File

@@ -0,0 +1,34 @@
import { DBALError } from '../../core/foundation/errors'
import { generateRequestId } from '../utils/generate-request-id'
import type { RPCMessage } from '../utils/rpc-types'
import type { ConnectionManager } from './connection-manager'
import type { BridgeState } from './state'
/**
 * Send one RPC request and await its response.
 *
 * Registers the request in the shared state (so the message router can
 * settle it when a response arrives), sends through the connection
 * manager, and enforces a 30 second timeout.
 *
 * Fix: the timeout timer is now cleared as soon as the request settles.
 * Previously every call kept its 30 s timer alive after completion, which
 * pinned the event loop and leaked timers under load.
 */
export const rpcCall = async (
  state: BridgeState,
  connectionManager: ConnectionManager,
  method: string,
  ...params: unknown[]
): Promise<unknown> => {
  const id = generateRequestId()
  const message: RPCMessage = { id, method, params }
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      if (state.pendingRequests.has(id)) {
        state.pendingRequests.delete(id)
        reject(DBALError.timeout('Request timed out'))
      }
    }, 30000)
    // Wrap the settle callbacks so every path (router response, send
    // failure, connection teardown) also cancels the timeout timer.
    state.pendingRequests.set(id, {
      resolve: (value: unknown) => {
        clearTimeout(timer)
        resolve(value)
      },
      reject: (error: unknown) => {
        clearTimeout(timer)
        reject(error)
      },
    })
    connectionManager.send(message).catch(error => {
      if (state.pendingRequests.has(id)) {
        state.pendingRequests.delete(id)
        clearTimeout(timer)
        reject(error)
      }
    })
  })
}

View File

@@ -0,0 +1,18 @@
import type { PendingRequest } from '../utils/rpc-types'
/**
 * Mutable state shared between the connection manager, message router and
 * RPC layer of the WebSocket bridge.
 */
export interface BridgeState {
  ws: WebSocket | null
  endpoint: string
  auth?: { user: unknown; session: unknown }
  pendingRequests: Map<string, PendingRequest>
}

/** Create a fresh, disconnected bridge state for the given endpoint. */
export const createBridgeState = (
  endpoint: string,
  auth?: { user: unknown; session: unknown },
): BridgeState => {
  const pendingRequests = new Map<string, PendingRequest>()
  return { ws: null, endpoint, auth, pendingRequests }
}

View File

@@ -0,0 +1,8 @@
import type { DBALConfig } from '../runtime/config'
import { DBALClient } from './client/client'
export { buildAdapter, buildEntityOperations } from './client/builders'
export { normalizeClientConfig, validateClientConfig } from './client/mappers'
/** Factory: construct a DBALClient for the given configuration. */
export const createDBALClient = (config: DBALConfig): DBALClient => {
  return new DBALClient(config)
}
export { DBALClient }

View File

@@ -6,7 +6,7 @@
import type { DBALConfig } from '../../runtime/config'
import type { DBALAdapter } from '../../adapters/adapter'
import { DBALError } from '../foundation/errors'
import { PrismaAdapter, PostgresAdapter, MySQLAdapter } from '../../adapters/prisma-adapter'
import { PrismaAdapter, PostgresAdapter, MySQLAdapter } from '../../adapters/prisma'
import { ACLAdapter } from '../../adapters/acl-adapter'
import { WebSocketBridge } from '../../bridges/websocket-bridge'

View File

@@ -0,0 +1,24 @@
import type { DBALAdapter } from '../../adapters/adapter'
import type { DBALConfig } from '../../runtime/config'
import { createAdapter } from './adapter-factory'
import {
createComponentOperations,
createLuaScriptOperations,
createPackageOperations,
createPageOperations,
createSessionOperations,
createUserOperations,
createWorkflowOperations
} from '../entities'
/** Create the configured low-level adapter for this client. */
export const buildAdapter = (config: DBALConfig): DBALAdapter => {
  return createAdapter(config)
}

/**
 * Bundle the per-entity operation factories around a single adapter
 * instance so the client can expose them as properties.
 */
export const buildEntityOperations = (adapter: DBALAdapter) => {
  return {
    users: createUserOperations(adapter),
    pages: createPageOperations(adapter),
    components: createComponentOperations(adapter),
    workflows: createWorkflowOperations(adapter),
    luaScripts: createLuaScriptOperations(adapter),
    packages: createPackageOperations(adapter),
    sessions: createSessionOperations(adapter),
  }
}

View File

@@ -1,7 +1,7 @@
/**
* @file client.ts
* @description DBAL Client - Main interface for database operations
*
*
* Provides CRUD operations for all entities through modular operation handlers.
* Each entity type has its own dedicated operations module following the
* single-responsibility pattern.
@@ -9,82 +9,67 @@
import type { DBALConfig } from '../../runtime/config'
import type { DBALAdapter } from '../../adapters/adapter'
import { createAdapter } from './adapter-factory'
import {
createUserOperations,
createPageOperations,
createComponentOperations,
createWorkflowOperations,
createLuaScriptOperations,
createPackageOperations,
createSessionOperations,
} from '../entities'
import { buildAdapter, buildEntityOperations } from './builders'
import { normalizeClientConfig, validateClientConfig } from './mappers'
export class DBALClient {
private adapter: DBALAdapter
private config: DBALConfig
private operations: ReturnType<typeof buildEntityOperations>
constructor(config: DBALConfig) {
this.config = config
// Validate configuration
if (!config.adapter) {
throw new Error('Adapter type must be specified')
}
if (config.mode !== 'production' && !config.database?.url) {
throw new Error('Database URL must be specified for non-production mode')
}
this.adapter = createAdapter(config)
this.config = normalizeClientConfig(validateClientConfig(config))
this.adapter = buildAdapter(this.config)
this.operations = buildEntityOperations(this.adapter)
}
/**
* User entity operations
*/
get users() {
return createUserOperations(this.adapter)
return this.operations.users
}
/**
* Page entity operations
*/
get pages() {
return createPageOperations(this.adapter)
return this.operations.pages
}
/**
* Component hierarchy entity operations
*/
get components() {
return createComponentOperations(this.adapter)
return this.operations.components
}
/**
* Workflow entity operations
*/
get workflows() {
return createWorkflowOperations(this.adapter)
return this.operations.workflows
}
/**
* Lua script entity operations
*/
get luaScripts() {
return createLuaScriptOperations(this.adapter)
return this.operations.luaScripts
}
/**
* Package entity operations
*/
get packages() {
return createPackageOperations(this.adapter)
return this.operations.packages
}
/**
* Session entity operations
*/
get sessions() {
return createSessionOperations(this.adapter)
return this.operations.sessions
}
/**

View File

@@ -0,0 +1,25 @@
import type { DBALConfig } from '../../runtime/config'
import { DBALError } from '../foundation/errors'
/**
 * Validate the parts of a DBAL config the client depends on; returns the
 * config unchanged so the call can be chained.
 *
 * @throws DBALError validation error when the adapter is missing, or when a
 *         non-production mode has no database URL
 */
export const validateClientConfig = (config: DBALConfig): DBALConfig => {
  if (!config.adapter) {
    throw DBALError.validationError('Adapter type must be specified', [])
  }
  const requiresDatabaseUrl = config.mode !== 'production'
  if (requiresDatabaseUrl && !config.database?.url) {
    throw DBALError.validationError('Database URL must be specified for non-production mode', [])
  }
  return config
}
/**
 * Fill in client config defaults without mutating the input: sandboxing
 * defaults to 'strict', audit logging defaults to on, and the performance
 * section is shallow-copied.
 */
export const normalizeClientConfig = (config: DBALConfig): DBALConfig => {
  const security = {
    sandbox: config.security?.sandbox ?? 'strict',
    enableAuditLog: config.security?.enableAuditLog ?? true,
  }
  const performance = { ...config.performance }
  return { ...config, security, performance }
}

View File

@@ -12,13 +12,13 @@ export * as luaScript from './lua-script';
export * as pkg from './package';
// Legacy factory exports (for backward compatibility)
export { createUserOperations } from './user-operations';
export { createPageOperations } from './page-operations';
export { createComponentOperations } from './component-operations';
export { createWorkflowOperations } from './workflow-operations';
export { createLuaScriptOperations } from './lua-script-operations';
export { createPackageOperations } from './package-operations';
export { createSessionOperations } from './session-operations';
export { createUserOperations } from './operations/core/user-operations';
export { createPageOperations } from './operations/system/page-operations';
export { createComponentOperations } from './operations/system/component-operations';
export { createWorkflowOperations } from './operations/core/workflow-operations';
export { createLuaScriptOperations } from './operations/core/lua-script-operations';
export { createPackageOperations } from './operations/system/package-operations';
export { createSessionOperations } from './operations/core/session-operations';
// Validation utilities
export * from '../validation';

View File

@@ -1,185 +1,11 @@
/**
* @file user-operations.ts
* @description User entity CRUD operations for DBAL client
*
* Single-responsibility module following the small-function-file pattern.
*/
export { createUserOperations } from './user'
export type { UserOperations } from './user'
import type { DBALAdapter } from '../../adapters/adapter'
import type { User, ListOptions, ListResult } from '../types'
import { DBALError } from '../errors'
import { validateUserCreate, validateUserUpdate, validateId } from '../validation'
/**
* Create user operations object for the DBAL client
*/
export const createUserOperations = (adapter: DBALAdapter) => ({
/**
* Create a new user
*/
create: async (data: Omit<User, 'id' | 'createdAt' | 'updatedAt'>): Promise<User> => {
const validationErrors = validateUserCreate(data)
if (validationErrors.length > 0) {
throw DBALError.validationError(
'Invalid user data',
validationErrors.map(error => ({ field: 'user', error }))
)
}
try {
return adapter.create('User', data) as Promise<User>
} catch (error) {
if (error instanceof DBALError && error.code === 409) {
throw DBALError.conflict(`User with username or email already exists`)
}
throw error
}
},
/**
* Read a user by ID
*/
read: async (id: string): Promise<User | null> => {
const validationErrors = validateId(id)
if (validationErrors.length > 0) {
throw DBALError.validationError(
'Invalid user ID',
validationErrors.map(error => ({ field: 'id', error }))
)
}
const result = await adapter.read('User', id) as User | null
if (!result) {
throw DBALError.notFound(`User not found: ${id}`)
}
return result
},
/**
* Update an existing user
*/
update: async (id: string, data: Partial<User>): Promise<User> => {
const idErrors = validateId(id)
if (idErrors.length > 0) {
throw DBALError.validationError(
'Invalid user ID',
idErrors.map(error => ({ field: 'id', error }))
)
}
const validationErrors = validateUserUpdate(data)
if (validationErrors.length > 0) {
throw DBALError.validationError(
'Invalid user update data',
validationErrors.map(error => ({ field: 'user', error }))
)
}
try {
return adapter.update('User', id, data) as Promise<User>
} catch (error) {
if (error instanceof DBALError && error.code === 409) {
throw DBALError.conflict(`Username or email already exists`)
}
throw error
}
},
/**
* Delete a user by ID
*/
delete: async (id: string): Promise<boolean> => {
const validationErrors = validateId(id)
if (validationErrors.length > 0) {
throw DBALError.validationError(
'Invalid user ID',
validationErrors.map(error => ({ field: 'id', error }))
)
}
const result = await adapter.delete('User', id)
if (!result) {
throw DBALError.notFound(`User not found: ${id}`)
}
return result
},
/**
* List users with filtering and pagination
*/
list: async (options?: ListOptions): Promise<ListResult<User>> => {
return adapter.list('User', options) as Promise<ListResult<User>>
},
/**
* Batch create multiple users
*/
createMany: async (data: Array<Omit<User, 'id' | 'createdAt' | 'updatedAt'>>): Promise<number> => {
if (!data || data.length === 0) {
return 0
}
const validationErrors = data.flatMap((item, index) =>
validateUserCreate(item).map(error => ({ field: `users[${index}]`, error }))
)
if (validationErrors.length > 0) {
throw DBALError.validationError('Invalid user batch', validationErrors)
}
try {
return adapter.createMany('User', data as Record<string, unknown>[])
} catch (error) {
if (error instanceof DBALError && error.code === 409) {
throw DBALError.conflict('Username or email already exists')
}
throw error
}
},
/**
* Bulk update users matching a filter
*/
updateMany: async (filter: Record<string, unknown>, data: Partial<User>): Promise<number> => {
if (!filter || Object.keys(filter).length === 0) {
throw DBALError.validationError('Bulk update requires a filter', [
{ field: 'filter', error: 'Filter is required' },
])
}
if (!data || Object.keys(data).length === 0) {
throw DBALError.validationError('Bulk update requires data', [
{ field: 'data', error: 'Update data is required' },
])
}
const validationErrors = validateUserUpdate(data)
if (validationErrors.length > 0) {
throw DBALError.validationError(
'Invalid user update data',
validationErrors.map(error => ({ field: 'user', error }))
)
}
try {
return adapter.updateMany('User', filter, data as Record<string, unknown>)
} catch (error) {
if (error instanceof DBALError && error.code === 409) {
throw DBALError.conflict('Username or email already exists')
}
throw error
}
},
/**
* Bulk delete users matching a filter
*/
deleteMany: async (filter: Record<string, unknown>): Promise<number> => {
if (!filter || Object.keys(filter).length === 0) {
throw DBALError.validationError('Bulk delete requires a filter', [
{ field: 'filter', error: 'Filter is required' },
])
}
return adapter.deleteMany('User', filter)
},
})
export { createUser } from './user/create'
export { deleteUser } from './user/delete'
export { updateUser } from './user/update'
export {
assertValidUserCreate,
assertValidUserId,
assertValidUserUpdate,
} from './user/validation'

View File

@@ -0,0 +1,71 @@
import type { DBALAdapter } from '../../../../adapters/adapter'
import type { User } from '../../../../foundation/types'
import { DBALError } from '../../../../foundation/errors'
import { validateUserCreate, validateUserUpdate } from '../../../../foundation/validation'
/**
 * Batch-create users after validating each item.
 *
 * @returns number of created users (0 for an empty/absent batch)
 * @throws DBALError validation error when any item is invalid
 * @throws DBALError conflict when a username/email already exists (409)
 */
export const createManyUsers = async (
  adapter: DBALAdapter,
  data: Array<Omit<User, 'id' | 'createdAt' | 'updatedAt'>>,
): Promise<number> => {
  if (!data || data.length === 0) {
    return 0
  }
  const validationErrors = data.flatMap((item, index) =>
    validateUserCreate(item).map(error => ({ field: `users[${index}]`, error })),
  )
  if (validationErrors.length > 0) {
    throw DBALError.validationError('Invalid user batch', validationErrors)
  }
  try {
    // await is required here: without it a rejected promise escapes the
    // try/catch and the 409 -> conflict mapping below never runs.
    return await adapter.createMany('User', data as Record<string, unknown>[])
  } catch (error) {
    if (error instanceof DBALError && error.code === 409) {
      throw DBALError.conflict('Username or email already exists')
    }
    throw error
  }
}
/**
 * Bulk update users matching a filter.
 *
 * Requires a non-empty filter and a non-empty update payload, validates the
 * payload, and maps adapter 409 conflicts to a DBAL conflict error.
 *
 * @returns number of updated users
 */
export const updateManyUsers = async (
  adapter: DBALAdapter,
  filter: Record<string, unknown>,
  data: Partial<User>,
): Promise<number> => {
  if (!filter || Object.keys(filter).length === 0) {
    throw DBALError.validationError('Bulk update requires a filter', [
      { field: 'filter', error: 'Filter is required' },
    ])
  }
  if (!data || Object.keys(data).length === 0) {
    throw DBALError.validationError('Bulk update requires data', [
      { field: 'data', error: 'Update data is required' },
    ])
  }
  const validationErrors = validateUserUpdate(data)
  if (validationErrors.length > 0) {
    throw DBALError.validationError('Invalid user update data', validationErrors.map(error => ({ field: 'user', error })))
  }
  try {
    // await is required here: without it a rejected promise escapes the
    // try/catch and the 409 -> conflict mapping below never runs.
    return await adapter.updateMany('User', filter, data as Record<string, unknown>)
  } catch (error) {
    if (error instanceof DBALError && error.code === 409) {
      throw DBALError.conflict('Username or email already exists')
    }
    throw error
  }
}
/**
 * Bulk delete users matching a filter; an explicit non-empty filter is
 * mandatory to prevent accidental whole-table deletes.
 */
export const deleteManyUsers = async (adapter: DBALAdapter, filter: Record<string, unknown>): Promise<number> => {
  const hasFilter = filter && Object.keys(filter).length > 0
  if (!hasFilter) {
    throw DBALError.validationError('Bulk delete requires a filter', [
      { field: 'filter', error: 'Filter is required' },
    ])
  }
  return adapter.deleteMany('User', filter)
}

View File

@@ -0,0 +1,20 @@
import type { DBALAdapter } from '../../../../adapters/adapter'
import { DBALError } from '../../../../foundation/errors'
import type { User } from '../../../../foundation/types'
import { assertValidUserCreate } from './validation'
/**
 * Create a single user after validating the payload.
 *
 * @throws DBALError validation error when the payload is invalid
 * @throws DBALError conflict when the adapter reports a 409 duplicate
 */
export const createUser = async (
  adapter: DBALAdapter,
  data: Omit<User, 'id' | 'createdAt' | 'updatedAt'>,
): Promise<User> => {
  assertValidUserCreate(data)
  try {
    // await is required here: without it a rejected promise bypasses the
    // catch below and the 409 -> conflict mapping is dead code.
    return (await adapter.create('User', data)) as User
  } catch (error) {
    if (error instanceof DBALError && error.code === 409) {
      throw DBALError.conflict('User with username or email already exists')
    }
    throw error
  }
}

View File

@@ -0,0 +1,13 @@
import type { DBALAdapter } from '../../../../adapters/adapter'
import { DBALError } from '../../../../foundation/errors'
import { assertValidUserId } from './validation'
/**
 * Delete a user by id.
 *
 * @throws DBALError not-found when no user was deleted
 */
export const deleteUser = async (adapter: DBALAdapter, id: string): Promise<boolean> => {
  assertValidUserId(id)
  const deleted = await adapter.delete('User', id)
  if (deleted) {
    return deleted
  }
  throw DBALError.notFound(`User not found: ${id}`)
}

View File

@@ -0,0 +1,29 @@
import type { DBALAdapter } from '../../../../adapters/adapter'
import type { User, ListOptions, ListResult } from '../../../../foundation/types'
import { createUser } from './create'
import { deleteUser } from './delete'
import { updateUser } from './update'
import { createManyUsers, deleteManyUsers, updateManyUsers } from './batch'
import { listUsers, readUser } from './reads'
/** Public CRUD + batch contract for the User entity. */
export interface UserOperations {
  create: (data: Omit<User, 'id' | 'createdAt' | 'updatedAt'>) => Promise<User>
  read: (id: string) => Promise<User | null>
  update: (id: string, data: Partial<User>) => Promise<User>
  delete: (id: string) => Promise<boolean>
  list: (options?: ListOptions) => Promise<ListResult<User>>
  createMany: (data: Array<Omit<User, 'id' | 'createdAt' | 'updatedAt'>>) => Promise<number>
  updateMany: (filter: Record<string, unknown>, data: Partial<User>) => Promise<number>
  deleteMany: (filter: Record<string, unknown>) => Promise<number>
}

/** Bind every user operation to a single adapter instance. */
export const createUserOperations = (adapter: DBALAdapter): UserOperations => {
  return {
    create: (data) => createUser(adapter, data),
    read: (id) => readUser(adapter, id),
    update: (id, data) => updateUser(adapter, id, data),
    delete: (id) => deleteUser(adapter, id),
    list: (options) => listUsers(adapter, options),
    createMany: (data) => createManyUsers(adapter, data),
    updateMany: (filter, data) => updateManyUsers(adapter, filter, data),
    deleteMany: (filter) => deleteManyUsers(adapter, filter),
  }
}

View File

@@ -0,0 +1,21 @@
import type { DBALAdapter } from '../../../../adapters/adapter'
import type { User, ListOptions, ListResult } from '../../../../foundation/types'
import { DBALError } from '../../../../foundation/errors'
import { validateId } from '../../../../foundation/validation'
/**
 * Read a user by id.
 *
 * NOTE(review): despite the `User | null` return type this never resolves
 * to null — a missing user raises DBALError.notFound instead.
 */
export const readUser = async (adapter: DBALAdapter, id: string): Promise<User | null> => {
  const idErrors = validateId(id)
  if (idErrors.length > 0) {
    throw DBALError.validationError('Invalid user ID', idErrors.map(error => ({ field: 'id', error })))
  }
  const user = (await adapter.read('User', id)) as User | null
  if (!user) {
    throw DBALError.notFound(`User not found: ${id}`)
  }
  return user
}
/** List users; filtering/pagination options are passed through verbatim. */
export const listUsers = (adapter: DBALAdapter, options?: ListOptions): Promise<ListResult<User>> => {
  const result = adapter.list('User', options)
  return result as Promise<ListResult<User>>
}

Some files were not shown because too many files have changed in this diff Show More