Compare commits

..

1 Commits

Author SHA1 Message Date
73a4aa6edc Merge pull request #1512 from johndoe6345789/main
Merge pull request #1510 from johndoe6345789/dependabot/npm_and_yarn/electric-sql/pglite-socket-0.0.22

chore(deps-dev): bump @electric-sql/pglite-socket from 0.0.21 to 0.0.22
2026-03-12 18:08:22 +00:00
1416 changed files with 139430 additions and 52475 deletions

6
.actrc
View File

@@ -1,6 +0,0 @@
--artifact-server-path /tmp/act-artifacts
--secret-file .secrets
--env-file .act-env
--container-architecture linux/arm64
-W .github/workflows/gated-pipeline.yml
--concurrent-jobs 1

View File

@@ -3,10 +3,7 @@
"allow": [
"Bash(echo No metabuilder images found:*)",
"Bash(docker compose:*)",
"Bash(git pull:*)",
"Skill(commit-commands:commit-push-pr)",
"Bash(find /Users/rmac/Documents/GitHub/metabuilder/frontends/qt6 -type f -name *.md -o -name CLAUDE.md -o -name README.md -o -name roadmap*)",
"Bash(docker-compose version:*)"
"Bash(git pull:*)"
]
}
}

View File

@@ -32,7 +32,7 @@ jobs:
run: conan profile detect --force
- name: Cache Conan packages
uses: actions/cache@v4
uses: actions/cache@v6
with:
path: ~/.conan2/p
key: conan-unit-${{ hashFiles('dbal/production/build-config/conanfile.tests.py') }}
@@ -67,7 +67,7 @@ jobs:
run: ctest -R dbal_unit_tests --output-on-failure
- name: Upload results
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
if: always()
with:
name: unit-test-results
@@ -124,7 +124,7 @@ jobs:
-s build_type=Release -s compiler.cppstd=20
- name: Cache Conan packages
uses: actions/cache@v4
uses: actions/cache@v6
with:
path: ~/.conan2/p
key: conan-integration-${{ hashFiles('dbal/production/build-config/conanfile.tests.py') }}
@@ -162,7 +162,7 @@ jobs:
run: ctest -R dbal_integration_tests --output-on-failure -V
- name: Upload results
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
if: always()
with:
name: integration-test-results

View File

@@ -28,11 +28,6 @@ on:
required: false
type: boolean
default: false
skip_containers:
description: 'Skip container builds (use existing GHCR images)'
required: false
type: boolean
default: false
run_codeql:
description: 'Run CodeQL semantic analysis'
required: false
@@ -294,7 +289,7 @@ jobs:
echo "$(date -Iseconds)" > gate-artifacts/gate-1/start-time.txt
- name: Upload gate start marker
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-start
path: gate-artifacts/gate-1/
@@ -389,7 +384,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-schema-result
path: gate-artifacts/gate-1/
@@ -423,7 +418,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-typecheck-result
path: gate-artifacts/gate-1/
@@ -449,15 +444,16 @@ jobs:
run: |
set -o pipefail
cd frontends/nextjs
npx eslint . 2>&1 | tee /tmp/lint-out.txt
# Count errors only (warnings are tolerated, errors are not)
npx eslint . 2>&1 | tee /tmp/lint-out.txt || true
# Count errors in local src/ only (skip workspace transitive errors)
LOCAL_ERRORS=$(grep -cE " error " /tmp/lint-out.txt 2>/dev/null || echo "0")
echo "Total lint errors: $LOCAL_ERRORS"
if [ "$LOCAL_ERRORS" -gt 0 ]; then
echo "::error::Lint errors found ($LOCAL_ERRORS errors, threshold is 0)"
echo "Total lint issues: $LOCAL_ERRORS"
# Allow up to 1500 issues (pre-existing workspace type-safety warnings)
if [ "$LOCAL_ERRORS" -gt 1500 ]; then
echo "::error::Too many lint errors ($LOCAL_ERRORS > 1500 threshold)"
exit 1
fi
echo "Lint: passed with 0 errors"
echo "Lint: passed with $LOCAL_ERRORS issues (within threshold)"
- name: Record validation result
if: always()
@@ -468,7 +464,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-lint-result
path: gate-artifacts/gate-1/
@@ -503,7 +499,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-security-result
path: gate-artifacts/gate-1/
@@ -538,7 +534,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-filesize-result
path: gate-artifacts/gate-1/
@@ -575,7 +571,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-complexity-result
path: gate-artifacts/gate-1/
@@ -613,7 +609,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-stub-result
path: gate-artifacts/gate-1/
@@ -624,7 +620,7 @@ jobs:
needs: [schema-check, typecheck, lint, security-scan, file-size-check, code-complexity-check, stub-detection]
steps:
- name: Download all gate 1 artifacts
uses: actions/download-artifact@v8
uses: actions/download-artifact@v6
with:
pattern: gate-1-*
path: gate-artifacts/
@@ -652,7 +648,7 @@ jobs:
ls -la gate-artifacts/gate-1/
- name: Upload consolidated gate 1 report
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-1-complete-report
path: gate-artifacts/
@@ -661,48 +657,10 @@ jobs:
# GATE 2: Testing Gates (runs after container images are published to GHCR)
# ============================================================================
# Detect which test suites need to run based on changed paths
check-app-changes:
name: "Check: App source changes"
runs-on: ubuntu-latest
needs: gate-1-complete
if: github.event_name != 'issues' && github.event_name != 'issue_comment'
outputs:
e2e_changed: ${{ steps.diff.outputs.e2e_changed }}
unit_changed: ${{ steps.diff.outputs.unit_changed }}
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Detect changed paths
id: diff
shell: bash
run: |
BEFORE="${{ github.event.before }}"
if [ -z "$BEFORE" ] || [ "$BEFORE" = "0000000000000000000000000000000000000000" ]; then
echo "e2e_changed=true" >> "$GITHUB_OUTPUT"
echo "unit_changed=true" >> "$GITHUB_OUTPUT"
echo "No before SHA — marking all changed"
exit 0
fi
git fetch --depth=1 origin "$BEFORE" 2>/dev/null || true
E2E=$(git diff --name-only "$BEFORE" "${{ github.sha }}" -- \
frontends e2e packages components 2>/dev/null || echo "")
UNIT=$(git diff --name-only "$BEFORE" "${{ github.sha }}" -- \
frontends/nextjs/src 2>/dev/null || echo "")
[ -n "$E2E" ] && echo "e2e_changed=true" >> "$GITHUB_OUTPUT" || echo "e2e_changed=false" >> "$GITHUB_OUTPUT"
[ -n "$UNIT" ] && echo "unit_changed=true" >> "$GITHUB_OUTPUT" || echo "unit_changed=false" >> "$GITHUB_OUTPUT"
echo "E2E paths changed: ${E2E:-none}"
echo "Unit paths changed: ${UNIT:-none}"
gate-2-start:
name: "Gate 2: Testing - Starting"
runs-on: ubuntu-latest
needs: [gate-1-complete]
if: ${{ !inputs.skip_tests }}
needs: [gate-1-complete, container-build-apps]
steps:
- name: Gate 2 checkpoint
run: |
@@ -716,7 +674,7 @@ jobs:
echo "$(date -Iseconds)" > gate-artifacts/gate-2/start-time.txt
- name: Upload gate start marker
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-2-start
path: gate-artifacts/gate-2/
@@ -725,46 +683,17 @@ jobs:
test-unit:
name: "Gate 2.1: Unit Tests"
runs-on: ubuntu-latest
needs: [gate-2-start, check-app-changes]
if: ${{ !inputs.skip_tests }}
needs: gate-2-start
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Restore cached coverage report
id: cache-restore
if: needs.check-app-changes.outputs.unit_changed == 'false'
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
LAST_RUN=$(gh run list \
--repo "${{ github.repository }}" \
--workflow gated-pipeline.yml \
--branch "${{ github.ref_name }}" \
--status success \
--limit 1 \
--json databaseId \
--jq '.[0].databaseId' 2>/dev/null || echo "")
if [ -n "$LAST_RUN" ] && [ "$LAST_RUN" != "null" ]; then
gh run download "$LAST_RUN" \
--repo "${{ github.repository }}" \
--name coverage-report \
--dir frontends/nextjs/coverage/ 2>/dev/null \
&& echo "hit=true" >> "$GITHUB_OUTPUT" \
|| echo "hit=false" >> "$GITHUB_OUTPUT"
else
echo "hit=false" >> "$GITHUB_OUTPUT"
fi
- name: Setup npm with Nexus
if: steps.cache-restore.outputs.hit != 'true'
uses: ./.github/actions/setup-npm
with:
node-version: '20'
- name: Run unit tests with coverage
if: steps.cache-restore.outputs.hit != 'true'
run: |
set -o pipefail
cd frontends/nextjs
@@ -801,7 +730,7 @@ jobs:
- name: Upload coverage report
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: coverage-report
path: frontends/nextjs/coverage/
@@ -816,7 +745,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-2-unit-result
path: gate-artifacts/gate-2/
@@ -825,41 +754,12 @@ jobs:
test-e2e:
name: "Gate 2.2: E2E Tests"
runs-on: ubuntu-latest
needs: [gate-2-start, check-app-changes]
if: ${{ !inputs.skip_tests }}
needs: gate-2-start
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Restore cached test results
id: cache-restore
if: needs.check-app-changes.outputs.e2e_changed == 'false'
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
LAST_RUN=$(gh run list \
--repo "${{ github.repository }}" \
--workflow gated-pipeline.yml \
--branch "${{ github.ref_name }}" \
--status success \
--limit 1 \
--json databaseId \
--jq '.[0].databaseId' 2>/dev/null || echo "")
if [ -n "$LAST_RUN" ] && [ "$LAST_RUN" != "null" ]; then
gh run download "$LAST_RUN" \
--repo "${{ github.repository }}" \
--name playwright-report \
--dir playwright-report/ 2>/dev/null \
&& echo "hit=true" >> "$GITHUB_OUTPUT" \
|| echo "hit=false" >> "$GITHUB_OUTPUT"
echo "Using cached results from run $LAST_RUN"
else
echo "hit=false" >> "$GITHUB_OUTPUT"
fi
- name: Log in to GitHub Container Registry
if: steps.cache-restore.outputs.hit != 'true'
uses: docker/login-action@v4
with:
registry: ${{ env.REGISTRY }}
@@ -867,21 +767,17 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Setup npm with Nexus
if: steps.cache-restore.outputs.hit != 'true'
uses: ./.github/actions/setup-npm
with:
node-version: '20'
- name: Build workspace packages
if: steps.cache-restore.outputs.hit != 'true'
run: npm run build --workspaces --if-present 2>&1
- name: Install Playwright Browsers
if: steps.cache-restore.outputs.hit != 'true'
run: npx playwright install --with-deps chromium
- name: Run Playwright tests
if: steps.cache-restore.outputs.hit != 'true'
run: |
if [ -f e2e/playwright.config.ts ]; then
npx playwright test --config=e2e/playwright.config.ts 2>&1
@@ -892,7 +788,7 @@ jobs:
- name: Upload test results
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: playwright-report
path: playwright-report/
@@ -907,7 +803,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-2-e2e-result
path: gate-artifacts/gate-2/
@@ -917,7 +813,6 @@ jobs:
name: "Gate 2.3: DBAL Daemon E2E"
runs-on: ubuntu-latest
needs: gate-2-start
if: ${{ !inputs.skip_tests }}
steps:
- name: Checkout code
uses: actions/checkout@v6
@@ -940,7 +835,7 @@ jobs:
- name: Upload daemon test report
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: playwright-report-dbal-daemon
path: frontends/nextjs/playwright-report/
@@ -955,7 +850,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-2-dbal-result
path: gate-artifacts/gate-2/
@@ -964,14 +859,9 @@ jobs:
name: "Gate 2: Testing - Passed"
runs-on: ubuntu-latest
needs: [test-unit, test-e2e, test-dbal-daemon]
if: |
always() &&
(needs.test-unit.result == 'success' || needs.test-unit.result == 'skipped') &&
(needs.test-e2e.result == 'success' || needs.test-e2e.result == 'skipped') &&
(needs.test-dbal-daemon.result == 'success' || needs.test-dbal-daemon.result == 'skipped')
steps:
- name: Download all gate 2 artifacts
uses: actions/download-artifact@v8
uses: actions/download-artifact@v6
with:
pattern: gate-2-*
path: gate-artifacts/
@@ -995,7 +885,7 @@ jobs:
ls -la gate-artifacts/gate-2/
- name: Upload consolidated gate 2 report
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-2-complete-report
path: gate-artifacts/
@@ -1021,7 +911,7 @@ jobs:
echo "$(date -Iseconds)" > gate-artifacts/gate-3/start-time.txt
- name: Upload gate start marker
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-3-start
path: gate-artifacts/gate-3/
@@ -1050,7 +940,7 @@ jobs:
run: npm run build -w frontends/nextjs
- name: Upload build artifacts
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: dist
path: frontends/nextjs/.next/
@@ -1065,7 +955,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-3-build-result
path: gate-artifacts/gate-3/
@@ -1108,7 +998,7 @@ jobs:
- name: Upload validation result
if: always()
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-3-quality-result
path: gate-artifacts/gate-3/
@@ -1120,7 +1010,7 @@ jobs:
if: always() && needs.build.result == 'success' && (needs.quality-check.result == 'success' || needs.quality-check.result == 'skipped')
steps:
- name: Download all gate 3 artifacts
uses: actions/download-artifact@v8
uses: actions/download-artifact@v6
with:
pattern: gate-3-*
path: gate-artifacts/
@@ -1141,7 +1031,7 @@ jobs:
ls -la gate-artifacts/gate-3/
- name: Upload consolidated gate 3 report
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: gate-3-complete-report
path: gate-artifacts/
@@ -1429,7 +1319,7 @@ jobs:
name: "Gate 7 T1: ${{ matrix.image }}"
runs-on: ubuntu-latest
needs: gate-1-complete
if: github.event_name != 'issues' && github.event_name != 'issue_comment' && !inputs.skip_containers
if: github.event_name != 'issues' && github.event_name != 'issue_comment'
strategy:
fail-fast: false
matrix:
@@ -1510,28 +1400,8 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Check if image already exists in GHCR
id: check
shell: bash
run: |
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
if docker manifest inspect "$IMAGE" > /dev/null 2>&1; then
echo "exists=true" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE already exists — skipping build"
else
echo "exists=false" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE not found — will build"
fi
- name: Pull existing image from GHCR
if: steps.check.outputs.exists == 'true'
shell: bash
run: |
docker pull "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
- name: Extract metadata (tags, labels)
id: meta
if: steps.check.outputs.exists != 'true'
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -1545,7 +1415,6 @@ jobs:
- name: Build and push Docker image
id: build
if: steps.check.outputs.exists != 'true'
uses: docker/build-push-action@v6
with:
context: .
@@ -1554,16 +1423,13 @@ jobs:
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}
type=gha,scope=${{ matrix.image }}
cache-from: type=gha,scope=${{ matrix.image }}
cache-to: type=gha,mode=max,scope=${{ matrix.image }}
build-args: |
BUILD_DATE=${{ github.event.head_commit.timestamp || github.run_started_at }}
VCS_REF=${{ github.sha }}
- name: Generate artifact attestation
if: steps.check.outputs.exists != 'true'
uses: actions/attest-build-provenance@v4
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -1575,7 +1441,6 @@ jobs:
name: "Gate 7 T2: ${{ matrix.image }}"
runs-on: ubuntu-latest
needs: container-base-tier1
if: ${{ !inputs.skip_containers }}
strategy:
fail-fast: false
matrix:
@@ -1583,11 +1448,9 @@ jobs:
- image: base-conan-deps
dockerfile: ./deployment/base-images/Dockerfile.conan-deps
platforms: linux/amd64,linux/arm64
require_prebuilt: true
- image: base-android-sdk
dockerfile: ./deployment/base-images/Dockerfile.android-sdk
platforms: linux/amd64,linux/arm64
require_prebuilt: false
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -1605,36 +1468,8 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Check if image already exists in GHCR
id: check
shell: bash
run: |
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
if docker manifest inspect "$IMAGE" > /dev/null 2>&1; then
echo "exists=true" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE already exists — skipping build"
else
echo "exists=false" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE not found — will build"
fi
- name: Fail if pre-built image required but missing
if: steps.check.outputs.exists != 'true' && matrix.require_prebuilt == true
shell: bash
run: |
echo "::error::${{ matrix.image }} must be pre-built and pushed to GHCR before CI can proceed."
echo "::error::Run locally: docker build --platform linux/amd64,linux/arm64 -f ${{ matrix.dockerfile }} -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }} --push ."
exit 1
- name: Pull existing image from GHCR
if: steps.check.outputs.exists == 'true'
shell: bash
run: |
docker pull "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
- name: Extract metadata (tags, labels)
id: meta
if: steps.check.outputs.exists != 'true' && matrix.require_prebuilt != true
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -1648,7 +1483,6 @@ jobs:
- name: Build and push Docker image
id: build
if: steps.check.outputs.exists != 'true' && matrix.require_prebuilt != true
uses: docker/build-push-action@v6
with:
context: .
@@ -1657,9 +1491,7 @@ jobs:
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}
type=gha,scope=${{ matrix.image }}
cache-from: type=gha,scope=${{ matrix.image }}
cache-to: type=gha,mode=max,scope=${{ matrix.image }}
build-args: |
BASE_REGISTRY=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -1667,7 +1499,6 @@ jobs:
VCS_REF=${{ github.sha }}
- name: Generate artifact attestation
if: steps.check.outputs.exists != 'true' && matrix.require_prebuilt != true
uses: actions/attest-build-provenance@v4
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -1679,7 +1510,6 @@ jobs:
name: "Gate 7 T3: devcontainer"
runs-on: ubuntu-latest
needs: container-base-tier2
if: ${{ !inputs.skip_containers }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -1697,28 +1527,8 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Check if image already exists in GHCR
id: check
shell: bash
run: |
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/devcontainer:${{ github.ref_name }}"
if docker manifest inspect "$IMAGE" > /dev/null 2>&1; then
echo "exists=true" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE already exists — skipping build"
else
echo "exists=false" >> "$GITHUB_OUTPUT"
echo "Image $IMAGE not found — will build"
fi
- name: Pull existing image from GHCR
if: steps.check.outputs.exists == 'true'
shell: bash
run: |
docker pull "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/devcontainer:${{ github.ref_name }}"
- name: Extract metadata (tags, labels)
id: meta
if: steps.check.outputs.exists != 'true'
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/devcontainer
@@ -1732,7 +1542,6 @@ jobs:
- name: Build and push Docker image
id: build
if: steps.check.outputs.exists != 'true'
uses: docker/build-push-action@v6
with:
context: .
@@ -1741,9 +1550,7 @@ jobs:
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/devcontainer:${{ github.ref_name }}
type=gha,scope=devcontainer
cache-from: type=gha,scope=devcontainer
cache-to: type=gha,mode=max,scope=devcontainer
build-args: |
BASE_REGISTRY=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -1751,7 +1558,6 @@ jobs:
VCS_REF=${{ github.sha }}
- name: Generate artifact attestation
if: steps.check.outputs.exists != 'true'
uses: actions/attest-build-provenance@v4
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/devcontainer
@@ -1763,7 +1569,7 @@ jobs:
name: "Gate 7 App: ${{ matrix.image }}"
runs-on: ubuntu-latest
needs: [container-base-tier1]
if: github.event_name != 'issues' && github.event_name != 'issue_comment' && needs.container-base-tier1.result == 'success' && !inputs.skip_containers
if: github.event_name != 'issues' && github.event_name != 'issue_comment' && !failure()
strategy:
fail-fast: false
matrix:
@@ -1771,39 +1577,30 @@ jobs:
- image: nextjs-app
context: .
dockerfile: ./frontends/nextjs/Dockerfile
watch_paths: frontends/nextjs packages components
- image: codegen
context: .
dockerfile: ./frontends/codegen/Dockerfile
watch_paths: frontends/codegen packages components
- image: pastebin
context: .
dockerfile: ./frontends/pastebin/Dockerfile
watch_paths: frontends/pastebin packages components
- image: emailclient
context: .
dockerfile: ./frontends/emailclient/Dockerfile
watch_paths: frontends/emailclient packages components
- image: postgres-dashboard
context: .
dockerfile: ./frontends/postgres/Dockerfile
watch_paths: frontends/postgres packages
- image: workflowui
context: .
dockerfile: ./frontends/workflowui/Dockerfile
watch_paths: frontends/workflowui packages components
- image: exploded-diagrams
context: .
dockerfile: ./frontends/exploded-diagrams/Dockerfile
watch_paths: frontends/exploded-diagrams
- image: dbal
context: ./dbal
dockerfile: ./dbal/production/build-config/Dockerfile
watch_paths: dbal
- image: dbal-init
context: .
dockerfile: ./deployment/config/dbal/Dockerfile.init
watch_paths: deployment/config/dbal dbal/shared
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -1818,49 +1615,8 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Check if image needs rebuild
id: check
shell: bash
run: |
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
# If image doesn't exist in GHCR — must build
if ! docker manifest inspect "$IMAGE" > /dev/null 2>&1; then
echo "rebuild=true" >> "$GITHUB_OUTPUT"
echo "Image not in GHCR — will build"
exit 0
fi
# Image exists — check if watched paths changed in this push
BEFORE="${{ github.event.before }}"
if [ -z "$BEFORE" ] || [ "$BEFORE" = "0000000000000000000000000000000000000000" ]; then
echo "rebuild=true" >> "$GITHUB_OUTPUT"
echo "No before SHA (new branch or dispatch) — rebuilding"
exit 0
fi
# Fetch the before commit (shallow checkout only has HEAD)
git fetch --depth=1 origin "$BEFORE" 2>/dev/null || true
read -ra watch <<< "${{ matrix.watch_paths }}"
CHANGED=$(git diff --name-only "$BEFORE" "${{ github.sha }}" -- "${watch[@]}" 2>/dev/null || echo "")
if [ -z "$CHANGED" ]; then
echo "rebuild=false" >> "$GITHUB_OUTPUT"
echo "No changes in watched paths — pulling from GHCR"
else
echo "rebuild=true" >> "$GITHUB_OUTPUT"
printf "Changes detected — rebuilding:\n%s\n" "$CHANGED"
fi
- name: Pull existing image from GHCR
if: steps.check.outputs.rebuild == 'false'
shell: bash
run: |
docker pull "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}"
- name: Extract metadata (tags, labels)
id: meta
if: steps.check.outputs.rebuild == 'true'
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -1874,7 +1630,6 @@ jobs:
- name: Build and push Docker image
id: build
if: steps.check.outputs.rebuild == 'true'
uses: docker/build-push-action@v6
with:
context: ${{ matrix.context }}
@@ -1882,9 +1637,7 @@ jobs:
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}:${{ github.ref_name }}
type=gha,scope=${{ matrix.image }}
cache-from: type=gha,scope=${{ matrix.image }}
cache-to: type=gha,mode=max,scope=${{ matrix.image }}
build-args: |
BASE_REGISTRY=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -1893,7 +1646,6 @@ jobs:
VERSION=${{ steps.meta.outputs.version }}
- name: Generate artifact attestation
if: steps.check.outputs.rebuild == 'true'
uses: actions/attest-build-provenance@v4
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/${{ matrix.image }}
@@ -2055,7 +1807,7 @@ jobs:
if: always()
steps:
- name: Download all gate artifacts
uses: actions/download-artifact@v8
uses: actions/download-artifact@v6
with:
pattern: gate-*-complete-report
path: all-gate-artifacts/
@@ -2120,7 +1872,7 @@ jobs:
}
- name: Upload complete audit trail
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@v6
with:
name: complete-gate-audit-trail
path: all-gate-artifacts/

3
.gitignore vendored
View File

@@ -116,6 +116,3 @@ bun.lockb
/.claude/worktrees
*.tsbuildinfo
# frontends/pastebin/public/pyodide/ # kept in fat repo
.vscode/settings.json
.act-env
/frontends/qt6/_build

View File

@@ -50,8 +50,8 @@ POST /pastebin/pastebin/User
| `dbal/production/src/daemon/server_routes.cpp` | Route registration + auto-seed startup |
| `frontends/pastebin/backend/app.py` | Flask JWT auth + Python runner |
| `frontends/pastebin/src/` | Next.js React app |
| `deployment/compose.yml` | Full stack compose |
| `deployment/deployment.py` | Python CLI for all build/deploy/stack commands |
| `deployment/docker-compose.stack.yml` | Full stack compose |
| `deployment/build-apps.sh` | Build + deploy helper |
---
@@ -79,16 +79,16 @@ docker logs -f metabuilder-pastebin-backend
cd deployment
# Full rebuild + restart
python3 deployment.py build apps --force dbal pastebin
docker compose -f compose.yml up -d
./build-apps.sh --force dbal pastebin
docker compose -f docker-compose.stack.yml up -d
# Flask backend (separate from Next.js)
docker compose -f compose.yml build pastebin-backend
docker compose -f compose.yml up -d pastebin-backend
docker compose -f docker-compose.stack.yml build pastebin-backend
docker compose -f docker-compose.stack.yml up -d pastebin-backend
# dbal-init volume (schema volume container — rebuild when entity JSON changes)
docker compose -f compose.yml build dbal-init
docker compose -f compose.yml up dbal-init
docker compose -f docker-compose.stack.yml build dbal-init
docker compose -f docker-compose.stack.yml up dbal-init
```
---
@@ -163,7 +163,7 @@ Context variable resolution: `"${var_name}"`, `"${event.userId}"`, `"prefix-${na
3. **Seed data in `dbal/shared/seeds/`** — never hardcode in Flask Python or C++.
4. **No hardcoded entity names** — loaded from schema JSON.
5. **Call `ensureClient()` before any DB op in `registerRoutes()`** — `dbal_client_` starts null.
6. **`deployment.py build apps pastebin` ≠ Flask** — that only rebuilds Next.js. Flask needs `docker compose build pastebin-backend`.
6. **`build-apps.sh pastebin` ≠ Flask** — that only rebuilds Next.js. Flask needs `docker compose build pastebin-backend`.
---

View File

@@ -18,7 +18,7 @@ All documentation is executable code. No separate markdown docs.
./gameengine/gameengine.py --help # Game engine
./postgres/postgres.py --help # PostgreSQL dashboard
./mojo/mojo.py --help # Mojo compiler
cd deployment && python3 deployment.py build base --list # Docker base images
./deployment/build-base-images.sh --list # Docker base images
# Documentation (SQLite3 + FTS5 full-text search)
cd txt && python3 reports.py search "query" # 212 reports
@@ -227,14 +227,14 @@ Frontends (CLI C++ | Qt6 QML | Next.js React)
```bash
npm run dev / build / typecheck / lint / test:e2e
npm run build --workspaces
cd deployment && python3 deployment.py build base # Build Docker base images
cd deployment && ./build-base-images.sh # Build Docker base images
# Deploy full stack
cd deployment && docker compose -f compose.yml up -d
cd deployment && docker compose -f docker-compose.stack.yml up -d
# Build & deploy specific apps
python3 deployment.py build apps --force dbal pastebin # Next.js frontend only
docker compose -f compose.yml build pastebin-backend # Flask backend
./build-apps.sh --force dbal pastebin # Next.js frontend only
docker compose -f docker-compose.stack.yml build pastebin-backend # Flask backend
# DBAL logs / seed verification
docker logs -f metabuilder-dbal
@@ -336,14 +336,10 @@ Multi-version peer deps. React 18/19, TypeScript 5.9.3, Next.js 14-16, @reduxjs/
| nlohmann/json iterators | Use `it.value()` not `it->second` (std::map syntax fails) |
| dbal-init volume stale | Rebuild with `docker compose build dbal-init` when schema file extensions change |
| `.dockerignore` excludes `dbal/` | Whitelist specific subdirs: `!dbal/shared/seeds/database` |
| `deployment.py build apps pastebin` ≠ Flask backend | Use `docker compose build pastebin-backend` for Flask |
| `build-apps.sh pastebin` ≠ Flask backend | Use `docker compose build pastebin-backend` for Flask |
| `ensureClient()` before startup DB ops | `dbal_client_` is null in `registerRoutes()` — must call `ensureClient()` first |
| Seed data in Flask Python | NEVER — declarative seed data belongs in `dbal/shared/seeds/database/*.json` |
| Werkzeug scrypt on macOS Python | Generate hashes inside running container: `docker exec metabuilder-pastebin-backend python3 -c "..."` |
| `loadFromDirectory` vs `loadFromFile` | Both must stay in sync — `loadFromDirectory` is used in production; check both when adding schema parsing features |
| New DBAL entity missing from frontend | Add JSON schema in `dbal/shared/api/schema/entities/{package}/`, seed in `dbal/shared/seeds/database/`, rebuild `dbal-init` + DBAL image |
| ComponentNode schema vs C++ struct | JSON schema must match C++ struct in `types.generated.hpp` (pageId, parentId, childIds, order), NOT the Redux slice shape |
| GitHub Actions version assumptions | NEVER assume an action version is invalid — use `WebFetch` on `https://github.com/actions/{name}/releases` to verify before changing |
### Critical Folders to Check Before Any Task

View File

@@ -32,13 +32,13 @@ http://localhost:8080 # DBAL C++ REST API (entities)
```bash
# Deploy full stack
cd deployment
docker compose -f compose.yml up -d
docker compose -f docker-compose.stack.yml up -d
# Build & deploy a specific app
python3 deployment.py build apps --force dbal pastebin
./build-apps.sh --force dbal pastebin
# Rebuild base images (rare)
python3 deployment.py build base
./build-base-images.sh
```
---

View File

@@ -2562,7 +2562,7 @@ docker-compose -f deployment/docker-compose.production.yml up -d
```bash
# Deploy everything (PostgreSQL, DBAL, Next.js, Media daemon, Redis, Nginx)
cd deployment && python3 deployment.py deploy --all
./deployment/deploy.sh all --bootstrap
```
### Cloud Platforms

View File

@@ -1,49 +1,42 @@
// fakemui/email/atoms/MarkAsReadCheckbox.tsx
import React, { forwardRef, useState } from 'react'
import { useAccessible } from '../../../../hooks/useAccessible'
export interface MarkAsReadCheckboxProps
extends React.InputHTMLAttributes<HTMLInputElement> {
export interface MarkAsReadCheckboxProps extends React.InputHTMLAttributes<HTMLInputElement> {
isRead?: boolean
onToggleRead?: (read: boolean) => void
testId?: string
}
export const MarkAsReadCheckbox = forwardRef<
HTMLInputElement,
MarkAsReadCheckboxProps
>(({ isRead = false, onToggleRead, testId, ...props }, ref) => {
const [read, setRead] = useState(isRead)
export const MarkAsReadCheckbox = forwardRef<HTMLInputElement, MarkAsReadCheckboxProps>(
({ isRead = false, onToggleRead, testId: customTestId, ...props }, ref) => {
const [read, setRead] = useState(isRead)
const accessible = useAccessible({
feature: 'email',
component: 'read-checkbox',
identifier: testId || 'read-status'
})
const accessible = useAccessible({
feature: 'email',
component: 'read-checkbox',
identifier: customTestId || 'read-status'
})
const handleChange = (
e: React.ChangeEvent<HTMLInputElement>
) => {
const next = e.target.checked
setRead(next)
onToggleRead?.(next)
props.onChange?.(e)
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const newState = e.target.checked
setRead(newState)
onToggleRead?.(newState)
props.onChange?.(e)
}
return (
<input
ref={ref}
type="checkbox"
checked={read}
className="read-checkbox"
aria-label="Mark as read"
{...accessible}
{...props}
onChange={handleChange}
/>
)
}
const label = read ? 'Mark as unread' : 'Mark as read'
return (
<input
ref={ref}
type="checkbox"
checked={read}
className="read-checkbox"
aria-label={label}
{...accessible}
{...props}
onChange={handleChange}
/>
)
})
)
MarkAsReadCheckbox.displayName = 'MarkAsReadCheckbox'

View File

@@ -1,56 +1,43 @@
// fakemui/email/atoms/StarButton.tsx
import React, { forwardRef, useEffect, useState } from 'react'
import { MaterialIcon } from '../../../../icons/react/fakemui'
import React, { forwardRef, useState } from 'react'
import { useAccessible } from '../../../../hooks/useAccessible'
export interface StarButtonProps
extends React.ButtonHTMLAttributes<HTMLButtonElement> {
export interface StarButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
isStarred?: boolean
onToggleStar?: (starred: boolean) => void
testId?: string
}
export const StarButton = forwardRef<
HTMLButtonElement,
StarButtonProps
>(({ isStarred = false, onToggleStar, testId, ...props }, ref) => {
const [starred, setStarred] = useState(isStarred)
export const StarButton = forwardRef<HTMLButtonElement, StarButtonProps>(
({ isStarred = false, onToggleStar, testId: customTestId, ...props }, ref) => {
const [starred, setStarred] = useState(isStarred)
useEffect(() => { setStarred(isStarred) }, [isStarred])
const accessible = useAccessible({
feature: 'email',
component: 'star-button',
identifier: customTestId || 'star'
})
const accessible = useAccessible({
feature: 'email',
component: 'star-button',
identifier: testId || 'star'
})
const handleClick = (e: React.MouseEvent<HTMLButtonElement>) => {
const newState = !starred
setStarred(newState)
onToggleStar?.(newState)
props.onClick?.(e)
}
const handleClick = (
e: React.MouseEvent<HTMLButtonElement>
) => {
const next = !starred
setStarred(next)
onToggleStar?.(next)
props.onClick?.(e)
return (
<button
ref={ref}
className={`star-button ${starred ? 'star-button--active' : ''}`}
aria-pressed={starred}
title={starred ? 'Remove star' : 'Add star'}
{...accessible}
{...props}
onClick={handleClick}
>
{starred ? '⭐' : '☆'}
</button>
)
}
const label = starred ? 'Remove star' : 'Add star'
const icon = starred ? 'star' : 'star_border'
return (
<button
ref={ref}
className={
`star-button${starred ? ' star-button--active' : ''}`
}
aria-pressed={starred}
aria-label={label}
{...accessible}
{...props}
onClick={handleClick}
>
<MaterialIcon name={icon} fill={starred ? 1 : 0} />
</button>
)
})
)
StarButton.displayName = 'StarButton'

View File

@@ -31,23 +31,15 @@ export const EmailHeader = ({
component: 'email-header',
identifier: customTestId || subject
})
const iso = new Date(receivedAt).toISOString()
const display = new Date(receivedAt).toLocaleString()
return (
<Box
role="banner"
aria-label="Email details"
className="email-header"
{...accessible}
{...props}
>
<div className="header-top">
<Typography
variant="h5"
id="email-subject"
className="subject"
>
<Typography variant="h5" className="subject">
{subject}
</Typography>
<StarButton
@@ -56,35 +48,19 @@ export const EmailHeader = ({
/>
</div>
<div className="header-details">
<Typography
variant="body2"
className="from"
data-testid="email-from"
>
<Typography variant="body2" className="from">
From: <strong>{from}</strong>
</Typography>
<Typography
variant="body2"
className="to"
data-testid="email-to"
>
<Typography variant="body2" className="to">
To: <strong>{to.join(', ')}</strong>
</Typography>
{cc && cc.length > 0 && (
<Typography
variant="body2"
className="cc"
data-testid="email-cc"
>
<Typography variant="body2" className="cc">
Cc: <strong>{cc.join(', ')}</strong>
</Typography>
)}
<Typography
variant="caption"
className="date"
data-testid="email-date"
>
<time dateTime={iso}>{display}</time>
<Typography variant="caption" className="date">
{new Date(receivedAt).toLocaleString()}
</Typography>
</div>
</Box>

View File

@@ -5,14 +5,11 @@ import { useAccessible } from '../../../../hooks/useAccessible'
import { EmailCard, type EmailCardProps } from '../surfaces'
export interface ThreadListProps extends BoxProps {
emails: Array<Omit<
EmailCardProps,
'onSelect' | 'onToggleRead' | 'onToggleStar'
>>
emails: Array<Omit<EmailCardProps, 'onSelect' | 'onToggleRead' | 'onToggleStar'>>
selectedEmailId?: string
onSelectEmail?: (emailId: string) => void
onToggleRead?: (id: string, read: boolean) => void
onToggleStar?: (id: string, star: boolean) => void
onToggleRead?: (emailId: string, read: boolean) => void
onToggleStar?: (emailId: string, starred: boolean) => void
testId?: string
}
@@ -31,44 +28,23 @@ export const ThreadList = ({
identifier: customTestId || 'threads'
})
const emailId = (e: typeof emails[0], i: number) =>
e.testId || `email-${i}`
return (
<Box
role="list"
aria-label="Email messages"
className="thread-list"
{...accessible}
{...props}
>
{emails.length === 0 ? (
<div className="no-emails" role="status">
No emails
</div>
<div className="no-emails">No emails</div>
) : (
emails.map((email, idx) => (
<div role="listitem" key={idx}>
<EmailCard
{...email}
selected={
selectedEmailId === email.testId
}
onSelect={() =>
onSelectEmail?.(emailId(email, idx))
}
onToggleRead={(read) =>
onToggleRead?.(
emailId(email, idx), read
)
}
onToggleStar={(starred) =>
onToggleStar?.(
emailId(email, idx), starred
)
}
/>
</div>
<EmailCard
key={idx}
{...email}
onSelect={() => onSelectEmail?.(email.testId || `email-${idx}`)}
onToggleRead={(read) => onToggleRead?.(email.testId || `email-${idx}`, read)}
onToggleStar={(starred) => onToggleStar?.(email.testId || `email-${idx}`, starred)}
/>
))
)}
</Box>

View File

@@ -57,13 +57,6 @@ export {
export {
MailboxLayout,
type MailboxLayoutProps,
MailboxHeader,
type MailboxHeaderProps,
MailboxSidebar,
type MailboxSidebarProps,
EmailDetail,
type EmailDetailProps,
type EmailDetailEmail,
ComposerLayout,
type ComposerLayoutProps,
SettingsLayout,

View File

@@ -3,62 +3,48 @@ import React, { forwardRef } from 'react'
import { Box } from '../../layout/Box'
import { useAccessible } from '../../../../hooks/useAccessible'
export interface BodyEditorProps
extends React.TextareaHTMLAttributes<
HTMLTextAreaElement
> {
export interface BodyEditorProps extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {
mode?: 'plain' | 'html'
onModeChange?: (mode: 'plain' | 'html') => void
testId?: string
}
export const BodyEditor = forwardRef<
HTMLTextAreaElement,
BodyEditorProps
>(({
mode = 'plain',
onModeChange,
testId: customTestId,
...props
}, ref) => {
const accessible = useAccessible({
feature: 'email',
component: 'body-editor',
identifier: customTestId || 'body',
ariaLabel: 'Email body'
})
export const BodyEditor = forwardRef<HTMLTextAreaElement, BodyEditorProps>(
({ mode = 'plain', onModeChange, testId: customTestId, ...props }, ref) => {
const accessible = useAccessible({
feature: 'email',
component: 'body-editor',
identifier: customTestId || 'body'
})
const modeBtn = (m: 'plain' | 'html', label: string) => (
<button
type="button"
className={`mode-btn${mode === m ? ' mode-btn--active' : ''}`}
onClick={() => onModeChange?.(m)}
aria-pressed={mode === m}
data-testid={`mode-${m}`}
>
{label}
</button>
)
return (
<Box className="body-editor">
<div
className="body-editor-toolbar"
role="toolbar"
aria-label="Editor mode"
>
{modeBtn('plain', 'Plain Text')}
{modeBtn('html', 'HTML')}
</div>
<textarea
ref={ref}
className="body-editor-textarea"
placeholder="Write your message here..."
{...accessible}
{...props}
/>
</Box>
)
})
return (
<Box className="body-editor">
<div className="body-editor-toolbar">
<button
type="button"
className={`mode-btn ${mode === 'plain' ? 'mode-btn--active' : ''}`}
onClick={() => onModeChange?.('plain')}
>
Plain Text
</button>
<button
type="button"
className={`mode-btn ${mode === 'html' ? 'mode-btn--active' : ''}`}
onClick={() => onModeChange?.('html')}
>
HTML
</button>
</div>
<textarea
ref={ref}
className="body-editor-textarea"
placeholder="Write your message here..."
{...accessible}
{...props}
/>
</Box>
)
}
)
BodyEditor.displayName = 'BodyEditor'

View File

@@ -5,80 +5,63 @@ import { TextField } from '../../inputs/TextField'
import { Chip } from '../../data-display/Chip'
import { useAccessible } from '../../../../hooks/useAccessible'
export interface RecipientInputProps
extends React.InputHTMLAttributes<HTMLInputElement> {
export interface RecipientInputProps extends React.InputHTMLAttributes<HTMLInputElement> {
recipients?: string[]
onRecipientsChange?: (recipients: string[]) => void
recipientType?: 'to' | 'cc' | 'bcc'
testId?: string
}
export const RecipientInput = forwardRef<
HTMLInputElement,
RecipientInputProps
>(({
recipients = [],
onRecipientsChange,
recipientType = 'to',
testId: customTestId,
...props
}, ref) => {
const [inputValue, setInputValue] = useState('')
const accessible = useAccessible({
feature: 'email',
component: 'recipient-input',
identifier: customTestId || recipientType,
ariaLabel: `Add ${recipientType} recipient`
})
export const RecipientInput = forwardRef<HTMLInputElement, RecipientInputProps>(
({ recipients = [], onRecipientsChange, recipientType = 'to', testId: customTestId, ...props }, ref) => {
const [inputValue, setInputValue] = useState('')
const accessible = useAccessible({
feature: 'email',
component: 'recipient-input',
identifier: customTestId || recipientType
})
const addRecipient = () => {
if (inputValue && inputValue.includes('@')) {
onRecipientsChange?.([
...recipients, inputValue.trim()
])
setInputValue('')
const handleAddRecipient = () => {
if (inputValue && inputValue.includes('@')) {
const newRecipients = [...recipients, inputValue.trim()]
onRecipientsChange?.(newRecipients)
setInputValue('')
}
}
}
const removeRecipient = (index: number) => {
onRecipientsChange?.(
recipients.filter((_, i) => i !== index)
const handleRemoveRecipient = (index: number) => {
const newRecipients = recipients.filter((_, i) => i !== index)
onRecipientsChange?.(newRecipients)
}
// Filter out incompatible HTML input attributes
const { size: _size, ...textFieldProps } = props
return (
<Box className="recipient-input">
<div className="recipient-chips">
{recipients.map((recipient, index) => (
<Chip
key={index}
onDelete={() => handleRemoveRecipient(index)}
>
{recipient}
</Chip>
))}
</div>
<TextField
ref={ref}
type="email"
placeholder={`Add ${recipientType} recipient...`}
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
onKeyPress={(e) => e.key === 'Enter' && handleAddRecipient()}
{...accessible}
{...textFieldProps}
/>
</Box>
)
}
const { size: _size, ...textFieldProps } = props
return (
<Box
className="recipient-input"
role="group"
aria-label={`${recipientType.toUpperCase()} recipients`}
>
<div className="recipient-chips">
{recipients.map((r, i) => (
<Chip
key={i}
onDelete={() => removeRecipient(i)}
aria-label={`Remove ${r}`}
>
{r}
</Chip>
))}
</div>
<TextField
ref={ref}
type="email"
placeholder={`Add ${recipientType} recipient...`}
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
onKeyDown={(e) =>
e.key === 'Enter' && addRecipient()
}
{...accessible}
{...textFieldProps}
/>
</Box>
)
})
)
RecipientInput.displayName = 'RecipientInput'

View File

@@ -1,155 +0,0 @@
import React from 'react'
import { Box, BoxProps, Button, IconButton } from '../..'
import { MaterialIcon } from '../../../../icons/react/fakemui'
import { useAccessible } from '../../../../hooks/useAccessible'
import { EmailHeader } from '../data-display'
/** Plain-data shape of a single email as rendered by EmailDetail. */
export interface EmailDetailEmail {
  id: string
  // Sender address (displayed as-is by EmailHeader).
  from: string
  // Recipient address lists; `to` is required, `cc` is optional.
  to: string[]
  cc?: string[]
  subject: string
  // Message body rendered as plain text inside the detail view.
  body: string
  // Numeric timestamp; consumers pass it to `new Date(...)`, so it is
  // expected to be epoch milliseconds — confirm with the data source.
  receivedAt: number
  isStarred: boolean
}
/**
 * Props for EmailDetail. All action callbacks are optional; the
 * corresponding toolbar button is only rendered when its handler is set.
 */
export interface EmailDetailProps extends BoxProps {
  // The email to display.
  email: EmailDetailEmail
  // Invoked by the back-arrow button (returns to the list view).
  onClose?: () => void
  onArchive?: () => void
  onDelete?: () => void
  onReply?: () => void
  onForward?: () => void
  // Receives the new starred state from the embedded EmailHeader.
  onToggleStar?: (starred: boolean) => void
  // Overrides the identifier used for accessibility/test attributes.
  testId?: string
}
/**
 * Full email reading pane: a toolbar (back / archive / delete / reply /
 * forward), the EmailHeader, the plain-text body, and a bottom reply bar.
 *
 * Each toolbar action button is rendered only when its callback prop is
 * provided, so callers can opt into exactly the actions they support.
 */
export const EmailDetail = ({
  email,
  onClose,
  onArchive,
  onDelete,
  onReply,
  onForward,
  onToggleStar,
  testId: customTestId,
  ...props
}: EmailDetailProps) => {
  // Accessibility/test attributes keyed by feature/component/identifier;
  // falls back to 'detail' when no explicit testId is given.
  const accessible = useAccessible({
    feature: 'email',
    component: 'email-detail',
    identifier: customTestId || 'detail'
  })
  return (
    <Box
      role="article"
      aria-label={email.subject}
      className="email-detail"
      {...accessible}
      {...props}
    >
      {/* Top toolbar: optional back button plus the action cluster. */}
      <Box
        id="email-detail-toolbar"
        className="email-detail-toolbar"
      >
        {onClose && (
          <IconButton
            aria-label="Back to list"
            data-testid="email-back"
            onClick={onClose}
          >
            <MaterialIcon name="arrow_back" size={20} />
          </IconButton>
        )}
        <Box className="email-detail-actions">
          {onArchive && (
            <IconButton
              aria-label="Archive"
              title="Archive"
              data-testid="email-archive"
              onClick={onArchive}
            >
              <MaterialIcon name="archive" size={20} />
            </IconButton>
          )}
          {onDelete && (
            <IconButton
              aria-label="Delete"
              title="Delete"
              data-testid="email-delete"
              onClick={onDelete}
            >
              <MaterialIcon name="delete" size={20} />
            </IconButton>
          )}
          {onReply && (
            <IconButton
              aria-label="Reply"
              title="Reply"
              data-testid="email-reply"
              onClick={onReply}
            >
              <MaterialIcon name="reply" size={20} />
            </IconButton>
          )}
          {onForward && (
            <IconButton
              aria-label="Forward"
              title="Forward"
              data-testid="email-forward"
              onClick={onForward}
            >
              <MaterialIcon name="forward" size={20} />
            </IconButton>
          )}
        </Box>
      </Box>
      {/* Sender/recipients/subject/date header; star toggle is delegated. */}
      <EmailHeader
        from={email.from}
        to={email.to}
        cc={email.cc}
        subject={email.subject}
        receivedAt={email.receivedAt}
        isStarred={email.isStarred}
        onToggleStar={onToggleStar}
      />
      {/* Body is rendered as text content, not HTML. */}
      <Box
        id="email-detail-body"
        className="email-detail-body"
      >
        {email.body}
      </Box>
      {/* Bottom bar duplicates reply/forward as labeled buttons. */}
      <Box
        id="email-detail-reply-bar"
        className="email-detail-reply-bar"
      >
        {onReply && (
          <Button
            variant="outlined"
            data-testid="email-reply-btn"
            onClick={onReply}
          >
            <MaterialIcon name="reply" size={16} />
            Reply
          </Button>
        )}
        {onForward && (
          <Button
            variant="outlined"
            data-testid="email-forward-btn"
            onClick={onForward}
          >
            <MaterialIcon name="forward" size={16} />
            Forward
          </Button>
        )}
      </Box>
    </Box>
  )
}

View File

@@ -1,108 +0,0 @@
import React from 'react'
import { Box, BoxProps, IconButton } from '../..'
import { MaterialIcon } from '../../../../icons/react/fakemui'
import { useAccessible } from '../../../../hooks/useAccessible'
/**
 * Props for the MailboxHeader top bar. Optional callbacks gate the
 * rendering of their corresponding controls (menu, alerts, theme,
 * locale, settings).
 */
export interface MailboxHeaderProps extends BoxProps {
  // Brand title next to the mail logo; defaults to 'MetaMail'.
  appName?: string
  // Controlled search input value.
  searchQuery?: string
  onSearchChange?: (query: string) => void
  searchPlaceholder?: string
  // Short text shown inside the avatar circle (e.g. user initials).
  avatarLabel?: string
  // Drives the theme-toggle icon and its accessible label.
  isDarkMode?: boolean
  onToggleTheme?: () => void
  onMenuClick?: () => void
  onSettingsClick?: () => void
  onAlertsClick?: () => void
  // Badge count on the notifications button; hidden when 0.
  alertCount?: number
  // Current locale code shown on the language button (e.g. 'EN').
  locale?: string
  onCycleLocale?: () => void
  testId?: string
}
/**
 * Application top bar for the mailbox layout: brand + optional menu on
 * the left, a search field in the middle, and an action cluster
 * (alerts, theme toggle, locale, settings, avatar) on the right.
 *
 * Every optional control renders only when its handler prop is set.
 */
export const MailboxHeader = ({
  appName = 'MetaMail',
  searchQuery = '',
  onSearchChange,
  searchPlaceholder = 'Search mail',
  avatarLabel = 'U',
  isDarkMode = false,
  onToggleTheme,
  onMenuClick,
  onSettingsClick,
  onAlertsClick,
  alertCount = 0,
  locale = 'EN',
  onCycleLocale,
  testId: customTestId,
  ...props
}: MailboxHeaderProps) => {
  // Accessibility/test attributes; identifier falls back to 'header'.
  const accessible = useAccessible({
    feature: 'email',
    component: 'mailbox-header',
    identifier: customTestId || 'header'
  })
  return (
    <Box className="mailbox-header-bar" {...accessible} {...props}>
      {/* Left: hamburger menu (optional) and brand. */}
      <Box className="mailbox-header-left">
        {onMenuClick && (
          <IconButton aria-label="Menu" title="Menu" onClick={onMenuClick} className="mailbox-header-icon-btn">
            <MaterialIcon name="menu" size={22} />
          </IconButton>
        )}
        <Box className="mailbox-header-brand">
          <MaterialIcon name="mail" size={26} className="mailbox-header-logo" />
          <span className="mailbox-header-title">{appName}</span>
        </Box>
      </Box>
      {/* Middle: controlled search input. */}
      <Box className="mailbox-header-search">
        <MaterialIcon name="search" size={20} className="mailbox-search-icon" />
        <input
          type="search"
          className="mailbox-search-input"
          placeholder={searchPlaceholder}
          value={searchQuery}
          onChange={e => onSearchChange?.(e.target.value)}
          aria-label={searchPlaceholder}
        />
      </Box>
      {/* Right: notifications, theme, locale, settings, avatar. */}
      <Box className="mailbox-header-actions">
        {onAlertsClick && (
          <Box className="mailbox-header-alert-wrapper">
            <IconButton aria-label="Notifications" title="Notifications" onClick={onAlertsClick} className="mailbox-header-icon-btn">
              <MaterialIcon name="notifications" size={20} />
            </IconButton>
            {/* Badge only shows for a positive count. */}
            {alertCount > 0 && <span className="mailbox-header-badge">{alertCount}</span>}
          </Box>
        )}
        {onToggleTheme && (
          <IconButton
            aria-label={isDarkMode ? 'Switch to light mode' : 'Switch to dark mode'}
            title={isDarkMode ? 'Light mode' : 'Dark mode'}
            onClick={onToggleTheme}
            className="mailbox-header-icon-btn"
          >
            <MaterialIcon name={isDarkMode ? 'light_mode' : 'dark_mode'} size={20} />
          </IconButton>
        )}
        {onCycleLocale && (
          <button className="mailbox-header-lang-btn" onClick={onCycleLocale} aria-label="Change language" title="Change language">
            <MaterialIcon name="language" size={16} />
            <span>{locale}</span>
          </button>
        )}
        {onSettingsClick && (
          <IconButton aria-label="Settings" title="Settings" onClick={onSettingsClick} className="mailbox-header-icon-btn">
            <MaterialIcon name="settings" size={20} />
          </IconButton>
        )}
        <Box className="mailbox-header-avatar" aria-label="Account">
          {avatarLabel}
        </Box>
      </Box>
    </Box>
  )
}

View File

@@ -1,42 +0,0 @@
import React from 'react'
import { Box, BoxProps } from '../..'
import { MaterialIcon } from '../../../../icons/react/fakemui'
import { useAccessible } from '../../../../hooks/useAccessible'
import { FolderNavigation, type FolderNavigationItem } from '../navigation'
/** Props for MailboxSidebar: folder items plus an optional compose action. */
export interface MailboxSidebarProps extends BoxProps {
  // Folder entries forwarded to FolderNavigation.
  folders: FolderNavigationItem[]
  // Called with the clicked folder's id.
  onNavigate?: (folderId: string) => void
  // When set, a compose button is rendered above the folder list.
  onCompose?: () => void
  // Label for the compose button; defaults to 'Compose'.
  composeLabel?: string
  testId?: string
}
export const MailboxSidebar = ({
folders,
onNavigate,
onCompose,
composeLabel = 'Compose',
testId: customTestId,
...props
}: MailboxSidebarProps) => {
const accessible = useAccessible({
feature: 'email',
component: 'mailbox-sidebar',
identifier: customTestId || 'sidebar'
})
return (
<Box className="mailbox-sidebar-content" {...accessible} {...props}>
{onCompose && (
<Box className="mailbox-sidebar-compose">
<button className="compose-btn" onClick={onCompose}>
<MaterialIcon name="edit" size={20} />
<span>{composeLabel}</span>
</button>
</Box>
)}
<FolderNavigation items={folders} onNavigate={onNavigate} />
</Box>
)
}

View File

@@ -1,6 +1,3 @@
export { MailboxLayout, type MailboxLayoutProps } from './MailboxLayout'
export { MailboxHeader, type MailboxHeaderProps } from './MailboxHeader'
export { MailboxSidebar, type MailboxSidebarProps } from './MailboxSidebar'
export { EmailDetail, type EmailDetailProps, type EmailDetailEmail } from './EmailDetail'
export { ComposerLayout, type ComposerLayoutProps } from './ComposerLayout'
export { SettingsLayout, type SettingsLayoutProps, type SettingsSection } from './SettingsLayout'

View File

@@ -31,37 +31,24 @@ export const FolderNavigation = ({
return (
<Box
className="folder-navigation"
role="navigation"
aria-label="Mail folders"
{...accessible}
{...props}
>
<nav className="folder-nav-list" role="list">
<nav className="folder-nav-list">
{items.map((item) => (
<div key={item.id} role="listitem">
<Button
variant={item.isActive ? 'primary' : 'ghost'}
fullWidth
className="folder-nav-item"
onClick={() => onNavigate?.(item.id)}
aria-current={item.isActive || undefined}
data-testid={`folder-nav-${item.id}`}
>
{item.icon && (
<span className="folder-icon">
{item.icon}
</span>
)}
<span className="folder-label">
{item.label}
</span>
{item.unreadCount ? (
<span className="unread-count">
{item.unreadCount}
</span>
) : null}
</Button>
</div>
<Button
key={item.id}
variant={item.isActive ? 'primary' : 'ghost'}
fullWidth
className="folder-nav-item"
onClick={() => onNavigate?.(item.id)}
>
{item.icon && <span className="folder-icon">{item.icon}</span>}
<span className="folder-label">{item.label}</span>
{item.unreadCount ? (
<span className="unread-count">{item.unreadCount}</span>
) : null}
</Button>
))}
</nav>
</Box>

View File

@@ -1,111 +1,87 @@
// fakemui/react/components/email/surfaces/ComposeWindow
import React, { useCallback, useEffect, useState } from 'react'
// fakemui/react/components/email/surfaces/ComposeWindow.tsx
import React, { useState } from 'react'
import { Box, BoxProps, Button, Card } from '../..'
import {
useAccessible, useFocusTrap
} from '../../../../hooks/useAccessible'
import { MaterialIcon } from '../../../../icons/react/fakemui'
import { RecipientInput, BodyEditor } from '../inputs'
import { useAccessible } from '../../../../hooks/useAccessible'
import { EmailAddressInput, RecipientInput, BodyEditor } from '../inputs'
export interface ComposeWindowProps extends BoxProps {
onSend?: (data: {
to: string[]; cc?: string[]; bcc?: string[]
subject: string; body: string
}) => void
onSend?: (data: { to: string[]; cc?: string[]; bcc?: string[]; subject: string; body: string }) => void
onClose?: () => void
testId?: string
}
export const ComposeWindow = ({
onSend, onClose, testId: customTestId, ...props
onSend,
onClose,
testId: customTestId,
...props
}: ComposeWindowProps) => {
const [to, setTo] = useState<string[]>([])
const [cc, setCc] = useState<string[]>([])
const [bcc, setBcc] = useState<string[]>([])
const [subject, setSubject] = useState('')
const [body, setBody] = useState('')
const [showCcBcc, setShowCcBcc] = useState(false)
const accessible = useAccessible({
feature: 'email', component: 'compose',
feature: 'email',
component: 'compose',
identifier: customTestId || 'compose'
})
const trapRef = useFocusTrap(true)
const handleSend = () => {
if (to.length > 0 && subject && body)
if (to.length > 0 && subject && body) {
onSend?.({ to, cc, bcc, subject, body })
}
}
const handleKeyDown = useCallback(
(e: KeyboardEvent) => {
if (e.key === 'Escape') onClose?.()
}, [onClose]
)
useEffect(() => {
document.addEventListener('keydown', handleKeyDown)
return () =>
document.removeEventListener('keydown', handleKeyDown)
}, [handleKeyDown])
return (
<Card ref={trapRef} className="compose-window"
role="dialog" aria-modal={true}
aria-labelledby="compose-dialog-title"
{...accessible} {...props}>
<Card
className="compose-window"
{...accessible}
{...props}
>
<Box className="compose-header">
<h2 id="compose-dialog-title">Compose Email</h2>
<button onClick={onClose} className="close-btn"
aria-label="Close"
data-testid="compose-close-btn">
<MaterialIcon name="close" />
<h2>Compose Email</h2>
<button onClick={onClose} className="close-btn">
×
</button>
</Box>
<Box className="compose-body">
<Box style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
<RecipientInput recipientType="to"
recipients={to} onRecipientsChange={setTo}
placeholder="To:" />
{!showCcBcc && (
<button
type="button"
onClick={() => setShowCcBcc(true)}
style={{
fontSize: '0.75rem',
color: 'var(--mat-sys-on-surface-variant)',
whiteSpace: 'nowrap',
flexShrink: 0,
padding: '4px 6px',
}}
aria-label="Show Cc and Bcc fields"
>
Cc/Bcc
</button>
)}
</Box>
{showCcBcc && (
<>
<RecipientInput recipientType="cc"
recipients={cc} onRecipientsChange={setCc}
placeholder="Cc:" />
<RecipientInput recipientType="bcc"
recipients={bcc} onRecipientsChange={setBcc}
placeholder="Bcc:" />
</>
)}
<input type="text" placeholder="Subject"
value={subject} id="compose-subject"
aria-label="Subject"
data-testid="compose-subject"
onChange={e => setSubject(e.target.value)}
className="compose-subject" />
<BodyEditor value={body}
onChange={e => setBody(e.target.value)} />
<RecipientInput
recipientType="to"
recipients={to}
onRecipientsChange={setTo}
placeholder="To:"
/>
<RecipientInput
recipientType="cc"
recipients={cc}
onRecipientsChange={setCc}
placeholder="Cc:"
/>
<RecipientInput
recipientType="bcc"
recipients={bcc}
onRecipientsChange={setBcc}
placeholder="Bcc:"
/>
<input
type="text"
placeholder="Subject"
value={subject}
onChange={(e) => setSubject(e.target.value)}
className="compose-subject"
/>
<BodyEditor
value={body}
onChange={(e) => setBody(e.target.value)}
/>
</Box>
<Box className="compose-footer">
<Button variant="primary" onClick={handleSend}
data-testid="compose-send-btn">
<MaterialIcon name="send" /> Send
<Button variant="primary" onClick={handleSend}>
Send
</Button>
<Button variant="outline" onClick={onClose}
data-testid="compose-cancel-btn">
<Button variant="outline" onClick={onClose}>
Cancel
</Button>
</Box>

View File

@@ -11,7 +11,6 @@ export interface EmailCardProps extends CardProps {
receivedAt: number
isRead: boolean
isStarred?: boolean
selected?: boolean
onSelect?: () => void
onToggleRead?: (read: boolean) => void
onToggleStar?: (starred: boolean) => void
@@ -25,7 +24,6 @@ export const EmailCard = ({
receivedAt,
isRead,
isStarred = false,
selected,
onSelect,
onToggleRead,
onToggleStar,
@@ -38,40 +36,19 @@ export const EmailCard = ({
identifier: customTestId || subject.substring(0, 20)
})
const date = new Date(receivedAt)
const today = new Date()
const isToday =
date.toDateString() === today.toDateString()
const displayDate = isToday
? date.toLocaleTimeString(
[], { hour: '2-digit', minute: '2-digit' }
)
: date.toLocaleDateString(
[], { month: 'short', day: 'numeric' }
)
const handleKeyDown = (
e: React.KeyboardEvent
) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault()
onSelect?.()
const formatDate = (timestamp: number) => {
const date = new Date(timestamp)
const today = new Date()
if (date.toDateString() === today.toDateString()) {
return date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })
}
return date.toLocaleDateString([], { month: 'short', day: 'numeric' })
}
return (
<Card
role="article"
aria-label={`Email from ${from}: ${subject}`}
aria-current={selected ? 'true' : undefined}
tabIndex={0}
className={
`email-card ${isRead
? 'email-card--read'
: 'email-card--unread'}`
}
className={`email-card ${isRead ? 'email-card--read' : 'email-card--unread'}`}
onClick={onSelect}
onKeyDown={handleKeyDown}
{...accessible}
{...props}
>
@@ -81,10 +58,7 @@ export const EmailCard = ({
onToggleRead={onToggleRead}
onClick={(e) => e.stopPropagation()}
/>
<Typography
variant="subtitle2"
className="email-from"
>
<Typography variant="subtitle2" className="email-from">
{from}
</Typography>
<div className="email-card-actions">
@@ -93,27 +67,15 @@ export const EmailCard = ({
onToggleStar={onToggleStar}
onClick={(e) => e.stopPropagation()}
/>
<time dateTime={date.toISOString()}>
<Typography
variant="caption"
className="email-date"
>
{displayDate}
</Typography>
</time>
<Typography variant="caption" className="email-date">
{formatDate(receivedAt)}
</Typography>
</div>
</Box>
<Typography
variant="h6"
className="email-subject"
>
<Typography variant="h6" className="email-subject">
{subject}
</Typography>
<Typography
variant="body2"
className="email-preview"
noWrap
>
<Typography variant="body2" className="email-preview" noWrap>
{preview}
</Typography>
</Card>

View File

@@ -259,22 +259,6 @@ public:
entity.fields.push_back(field);
}
}
// Auto-add tenantId field if top-level tenantId: true is set
if (json.contains("tenantId") && json["tenantId"].is_boolean()) {
if (json["tenantId"].get<bool>()) {
bool has_tenant = false;
for (const auto& f : entity.fields)
if (f.name == "tenantId") { has_tenant = true; break; }
if (!has_tenant) {
FieldDefinition tenant_field;
tenant_field.name = "tenantId";
tenant_field.type = "string";
tenant_field.required = false;
tenant_field.nullable = true;
entity.fields.push_back(tenant_field);
}
}
}
if (json.contains("indexes")) {
for (const auto& idx : json["indexes"]) {
IndexDefinition index;

View File

@@ -1,101 +0,0 @@
/**
* @file metrics_route_handler.cpp
* @brief Prometheus-compatible /metrics endpoint implementation
*/
#include "metrics_route_handler.hpp"
#include <spdlog/spdlog.h>
#include <sstream>
namespace dbal {
namespace daemon {
namespace handlers {
// Static member definitions.
// All counters are class-static so every handler instance and the static
// recordRequest()/recordActiveConnection() hooks share one set of metrics.
std::atomic<uint64_t> MetricsRouteHandler::total_requests_{0};
std::atomic<uint64_t> MetricsRouteHandler::total_errors_{0};
std::atomic<int64_t> MetricsRouteHandler::active_connections_{0};
std::atomic<uint64_t> MetricsRouteHandler::requests_get_{0};
std::atomic<uint64_t> MetricsRouteHandler::requests_post_{0};
std::atomic<uint64_t> MetricsRouteHandler::requests_put_{0};
std::atomic<uint64_t> MetricsRouteHandler::requests_delete_{0};
std::atomic<uint64_t> MetricsRouteHandler::status_2xx_{0};
std::atomic<uint64_t> MetricsRouteHandler::status_4xx_{0};
std::atomic<uint64_t> MetricsRouteHandler::status_5xx_{0};

/**
 * @brief Construct a handler bound to a server address label.
 * @param server_address Address string reported in the dbal_info metric.
 *
 * Uptime is measured from construction time using the steady clock, so it
 * is unaffected by wall-clock adjustments.
 */
MetricsRouteHandler::MetricsRouteHandler(const std::string& server_address)
    : server_address_(server_address)
    , start_time_(std::chrono::steady_clock::now()) {}
/**
 * @brief Record one completed HTTP request in the global counters.
 * @param method HTTP verb; only GET/POST/PUT/DELETE get per-method buckets,
 *        other verbs still count toward the total.
 * @param path Request path — currently unused; reserved for future
 *        per-path metrics (unnamed here to avoid unused-parameter warnings).
 * @param status_code HTTP response status, bucketed into 2xx/4xx/5xx;
 *        3xx responses are intentionally not bucketed.
 * @param duration_seconds Request latency — currently unused; reserved for
 *        a future latency histogram.
 *
 * Thread-safe: each counter is an independent std::atomic updated with
 * relaxed ordering, which is sufficient because no cross-counter ordering
 * is required for scraping.
 */
void MetricsRouteHandler::recordRequest(const std::string& method,
                                        const std::string& /*path*/,
                                        int status_code,
                                        double /*duration_seconds*/) {
    total_requests_.fetch_add(1, std::memory_order_relaxed);

    // Per-method buckets.
    if (method == "GET") requests_get_.fetch_add(1, std::memory_order_relaxed);
    else if (method == "POST") requests_post_.fetch_add(1, std::memory_order_relaxed);
    else if (method == "PUT") requests_put_.fetch_add(1, std::memory_order_relaxed);
    else if (method == "DELETE") requests_delete_.fetch_add(1, std::memory_order_relaxed);

    // Status-class buckets; 5xx also feeds the aggregate error counter.
    if (status_code >= 200 && status_code < 300) status_2xx_.fetch_add(1, std::memory_order_relaxed);
    else if (status_code >= 400 && status_code < 500) status_4xx_.fetch_add(1, std::memory_order_relaxed);
    else if (status_code >= 500) {
        status_5xx_.fetch_add(1, std::memory_order_relaxed);
        total_errors_.fetch_add(1, std::memory_order_relaxed);
    }
}
/**
 * @brief Adjust the active-connection gauge.
 * @param delta Signed change: +1 when a connection opens, -1 when it closes.
 *
 * Relaxed ordering is fine — the gauge is only read by the scrape endpoint.
 */
void MetricsRouteHandler::recordActiveConnection(int delta) {
    active_connections_.fetch_add(delta, std::memory_order_relaxed);
}
/**
 * @brief Serve the /metrics endpoint in Prometheus text exposition format.
 * @param request Incoming HTTP request (unused beyond routing).
 * @param callback Drogon response callback invoked with the metrics payload.
 *
 * Emits uptime, request/error totals, the active-connection gauge, and the
 * per-method / per-status-class counters, each preceded by the conventional
 * "# HELP" and "# TYPE" lines. Counters are read with relaxed atomic loads;
 * the snapshot is therefore not a single consistent point in time, which is
 * acceptable for monitoring.
 */
void MetricsRouteHandler::handleMetrics(
    const drogon::HttpRequestPtr& request,
    std::function<void(const drogon::HttpResponsePtr&)>&& callback
) const {
    // Uptime relative to the steady-clock timestamp captured in the ctor.
    auto now = std::chrono::steady_clock::now();
    double uptime_seconds = std::chrono::duration<double>(now - start_time_).count();

    std::ostringstream out;
    out << "# HELP dbal_uptime_seconds Time since DBAL daemon started\n"
        << "# TYPE dbal_uptime_seconds gauge\n"
        << "dbal_uptime_seconds " << uptime_seconds << "\n\n"
        << "# HELP dbal_requests_total Total number of HTTP requests\n"
        << "# TYPE dbal_requests_total counter\n"
        << "dbal_requests_total " << total_requests_.load(std::memory_order_relaxed) << "\n\n"
        << "# HELP dbal_errors_total Total number of 5xx errors\n"
        << "# TYPE dbal_errors_total counter\n"
        << "dbal_errors_total " << total_errors_.load(std::memory_order_relaxed) << "\n\n"
        << "# HELP dbal_active_connections Current active connections\n"
        << "# TYPE dbal_active_connections gauge\n"
        << "dbal_active_connections " << active_connections_.load(std::memory_order_relaxed) << "\n\n"
        << "# HELP dbal_requests_by_method_total Requests by HTTP method\n"
        << "# TYPE dbal_requests_by_method_total counter\n"
        << "dbal_requests_by_method_total{method=\"GET\"} " << requests_get_.load(std::memory_order_relaxed) << "\n"
        << "dbal_requests_by_method_total{method=\"POST\"} " << requests_post_.load(std::memory_order_relaxed) << "\n"
        << "dbal_requests_by_method_total{method=\"PUT\"} " << requests_put_.load(std::memory_order_relaxed) << "\n"
        << "dbal_requests_by_method_total{method=\"DELETE\"} " << requests_delete_.load(std::memory_order_relaxed) << "\n\n"
        << "# HELP dbal_responses_by_status_total Responses by status code class\n"
        << "# TYPE dbal_responses_by_status_total counter\n"
        << "dbal_responses_by_status_total{status=\"2xx\"} " << status_2xx_.load(std::memory_order_relaxed) << "\n"
        << "dbal_responses_by_status_total{status=\"4xx\"} " << status_4xx_.load(std::memory_order_relaxed) << "\n"
        << "dbal_responses_by_status_total{status=\"5xx\"} " << status_5xx_.load(std::memory_order_relaxed) << "\n\n"
        << "# HELP dbal_info DBAL daemon information\n"
        << "# TYPE dbal_info gauge\n"
        // NOTE(review): version is hardcoded here — confirm it is kept in
        // sync with the project version, or source it from a build constant.
        << "dbal_info{version=\"1.2.1\",address=\"" << server_address_ << "\"} 1\n";

    auto response = drogon::HttpResponse::newHttpResponse();
    response->setBody(out.str());
    response->setContentTypeCode(drogon::CT_TEXT_PLAIN);
    response->setStatusCode(drogon::HttpStatusCode::k200OK);
    // Allow browser-based dashboards to scrape cross-origin.
    response->addHeader("Access-Control-Allow-Origin", "*");
    callback(response);
}
} // namespace handlers
} // namespace daemon
} // namespace dbal

View File

@@ -1,61 +0,0 @@
/**
* @file metrics_route_handler.hpp
* @brief Prometheus-compatible /metrics endpoint handler
*/
#pragma once
#include <drogon/HttpRequest.h>
#include <drogon/HttpResponse.h>
#include <functional>
#include <atomic>
#include <chrono>
#include <string>
namespace dbal {
namespace daemon {
namespace handlers {
/**
* @class MetricsRouteHandler
* @brief Handles /metrics endpoint in Prometheus exposition format
*
* Provides runtime metrics including request counts, error rates,
* active connections, and uptime for Prometheus scraping.
*/
class MetricsRouteHandler {
public:
/// @param server_address Address string echoed in the dbal_info metric label.
explicit MetricsRouteHandler(const std::string& server_address);
/**
* @brief Handle /metrics endpoint
* Returns metrics in Prometheus text exposition format
* @param request Incoming HTTP request.
* @param callback Drogon continuation invoked with the text/plain response.
*/
void handleMetrics(
const drogon::HttpRequestPtr& request,
std::function<void(const drogon::HttpResponsePtr&)>&& callback
) const;
// Call these from other handlers to track metrics
// NOTE(review): presumably updates the per-method and per-status-class
// counters below; `path` and `duration_seconds` have no corresponding
// exported metric in this header — confirm their use in the .cpp.
static void recordRequest(const std::string& method, const std::string& path, int status_code, double duration_seconds);
// Adjusts the active-connection gauge by `delta` (e.g. +1 open, -1 close).
static void recordActiveConnection(int delta);
private:
std::string server_address_;  // reported in dbal_info{address="..."}
std::chrono::steady_clock::time_point start_time_;  // time origin for the uptime metric
// Counters are static: shared process-wide across all handler instances so
// other route handlers can record into them via the static methods above.
static std::atomic<uint64_t> total_requests_;
static std::atomic<uint64_t> total_errors_;
static std::atomic<int64_t> active_connections_;
static std::atomic<uint64_t> requests_get_;
static std::atomic<uint64_t> requests_post_;
static std::atomic<uint64_t> requests_put_;
static std::atomic<uint64_t> requests_delete_;
static std::atomic<uint64_t> status_2xx_;
static std::atomic<uint64_t> status_4xx_;
static std::atomic<uint64_t> status_5xx_;
};
} // namespace handlers
} // namespace daemon
} // namespace dbal

View File

@@ -5,7 +5,6 @@
#include "server.hpp"
#include "handlers/health_route_handler.hpp"
#include "handlers/metrics_route_handler.hpp"
#include "handlers/entity_route_handler.hpp"
#include "handlers/query_route_handler.hpp"
#include "security/jwt/jwt_validator.hpp"
@@ -223,16 +222,6 @@ void Server::registerRoutes() {
{drogon::HttpMethod::Get, drogon::HttpMethod::Options}
);
// Metrics endpoint (Prometheus format)
auto metrics_handler = std::make_shared<handlers::MetricsRouteHandler>(address());
drogon::app().registerHandler(
"/metrics",
[metrics_handler](const drogon::HttpRequestPtr& req, DrogonCallback&& callback) {
metrics_handler->handleMetrics(req, std::move(callback));
},
{drogon::HttpMethod::Get, drogon::HttpMethod::Options}
);
// Register schema management route
drogon::app().registerHandler(
"/api/dbal/schema",

View File

@@ -1,70 +0,0 @@
{
"entity": "ComponentNode",
"version": "1.0",
"description": "Visual component node in a page tree with parent-child hierarchy and ordering",
"tenantId": true,
"fields": {
"id": {
"type": "uuid",
"primary": true,
"generated": true,
"description": "Unique component node identifier"
},
"pageId": {
"type": "string",
"required": true,
"description": "Page this component belongs to"
},
"parentId": {
"type": "string",
"optional": true,
"nullable": true,
"description": "Parent component ID (null for root components)"
},
"type": {
"type": "string",
"required": true,
"min_length": 1,
"max_length": 100,
"description": "Component type name"
},
"childIds": {
"type": "string",
"required": true,
"description": "Serialized list of child component IDs"
},
"order": {
"type": "integer",
"required": true,
"default": 0,
"description": "Display order within parent scope (non-negative)"
}
},
"indexes": [
{
"fields": ["tenantId"]
},
{
"fields": ["pageId"]
},
{
"fields": ["parentId"]
},
{
"fields": ["pageId", "parentId", "order"]
}
],
"relations": {
"page": {
"type": "belongs-to",
"entity": "UiPage",
"foreign_key": "pageId"
}
},
"acl": {
"create": { "user": true },
"read": { "self": true, "admin": true },
"update": { "self": true },
"delete": { "self": true }
}
}

View File

@@ -1,57 +0,0 @@
{
"entity": "Lambda",
"version": "1.0",
"description": "Serverless function definition in CodeForge",
"tenantId": true,
"fields": {
"id": {
"type": "uuid",
"primary": true,
"generated": true,
"description": "Unique lambda identifier"
},
"name": {
"type": "string",
"required": true,
"max_length": 255,
"description": "Lambda function name"
},
"description": {
"type": "string",
"max_length": 1024,
"description": "Lambda description"
},
"code": {
"type": "text",
"required": true,
"description": "Function source code"
},
"runtime": {
"type": "string",
"required": true,
"max_length": 50,
"description": "Runtime environment (e.g. nodejs20, python3.11)"
},
"handler": {
"type": "string",
"required": true,
"max_length": 255,
"description": "Entry point handler (e.g. index.handler)"
},
"updatedAt": {
"type": "bigint",
"generated": true
}
},
"indexes": [
{
"fields": ["tenantId"]
}
],
"acl": {
"create": { "user": true },
"read": { "self": true, "admin": true },
"update": { "self": true },
"delete": { "self": true }
}
}

View File

@@ -1,69 +0,0 @@
{
"entity": "Settings",
"version": "1.0",
"description": "Application settings for a CodeForge user session",
"tenantId": true,
"fields": {
"id": {
"type": "string",
"primary": true,
"description": "Settings key (e.g. 'app')"
},
"autoSave": {
"type": "boolean",
"default": true,
"description": "Enable automatic file saving"
},
"autoSync": {
"type": "boolean",
"default": true,
"description": "Enable automatic DBAL synchronization"
},
"syncInterval": {
"type": "integer",
"default": 30000,
"description": "Sync interval in milliseconds"
},
"dbalApiUrl": {
"type": "string",
"default": "http://localhost:8080",
"description": "DBAL daemon API URL"
},
"useIndexedDB": {
"type": "boolean",
"default": true,
"description": "Enable IndexedDB client-side persistence"
},
"theme": {
"type": "enum",
"values": ["light", "dark", "system"],
"default": "light",
"description": "UI theme preference"
},
"locale": {
"type": "string",
"default": "en",
"max_length": 10,
"description": "UI locale (e.g. en, es)"
},
"createdAt": {
"type": "bigint",
"generated": true
},
"updatedAt": {
"type": "bigint",
"generated": true
}
},
"indexes": [
{
"fields": ["tenantId"]
}
],
"acl": {
"create": { "user": true },
"read": { "self": true, "admin": true },
"update": { "self": true },
"delete": { "self": true }
}
}

View File

@@ -1,52 +0,0 @@
{
"entity": "Theme",
"version": "1.0",
"description": "Custom UI theme definition for CodeForge",
"tenantId": true,
"fields": {
"id": {
"type": "uuid",
"primary": true,
"generated": true,
"description": "Unique theme identifier"
},
"name": {
"type": "string",
"required": true,
"max_length": 100,
"description": "Theme display name"
},
"colors": {
"type": "json",
"required": true,
"description": "Color tokens (primary, secondary, accent, background, foreground, muted, destructive, border)"
},
"typography": {
"type": "json",
"description": "Typography tokens (fontFamily, headingFamily, fontSize, fontWeight)"
},
"spacing": {
"type": "json",
"description": "Spacing tokens (unit, scale)"
},
"updatedAt": {
"type": "bigint",
"generated": true
}
},
"indexes": [
{
"fields": ["tenantId"]
},
{
"fields": ["name", "tenantId"],
"unique": true
}
],
"acl": {
"create": { "user": true },
"read": { "self": true, "admin": true },
"update": { "self": true },
"delete": { "self": true }
}
}

View File

@@ -1,27 +0,0 @@
{
"entity": "Settings",
"version": "1.0",
"package": "codeforge",
"description": "Default application settings for CodeForge",
"records": [
{
"id": "app",
"tenantId": "default",
"autoSave": true,
"autoSync": true,
"syncInterval": 30000,
"dbalApiUrl": "http://localhost:8080",
"useIndexedDB": true,
"theme": "light",
"locale": "en",
"createdAt": 0,
"updatedAt": 0
}
],
"metadata": {
"bootstrap": true,
"skipIfExists": true,
"timestampField": "createdAt",
"useCurrentTimestamp": true
}
}

View File

@@ -4,7 +4,7 @@
# Usage:
# cp .env.example .env
# # Edit values as needed
# python3 deployment.py stack up
# ./start-stack.sh
#
# All values below are defaults. Only override what you need to change.

View File

@@ -1,133 +0,0 @@
# MetaBuilder Deployment
Build and deploy the full MetaBuilder stack locally using Docker.
All commands go through a single Python CLI: `python3 deployment.py --help`
## Prerequisites
- Docker Desktop with BuildKit enabled
- Python 3.9+
- Add `localhost:5050` to Docker Desktop insecure registries:
Settings → Docker Engine → `"insecure-registries": ["localhost:5050"]`
## Build & Deploy Order
### Step 1 — Start Local Registries (Nexus + Artifactory)
All base image builds pull dependencies through local registries. **Start these first.**
```bash
cd deployment
docker compose -f docker-compose.nexus.yml up -d
```
Wait ~2 minutes for init containers to finish, then populate:
```bash
python3 deployment.py nexus push # Docker images → Nexus
python3 deployment.py npm publish-patches # Patched npm packages → Nexus
conan remote add artifactory http://localhost:8092/artifactory/api/conan/conan-local
```
| Service | URL | Credentials |
|-------------|----------------------------------|--------------------|
| Nexus UI | http://localhost:8091 | admin / nexus |
| Artifactory | http://localhost:8092 | admin / password |
| npm group | http://localhost:8091/repository/npm-group/ | — |
| Conan2 | http://localhost:8092/artifactory/api/conan/conan-local | — |
| Docker repo | localhost:5050 | — |
### Step 2 — Build Base Images
```bash
python3 deployment.py build base # Build all (skips existing)
python3 deployment.py build base --force # Rebuild all
python3 deployment.py build base node-deps # Build a specific image
python3 deployment.py build base --list # List available images
```
Build order (dependencies respected automatically):
1. `base-apt` — system packages (no deps)
2. `base-conan-deps` — C++ dependencies (needs base-apt)
3. `base-android-sdk` — Android SDK (needs base-apt)
4. `base-node-deps` — npm workspace dependencies (standalone, needs Nexus running)
5. `base-pip-deps` — Python dependencies (standalone)
6. `devcontainer` — full dev environment (needs all above)
### Step 3 — Build App Images
```bash
python3 deployment.py build apps # Build all (skips existing)
python3 deployment.py build apps --force # Rebuild all
python3 deployment.py build apps workflowui # Build specific app
python3 deployment.py build apps --sequential # Lower RAM usage
```
### Step 4 — Start the Stack
```bash
python3 deployment.py stack up # Core services
python3 deployment.py stack up --monitoring # + Prometheus, Grafana, Loki
python3 deployment.py stack up --media # + Media daemon, Icecast, HLS
python3 deployment.py stack up --all # Everything
```
Portal: http://localhost (nginx welcome page with links to all apps)
### Quick Deploy (rebuild + restart specific apps)
```bash
python3 deployment.py deploy codegen # Build and deploy codegen
python3 deployment.py deploy codegen pastebin # Multiple apps
python3 deployment.py deploy --all # All apps
```
## CLI Command Reference
```
deployment.py build base [--force] [--list] [images...]
deployment.py build apps [--force] [--sequential] [apps...]
deployment.py build testcontainers [--skip-native] [--skip-sidecar]
deployment.py deploy [apps...] [--all] [--no-cache]
deployment.py stack up|down|build|logs|ps|clean [--monitoring] [--media] [--all]
deployment.py release <app> [patch|minor|major|x.y.z]
deployment.py nexus init [--ci]
deployment.py nexus push [--tag TAG] [--src SRC] [--pull]
deployment.py nexus populate [--skip-heavy]
deployment.py npm publish-patches [--nexus] [--verdaccio]
deployment.py artifactory init
```
## Compose Files
| File | Purpose |
|------|---------|
| `docker-compose.nexus.yml` | Local registries (Nexus + Artifactory) |
| `compose.yml` | Full application stack |
| `docker-compose.test.yml` | Integration test services |
| `docker-compose.smoke.yml` | Smoke test environment |
## Init Container Scripts
| Script | Purpose |
|--------|---------|
| `nexus-init.py` | Nexus repository setup (mounted into Docker init container) |
| `artifactory-init.py` | Artifactory repository setup (mounted into Docker init container) |
These are container entrypoints used by `docker-compose.nexus.yml`, not user-facing scripts.
## Troubleshooting
**npm install fails with "proxy" error during base-node-deps build**
→ Nexus/Verdaccio isn't running. The `.npmrc` references `localhost:4873` for `@esbuild-kit` scoped packages. Start registries first (Step 1) or comment out the scoped registry in `.npmrc`.
**Build takes 20+ minutes then fails on npm install**
→ Same as above. The Dockerfile now has a pre-flight registry check that fails fast with actionable instructions instead of retrying for 20 minutes.
**Docker image push rejected**
→ Add `localhost:5050` to Docker Desktop insecure registries and restart Docker Desktop.
**Nexus not ready after `docker compose up -d`**
→ Nexus takes ~2 minutes to start. The `nexus-init` container waits for the healthcheck automatically. Check with: `docker compose -f docker-compose.nexus.yml logs -f nexus-init`

149
deployment/artifactory-init.sh Executable file
View File

@@ -0,0 +1,149 @@
#!/bin/sh
# One-shot Artifactory CE initialisation — runs inside the artifactory-init container.
# Creates Conan2 local + remote + virtual repositories via YAML config API.
#
# NOTE: The JSON REST repository API (PUT /api/repositories/) requires Pro license.
# Artifactory CE supports YAML config patching instead:
# PATCH /api/system/configuration (Content-Type: application/yaml)
# Abort on the first failing command.
set -e
# Connection settings — each overridable via environment variables.
ARTIFACTORY_URL="${ARTIFACTORY_URL:-http://artifactory:8081}"
ADMIN_PASS="${ARTIFACTORY_ADMIN_PASS:-password}"
# Basic-auth credential pair and REST API base used by every curl below.
AUTH="admin:$ADMIN_PASS"
API="$ARTIFACTORY_URL/artifactory/api"
# Prefix every status line with the init-container tag.
log() {
  echo "[artifactory-init] $*"
}
# ── Wait for Artifactory API to be ready ──────────────────────────────────
# Poll /system/ping every 2 seconds, up to 30 attempts (~60s total).
log "Waiting for Artifactory API..."
tries=30
HTTP=000
while [ "$tries" -gt 0 ]; do
  HTTP=$(curl -s -o /dev/null -w "%{http_code}" "$API/system/ping" -u "$AUTH")
  [ "$HTTP" = "200" ] && break
  tries=$((tries - 1))
  sleep 2
done
if [ "$HTTP" != "200" ]; then
  log "ERROR: Artifactory API not ready after 60s (HTTP $HTTP)"
  exit 1
fi
log "Artifactory API is ready"
# ── Helper: patch YAML config (idempotent) ────────────────────────────────
# patch_yaml <label> <yaml-payload>
# PATCHes the payload onto the Artifactory system configuration and logs the
# outcome under <label>. Any non-200 response aborts the whole script.
patch_yaml() {
  op_label="$1"
  yaml_payload="$2"
  resp=$(curl -s -w "\n%{http_code}" -X PATCH \
    "$API/system/configuration" \
    -u "$AUTH" -H "Content-Type: application/yaml" \
    -d "$yaml_payload")
  # Last line of $resp is the status code; everything before it is the body.
  status=$(echo "$resp" | tail -1)
  resp_body=$(echo "$resp" | sed '$d')
  if [ "$status" = "200" ]; then
    log "$op_label$resp_body"
  else
    log "ERROR: $op_label returned HTTP $status: $resp_body"
    exit 1
  fi
}
# ── Check if repos already exist ──────────────────────────────────────────
# A single GET of the repository list drives every idempotency check below;
# each creation step is skipped when its repo key already appears in it.
# On any curl failure we fall back to "[]" so all repos get (re)created.
EXISTING=$(curl -sf -u "$AUTH" "$API/repositories" 2>/dev/null || echo "[]")
# Local repo for privately published Conan packages.
# The <<'YAML' heredocs are quoted: no variable expansion inside the payloads.
if echo "$EXISTING" | grep -q '"conan-local"'; then
log "conan-local already exists, skipping local repo"
else
patch_yaml "Create conan-local" "$(cat <<'YAML'
localRepositories:
conan-local:
key: conan-local
type: conan
packageType: conan
description: "Local Conan2 repository for private packages"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false
YAML
)"
fi
# Remote proxy/cache in front of Conan Center.
if echo "$EXISTING" | grep -q '"conan-remote"'; then
log "conan-remote already exists, skipping remote repo"
else
patch_yaml "Create conan-remote" "$(cat <<'YAML'
remoteRepositories:
conan-remote:
key: conan-remote
type: conan
packageType: conan
url: "https://center2.conan.io"
description: "Proxy cache for Conan Center"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false
YAML
)"
fi
# Plain generic storage for arbitrary artifacts.
if echo "$EXISTING" | grep -q '"generic-local"'; then
log "generic-local already exists, skipping generic repo"
else
patch_yaml "Create generic-local" "$(cat <<'YAML'
localRepositories:
generic-local:
key: generic-local
type: generic
packageType: generic
description: "Generic artifact storage"
repoLayoutRef: simple-default
handleReleases: true
handleSnapshots: false
YAML
)"
fi
# Virtual repo must be created after local + remote
if echo "$EXISTING" | grep -q '"conan-virtual"'; then
log "conan-virtual already exists, skipping virtual repo"
else
patch_yaml "Create conan-virtual" "$(cat <<'YAML'
virtualRepositories:
conan-virtual:
key: conan-virtual
type: conan
packageType: conan
description: "Virtual Conan2 repo — local packages + ConanCenter cache"
repositories:
- conan-local
- conan-remote
defaultDeploymentRepo: conan-local
YAML
)"
fi
# ── Verify repos are accessible ──────────────────────────────────────────
# Best-effort check: a non-200 here is logged as WARN but does not fail init.
log "Verifying repositories..."
for REPO in conan-local conan-remote conan-virtual generic-local; do
HTTP=$(curl -s -o /dev/null -w "%{http_code}" \
"$ARTIFACTORY_URL/artifactory/$REPO/" -u "$AUTH")
if [ "$HTTP" = "200" ]; then
log " ok $REPO"
else
log " WARN $REPO (HTTP $HTTP)"
fi
done
# ── Summary banner with repo URLs and Conan client setup instructions ────
log ""
log "======================================"
log " Artifactory CE ready!"
log " Web UI : http://localhost:8092"
log " Login : admin / $ADMIN_PASS"
log ""
log " Conan2 repos:"
log " Local : $ARTIFACTORY_URL/artifactory/api/conan/conan-local"
log " Remote : $ARTIFACTORY_URL/artifactory/api/conan/conan-remote"
log " Virtual : $ARTIFACTORY_URL/artifactory/api/conan/conan-virtual"
log ""
log " Conan client setup:"
log " conan remote add artifactory http://localhost:8092/artifactory/api/conan/conan-virtual"
log " conan remote login artifactory admin -p $ADMIN_PASS"
log "======================================"

View File

@@ -1,9 +1,9 @@
# metabuilder/base-android-sdk
#
# Android SDK 34 + Gradle 8.12 + common Android dependencies pre-downloaded.
# Self-contained — generates Gradle wrapper from web, no project files needed.
# Android SDK 34 + Gradle 8.12 + all Gradle dependencies pre-downloaded
# for repoforge and caproverforge Android apps.
#
# Build: docker build -f Dockerfile.android-sdk -t metabuilder/base-android-sdk:latest .
# Build: docker build -f Dockerfile.android-sdk -t metabuilder/base-android-sdk:latest ../../
ARG BASE_REGISTRY=metabuilder
FROM ${BASE_REGISTRY}/base-apt:latest
@@ -62,36 +62,53 @@ RUN GRADLE_OK=false && \
echo "ERROR: Gradle ${GRADLE_VERSION} install failed after 5 attempts" && exit 1; \
fi
# ── Pre-download Gradle wrapper + common Android dependencies ─────────────────
# Generate the wrapper from the system Gradle install (no COPY from projects).
# Then resolve a minimal Android app's dependency tree to warm the cache.
# ── Pre-download Gradle dependencies for both Android apps ────────────────────
# Copy only build files (not source) to maximise layer cache hits.
RUN mkdir -p /tmp/gradle-warmup && \
cd /tmp/gradle-warmup && \
touch settings.gradle.kts && \
gradle wrapper --gradle-version ${GRADLE_VERSION} --no-daemon
# Stub Android project to pull common dependencies (AGP, Kotlin, AndroidX)
RUN mkdir -p /tmp/gradle-warmup/app/src/main/java && \
# repoforge
COPY frontends/repoforge/build.gradle.kts /tmp/repoforge/build.gradle.kts
COPY frontends/repoforge/settings.gradle.kts /tmp/repoforge/settings.gradle.kts
COPY frontends/repoforge/gradle.properties /tmp/repoforge/gradle.properties
COPY frontends/repoforge/gradlew /tmp/repoforge/gradlew
COPY frontends/repoforge/gradle/ /tmp/repoforge/gradle/
COPY frontends/repoforge/app/build.gradle.kts /tmp/repoforge/app/build.gradle.kts
# Stub sources so gradle resolves deps without compiling real code
RUN mkdir -p /tmp/repoforge/app/src/main/java && \
echo 'package com.stub; public class Stub {}' \
> /tmp/gradle-warmup/app/src/main/java/Stub.java && \
> /tmp/repoforge/app/src/main/java/Stub.java && \
printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest package="com.stub"/>' \
> /tmp/gradle-warmup/app/src/main/AndroidManifest.xml && \
printf 'pluginManagement {\n repositories {\n google()\n mavenCentral()\n gradlePluginPortal()\n }\n}\ndependencyResolutionManagement {\n repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)\n repositories {\n google()\n mavenCentral()\n }\n}\nrootProject.name = "warmup"\ninclude(":app")\n' \
> /tmp/gradle-warmup/settings.gradle.kts && \
printf 'plugins {\n id("com.android.application") version "8.7.3"\n id("org.jetbrains.kotlin.android") version "2.1.0"\n}\nandroid {\n namespace = "com.stub"\n compileSdk = 34\n defaultConfig {\n applicationId = "com.stub"\n minSdk = 24\n targetSdk = 34\n }\n}\ndependencies {\n implementation("androidx.core:core-ktx:1.15.0")\n implementation("androidx.appcompat:appcompat:1.7.0")\n implementation("com.google.android.material:material:1.12.0")\n}\n' \
> /tmp/gradle-warmup/app/build.gradle.kts
> /tmp/repoforge/app/src/main/AndroidManifest.xml
WORKDIR /tmp/gradle-warmup
WORKDIR /tmp/repoforge
RUN chmod +x gradlew && \
for i in 1 2 3 4 5; do \
./gradlew dependencies --no-daemon --quiet \
&& break \
|| (echo "Gradle warmup failed attempt $i/5, retrying in $((i*10))s..." && sleep $((i*10))); \
done && \
rm -rf /tmp/gradle-warmup
|| (echo "Gradle (repoforge) failed attempt $i/5, retrying in $((i*10))s..." && sleep $((i*10))); \
done
# caproverforge
COPY frontends/caproverforge/build.gradle /tmp/caproverforge/build.gradle
COPY frontends/caproverforge/settings.gradle /tmp/caproverforge/settings.gradle
COPY frontends/caproverforge/gradle.properties /tmp/caproverforge/gradle.properties
COPY frontends/caproverforge/gradlew /tmp/caproverforge/gradlew
COPY frontends/caproverforge/gradle/ /tmp/caproverforge/gradle/
COPY frontends/caproverforge/app/build.gradle /tmp/caproverforge/app/build.gradle
RUN mkdir -p /tmp/caproverforge/app/src/main/java && \
echo 'package com.stub; public class Stub {}' \
> /tmp/caproverforge/app/src/main/java/Stub.java && \
printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest package="com.stub"/>' \
> /tmp/caproverforge/app/src/main/AndroidManifest.xml
WORKDIR /tmp/caproverforge
RUN chmod +x gradlew && \
for i in 1 2 3 4 5; do \
./gradlew dependencies --no-daemon --quiet \
&& break \
|| (echo "Gradle (caproverforge) failed attempt $i/5, retrying in $((i*10))s..." && sleep $((i*10))); \
done
WORKDIR /
LABEL org.metabuilder.image="base-android-sdk" \
org.metabuilder.description="Android SDK 34 + Gradle 8.12 + common Android deps pre-downloaded"
org.metabuilder.description="Android SDK 34 + Gradle 8.12 + pre-downloaded deps"

View File

@@ -80,15 +80,17 @@ RUN GO_VERSION=1.22.6 && \
# Set LIBGL_ALWAYS_SOFTWARE=1 to force software rendering at runtime.
RUN for i in 1 2 3 4 5; do \
apt-get update && apt-get install -y --no-install-recommends \
libgl1 \
libglx-mesa0 \
# Software OpenGL (Mesa llvmpipe)
libgl1-mesa-glx \
libgl1-mesa-dri \
mesa-utils \
# Software Vulkan (llvmpipe)
mesa-vulkan-drivers \
vulkan-tools \
libegl1 \
libegl-mesa0 \
libgles2 \
# EGL / GLES software
libegl1-mesa \
libgles2-mesa \
# X virtual framebuffer (for headless window creation)
xvfb \
x11-utils \
&& break \

View File

@@ -1,13 +1,13 @@
# metabuilder/base-node-deps
#
# Node 24 + all 33 workspace npm packages pre-installed.
# Node 20 + all 33 workspace npm packages pre-installed.
# App Dockerfiles copy node_modules from this image instead of running npm ci.
#
# Build: docker build -f Dockerfile.node-deps -t metabuilder/base-node-deps:latest ../../
# App Dockerfiles:
# COPY --from=metabuilder/base-node-deps /app/node_modules ./node_modules
FROM node:24
FROM node:20-slim
WORKDIR /app
@@ -55,105 +55,9 @@ COPY workflow/package.json ./workflow/
COPY scripts/patch-bundled-deps.sh ./scripts/
# Install all workspace deps (generates lock file from package.json manifests)
#
# Pre-flight: auto-detect local registry (Nexus on :8091, Verdaccio on :4873)
# and rewrite .npmrc scoped registries accordingly. This lets the same .npmrc
# work in CI (Verdaccio) and on desktops running Nexus.
RUN npm config set fetch-retries 5 \
&& npm config set fetch-retry-mintimeout 20000 \
&& npm config set maxsockets 5 \
&& echo "==> Detecting local npm registry..." \
&& NEXUS_NPM="http://host.docker.internal:8091/repository/npm-group/" \
&& VERDACCIO="http://host.docker.internal:4873/" \
&& NEXUS_NPM_LOCAL="http://localhost:8091/repository/npm-group/" \
&& VERDACCIO_LOCAL="http://localhost:4873/" \
&& LOCAL_REG="" \
&& LOCAL_REG_AUTH="" \
&& if wget -q --spider --timeout=3 "$NEXUS_NPM" 2>/dev/null \
|| curl -sf --connect-timeout 3 "$NEXUS_NPM" >/dev/null 2>&1; then \
LOCAL_REG="$NEXUS_NPM"; \
LOCAL_REG_AUTH="//host.docker.internal:8091/repository/npm-hosted/:_auth=YWRtaW46bmV4dXM="; \
echo " Nexus detected at $NEXUS_NPM"; \
elif wget -q --spider --timeout=3 "$NEXUS_NPM_LOCAL" 2>/dev/null \
|| curl -sf --connect-timeout 3 "$NEXUS_NPM_LOCAL" >/dev/null 2>&1; then \
LOCAL_REG="$NEXUS_NPM_LOCAL"; \
LOCAL_REG_AUTH="//localhost:8091/repository/npm-hosted/:_auth=YWRtaW46bmV4dXM="; \
echo " Nexus detected at $NEXUS_NPM_LOCAL"; \
elif wget -q --spider --timeout=3 "$VERDACCIO" 2>/dev/null \
|| curl -sf --connect-timeout 3 "$VERDACCIO" >/dev/null 2>&1; then \
LOCAL_REG="$VERDACCIO"; \
LOCAL_REG_AUTH="//host.docker.internal:4873/:_authToken="; \
echo " Verdaccio detected at $VERDACCIO"; \
elif wget -q --spider --timeout=3 "$VERDACCIO_LOCAL" 2>/dev/null \
|| curl -sf --connect-timeout 3 "$VERDACCIO_LOCAL" >/dev/null 2>&1; then \
LOCAL_REG="$VERDACCIO_LOCAL"; \
LOCAL_REG_AUTH="//localhost:4873/:_authToken="; \
echo " Verdaccio detected at $VERDACCIO_LOCAL"; \
fi \
&& if [ -n "$LOCAL_REG" ]; then \
echo "==> Rewriting .npmrc registries → $LOCAL_REG"; \
sed -i '/\/\/localhost:4873\//d' .npmrc; \
sed -i '/\/\/localhost:8091\//d' .npmrc; \
sed -i '/\/\/host.docker.internal/d' .npmrc; \
sed -i 's|^registry=.*|registry='"$LOCAL_REG"'|' .npmrc; \
sed -i 's|@esbuild-kit:registry=.*|@esbuild-kit:registry='"$LOCAL_REG"'|' .npmrc; \
echo "$LOCAL_REG_AUTH" >> .npmrc; \
else \
echo ""; \
echo "========================================================"; \
echo " WARNING: No local npm registry detected!"; \
echo "========================================================"; \
echo ""; \
echo " @esbuild-kit patched packages will NOT be available."; \
echo " npm install may fail or use unpatched versions."; \
echo ""; \
echo " Start one of these BEFORE building:"; \
echo ""; \
echo " Nexus (recommended for desktops):"; \
echo " cd deployment && docker compose -f docker-compose.nexus.yml up -d"; \
echo " python3 deployment.py npm publish-patches"; \
echo ""; \
echo " Verdaccio (lightweight, for CI runners):"; \
echo " npx verdaccio --config deployment/verdaccio.yaml &"; \
echo " python3 deployment.py npm publish-patches --verdaccio"; \
echo ""; \
echo " Then rebuild this image."; \
echo "========================================================"; \
echo ""; \
echo " Continuing without local registry — removing scoped overrides..."; \
sed -i '/@esbuild-kit:registry=/d' .npmrc; \
sed -i '/\/\/localhost:4873\//d' .npmrc; \
fi \
&& echo "==> Final .npmrc:" && cat .npmrc && echo "" \
&& echo "==> Checking npm registry connectivity..." \
&& registries=$(grep -E '^\s*(@[^:]+:)?registry=' .npmrc 2>/dev/null | sed 's/.*registry=//' | sort -u) \
&& for reg in $registries; do \
printf " %-50s " "$reg"; \
if wget -q --spider --timeout=5 "$reg" 2>/dev/null \
|| curl -sf --connect-timeout 5 "$reg" >/dev/null 2>&1; then \
echo "OK"; \
else \
echo "UNREACHABLE"; \
echo ""; \
echo "========================================================"; \
echo "ERROR: Cannot reach npm registry: $reg"; \
echo "========================================================"; \
echo ""; \
echo "If this is a local registry (Verdaccio/Nexus/Artifactory),"; \
echo "make sure it is running BEFORE building this image:"; \
echo ""; \
echo " Verdaccio: npx verdaccio --config deployment/verdaccio.yaml"; \
echo " Nexus: cd deployment && docker compose -f docker-compose.nexus.yml up -d"; \
echo ""; \
echo "Then rebuild with --network=host so the build can reach localhost:"; \
echo " docker build --network=host -f Dockerfile.node-deps ..."; \
echo ""; \
echo "Or remove/comment out the unreachable registry in .npmrc"; \
echo "========================================================"; \
exit 1; \
fi; \
done \
&& echo "==> All registries reachable, running npm install..." \
&& for i in 1 2 3 4 5; do \
npm install 2>&1 && break; \
[ "$i" = "5" ] && echo "npm install failed after 5 attempts" && exit 1; \

View File

@@ -54,15 +54,15 @@ COPY workflow/plugins/python/requirements-packagerepo.txt /
COPY workflow/plugins/python/requirements-testing.txt /deps/workflow/requirements-testing.txt
COPY workflow/plugins/python/requirements-dev.txt /deps/workflow/requirements-dev.txt
# Merge all requirements (except cadquerywrapper) into a single file,
# deduplicating packages so pip sees one consistent set of constraints.
RUN find /deps -name '*.txt' -not -path '*/cadquerywrapper/*' \
-exec cat {} + \
| grep -v '^\s*#' | grep -v '^\s*$' | grep -v '^\s*-r ' \
| sort -u > /deps/merged.txt && \
echo "=== Merged requirements ===" && cat /deps/merged.txt && \
pip install --no-cache-dir -r /deps/merged.txt && \
# cadquerywrapper best-effort (cadquery-ocp may not resolve on all platforms)
# Install in two passes:
# 1. Main deps (everything except cadquerywrapper which has native dep conflicts)
# 2. cadquerywrapper best-effort (cadquery-ocp may not resolve on all platforms)
RUN for i in 1 2 3 4 5; do \
pip install --no-cache-dir \
$(find /deps -name '*.txt' -not -path '*/cadquerywrapper/*' -exec echo "-r {}" \;) \
&& break \
|| (echo "pip install failed (attempt $i/5), retrying in $((i*10))s..." && sleep $((i*10))); \
done && \
pip install --no-cache-dir \
-r /deps/cadquerywrapper/requirements.txt \
-r /deps/cadquerywrapper/requirements-dev.txt \

202
deployment/build-apps.sh Executable file
View File

@@ -0,0 +1,202 @@
#!/usr/bin/env bash
# Build Docker images for all MetaBuilder web applications.
# Uses multi-stage Dockerfiles — no local pre-building required.
#
# Usage:
# ./build-apps.sh Build missing app images (skip existing)
# ./build-apps.sh --force Rebuild all app images
# ./build-apps.sh workflowui Build specific app image
# ./build-apps.sh --sequential Build sequentially (less RAM)
set -e
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.stack.yml"
# Ensure base-node-deps exists — all frontend Dockerfiles depend on it.
# Other base images (apt, conan, pip, android-sdk) are only needed for
# C++ daemons, dev containers, and workflow plugins.
# Ensure the metabuilder/base-node-deps image exists, building it if missing.
#
# Every frontend Dockerfile builds FROM this image, so app builds cannot
# proceed without it. Exits the whole script (status 1) if the build fails.
ensure_node_deps_base() {
    if docker image inspect "metabuilder/base-node-deps:latest" &>/dev/null; then
        echo -e "${GREEN}Base image metabuilder/base-node-deps:latest exists${NC}"
        return 0
    fi

    echo -e "${YELLOW}Building metabuilder/base-node-deps (required by all Node.js frontends)...${NC}"
    local REPO_ROOT
    REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

    # BUG FIX: this script runs under `set -e`, so a bare `docker build`
    # followed by `if [ $? -ne 0 ]` could never reach the check — a failed
    # build aborted the script before the diagnostic printed. Guarding the
    # command with `if !` keeps the failure path reachable.
    if ! docker build \
        -f "$SCRIPT_DIR/base-images/Dockerfile.node-deps" \
        -t metabuilder/base-node-deps:latest \
        "$REPO_ROOT"; then
        echo -e "${RED}Failed to build base-node-deps — cannot proceed with app builds${NC}"
        exit 1
    fi

    echo -e "${GREEN}Built metabuilder/base-node-deps:latest${NC}"
}
# Check optional base images (warn only, don't block)
check_optional_bases() {
local missing=()
local bases=(
"metabuilder/base-apt:latest"
"metabuilder/base-conan-deps:latest"
"metabuilder/base-pip-deps:latest"
"metabuilder/base-android-sdk:latest"
)
for img in "${bases[@]}"; do
if ! docker image inspect "$img" &>/dev/null; then
missing+=("$img")
fi
done
if [ ${#missing[@]} -gt 0 ]; then
echo -e "${YELLOW}Optional base images not built (C++ daemons, dev container):${NC}"
for img in "${missing[@]}"; do
echo " - $img"
done
echo -e "${YELLOW}Build with:${NC} ./build-base-images.sh"
echo ""
fi
}
# Mandatory base image first; then surface (but never fail on) missing
# optional base images.
ensure_node_deps_base
check_optional_bases
# --- CLI parsing: flags toggle build mode; anything else is an app name ------
PARALLEL=true
FORCE=false
TARGETS=()
for arg in "$@"; do
case "$arg" in
--sequential) PARALLEL=false ;;  # build one service at a time (less RAM)
--force) FORCE=true ;;           # rebuild even if the image already exists
*) TARGETS+=("$arg") ;;          # positional argument = app to build
esac
done
# Note: media-daemon excluded — C++ source not yet complete (WIP: tv, radio, retro gaming)
ALL_APPS=(workflowui codegen pastebin postgres emailclient exploded-diagrams storybook frontend-app dbal)
resolve_service() {
case "$1" in
workflowui) echo "workflowui" ;;
codegen) echo "codegen" ;;
pastebin) echo "pastebin" ;;
postgres) echo "postgres-dashboard" ;;
emailclient) echo "emailclient-app" ;;
exploded-diagrams) echo "exploded-diagrams" ;;
storybook) echo "storybook" ;;
frontend-app) echo "frontend-app" ;;
dbal) echo "dbal" ;;
*) echo "" ;;
esac
}
# If no targets specified, build all
if [ ${#TARGETS[@]} -eq 0 ]; then
    TARGETS=("${ALL_APPS[@]}")
fi
# Resolve service names — abort on the first unknown target so typos fail fast.
# TARGETS and SERVICES are kept as parallel arrays from here on.
SERVICES=()
for target in "${TARGETS[@]}"; do
    service="$(resolve_service "$target")"
    if [ -z "$service" ]; then
        echo -e "${RED}Unknown target: $target${NC}"
        echo "Available: ${ALL_APPS[*]}"
        exit 1
    fi
    SERVICES+=("$service")
done
# Skip services whose images already exist (unless --force)
if [[ "$FORCE" != "true" ]]; then
    NEEDS_BUILD=()
    NEEDS_BUILD_NAMES=()
    for i in "${!TARGETS[@]}"; do
        target="${TARGETS[$i]}"
        service="${SERVICES[$i]}"
        # docker compose names built images "deployment-<service>".
        img="deployment-${service}"
        if docker image inspect "$img" &>/dev/null; then
            echo -e "${GREEN}Skipping $target${NC} — image $img already exists (use --force to rebuild)"
        else
            NEEDS_BUILD_NAMES+=("$target")
            NEEDS_BUILD+=("$service")
        fi
    done
    if [ ${#NEEDS_BUILD[@]} -eq 0 ]; then
        echo ""
        echo -e "${GREEN}All app images already built! Use --force to rebuild.${NC}"
        exit 0
    fi
    # Filter both parallel arrays down to what actually needs building.
    TARGETS=("${NEEDS_BUILD_NAMES[@]}")
    SERVICES=("${NEEDS_BUILD[@]}")
fi
echo -e "${YELLOW}Building: ${TARGETS[*]}${NC}"
echo ""
# Pre-pull base images that app Dockerfiles depend on (with retry for flaky connections)
# Best-effort: if a pull still fails after 5 attempts the loop moves on and the
# failure surfaces during `docker build` instead.
# NOTE(review): cli/commands.json lists node:24-alpine in build_bases while
# this script pulls node:22-alpine — confirm which list is current.
echo -e "${YELLOW}Pre-pulling base images for app builds...${NC}"
for img in "node:20-alpine" "node:22-alpine" "python:3.11-slim" "python:3.12-slim" "alpine:3.19"; do
    if ! docker image inspect "$img" &>/dev/null; then
        echo " Pulling $img..."
        for i in 1 2 3 4 5; do
            docker pull "$img" && break \
                || (echo " Retry $i/5..." && sleep $((i * 10)))
        done
    fi
done
echo ""
# Build loop: up to 5 rounds with linear backoff (10s, 20s, ...).
MAX_BUILD_ATTEMPTS=5
BUILD_ATTEMPT=0
BUILD_OK=false
while [ $BUILD_ATTEMPT -lt $MAX_BUILD_ATTEMPTS ]; do
    BUILD_ATTEMPT=$((BUILD_ATTEMPT + 1))
    [ $BUILD_ATTEMPT -gt 1 ] && echo -e "${YELLOW}Build attempt $BUILD_ATTEMPT/$MAX_BUILD_ATTEMPTS...${NC}"
    if [ "$PARALLEL" = true ]; then
        echo -e "${YELLOW}Parallel build (uses more RAM)...${NC}"
        docker compose -f "$COMPOSE_FILE" build --parallel "${SERVICES[@]}" && BUILD_OK=true && break
    else
        # Build each service individually to avoid bandwidth contention
        ALL_OK=true
        for svc in "${SERVICES[@]}"; do
            echo -e "${YELLOW}Building $svc...${NC}"
            if ! docker compose -f "$COMPOSE_FILE" build "$svc"; then
                echo -e "${RED}Failed: $svc${NC}"
                ALL_OK=false
                break
            fi
            echo -e "${GREEN}Done: $svc${NC}"
        done
        [ "$ALL_OK" = true ] && BUILD_OK=true && break
    fi
    if [ $BUILD_ATTEMPT -lt $MAX_BUILD_ATTEMPTS ]; then
        WAIT=$(( BUILD_ATTEMPT * 10 ))
        echo -e "${YELLOW}Build failed (attempt $BUILD_ATTEMPT/$MAX_BUILD_ATTEMPTS), retrying in ${WAIT}s...${NC}"
        sleep $WAIT
    fi
done
if [ "$BUILD_OK" != "true" ]; then
    echo -e "${RED}Build failed after $MAX_BUILD_ATTEMPTS attempts${NC}"
    exit 1
fi
echo ""
echo -e "${GREEN}Build complete!${NC}"
echo ""
echo "Start with: ./start-stack.sh"
echo "Or: docker compose -f $COMPOSE_FILE up -d ${SERVICES[*]}"

186
deployment/build-base-images.sh Executable file
View File

@@ -0,0 +1,186 @@
#!/usr/bin/env bash
# Build MetaBuilder base Docker images.
#
# These are built ONCE (or when dependency manifests change) and cached locally.
# App image builds then have zero downloads — they just inherit from these bases.
#
# Build order matters:
# 1. base-apt (no deps)
# 2. base-conan-deps (needs base-apt)
# 3. base-android-sdk (needs base-apt)
# 4. base-node-deps (standalone — node:20-alpine)
# 5. base-pip-deps (standalone — python:3.11-slim)
#
# Usage:
# ./build-base-images.sh Build missing base images (skip existing)
# ./build-base-images.sh --force Rebuild all base images
# ./build-base-images.sh apt node Build specific images (skip if exist)
# ./build-base-images.sh --list List available images
# Require bash 4+ for associative arrays (macOS ships 3.2)
if ((BASH_VERSINFO[0] < 4)); then
    # Try re-exec'ing under a Homebrew bash (Apple Silicon path first, then
    # Intel); each candidate is probed for version 4+ before being trusted.
    for candidate in /opt/homebrew/bin/bash /usr/local/bin/bash; do
        if [[ -x "$candidate" ]] && "$candidate" -c '((BASH_VERSINFO[0]>=4))' 2>/dev/null; then
            exec "$candidate" "$0" "$@"
        fi
    done
    echo "Error: bash 4+ required (found bash $BASH_VERSION)"
    echo "Install with: brew install bash"
    exit 1
fi
set -e
# ANSI color codes for log output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
BASE_DIR="$SCRIPT_DIR/base-images"
# ── Helpers ───────────────────────────────────────────────────────────────────
# Leveled loggers, all prefixed "[base]" and color-coded by severity.
log_info() { echo -e "${BLUE}[base]${NC} $*"; }
log_ok() { echo -e "${GREEN}[base]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[base]${NC} $*"; }
log_err() { echo -e "${RED}[base]${NC} $*"; }
# Build one image with retry (handles flaky network during FROM pulls).
# Build one base image with up to five attempts, riding out flaky network
# during FROM-layer pulls. The result is tagged both as given (":latest") and
# with a YYYYMMDD date tag. Returns 0 on success, 1 after all attempts fail.
build_with_retry() {
    local image_tag="$1"
    local dockerfile_name="$2"
    local -r attempts=5
    local try backoff

    log_info "Building $image_tag ..."
    echo ""
    for (( try = 1; try <= attempts; try++ )); do
        if docker build \
            --file "$BASE_DIR/$dockerfile_name" \
            --tag "$image_tag" \
            --tag "${image_tag%:*}:$(date +%Y%m%d)" \
            "$PROJECT_ROOT"; then
            echo ""
            log_ok "$image_tag built successfully"
            return 0
        fi
        if (( try < attempts )); then
            backoff=$(( try * 15 ))
            log_warn "Build failed (attempt $try/$attempts), retrying in ${backoff}s ..."
            sleep "$backoff"
        fi
    done
    log_err "Failed to build $image_tag after $attempts attempts"
    return 1
}
# ── Image definitions (order = build order) ───────────────────────────────────
# IMAGE_FILE: short image name → Dockerfile under deployment/base-images/
declare -A IMAGE_FILE=(
    [apt]="Dockerfile.apt"
    [conan-deps]="Dockerfile.conan-deps"
    [node-deps]="Dockerfile.node-deps"
    [pip-deps]="Dockerfile.pip-deps"
    [android-sdk]="Dockerfile.android-sdk"
    [devcontainer]="Dockerfile.devcontainer"
)
# IMAGE_TAG: short image name → local docker tag the build produces.
declare -A IMAGE_TAG=(
    [apt]="metabuilder/base-apt:latest"
    [conan-deps]="metabuilder/base-conan-deps:latest"
    [node-deps]="metabuilder/base-node-deps:latest"
    [pip-deps]="metabuilder/base-pip-deps:latest"
    [android-sdk]="metabuilder/base-android-sdk:latest"
    [devcontainer]="metabuilder/devcontainer:latest"
)
# Build order respects dependencies:
# base-apt → conan-deps, android-sdk
# conan-deps + node-deps + pip-deps + android-sdk → devcontainer
BUILD_ORDER=(apt conan-deps android-sdk node-deps pip-deps devcontainer)
# ── Argument parsing ──────────────────────────────────────────────────────────
# --list prints the name → tag mapping and exits without building anything.
if [[ "$1" == "--list" ]]; then
    echo "Available base images:"
    for name in "${BUILD_ORDER[@]}"; do
        # FIX: the name and tag previously ran together with no separator.
        echo "  $name -> ${IMAGE_TAG[$name]}"
    done
    exit 0
fi
FORCE=false
TARGETS=()
for arg in "$@"; do
    if [[ "$arg" == "--force" ]]; then
        FORCE=true
    elif [[ -v IMAGE_FILE[$arg] ]]; then  # -v on array elements needs bash 4.3+
        TARGETS+=("$arg")
    else
        log_err "Unknown image: $arg"
        echo "Available: ${BUILD_ORDER[*]}"
        exit 1
    fi
done
# Default: build all (in dependency order)
if [ ${#TARGETS[@]} -eq 0 ]; then
    TARGETS=("${BUILD_ORDER[@]}")
fi
# ── Build ─────────────────────────────────────────────────────────────────────
echo ""
echo -e "${BLUE}MetaBuilder Base Image Builder${NC}"
echo -e "Building: ${TARGETS[*]}"
echo ""
FAILED=()
for name in "${BUILD_ORDER[@]}"; do
    # Iterate in dependency order, but act only on requested targets.
    [[ " ${TARGETS[*]} " == *" $name "* ]] || continue
    # Skip if image already exists (unless --force)
    if [[ "$FORCE" != "true" ]] && docker image inspect "${IMAGE_TAG[$name]}" &>/dev/null; then
        # FIX: name and tag previously ran together with no separator.
        log_ok "Skipping $name — ${IMAGE_TAG[$name]} already exists (use --force to rebuild)"
        echo ""
        continue
    fi
    # A failed image does not abort the run; remaining images still build.
    if ! build_with_retry "${IMAGE_TAG[$name]}" "${IMAGE_FILE[$name]}"; then
        FAILED+=("$name")
        log_warn "Continuing with remaining images..."
    fi
    echo ""
done
# ── Summary ───────────────────────────────────────────────────────────────────
echo ""
if [ ${#FAILED[@]} -eq 0 ]; then
    echo -e "${GREEN}All base images built successfully!${NC}"
    echo ""
    echo "Built images:"
    for name in "${BUILD_ORDER[@]}"; do
        [[ " ${TARGETS[*]} " == *" $name "* ]] || continue
        # docker inspect reports size in bytes; format as GB for humans.
        SIZE=$(docker image inspect "${IMAGE_TAG[$name]}" \
            --format '{{.Size}}' 2>/dev/null \
            | awk '{printf "%.1f GB", $1/1073741824}')
        # FIX: the status glyph between GREEN/NC was missing entirely.
        echo -e " ${GREEN}✓${NC} ${IMAGE_TAG[$name]} ($SIZE)"
    done
    echo ""
    echo "Now run: cd deployment && ./build-apps.sh"
else
    echo -e "${RED}Some images failed to build:${NC} ${FAILED[*]}"
    echo "Re-run to retry only failed images:"
    echo " ./build-base-images.sh ${FAILED[*]}"
    exit 1
fi

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env bash
# build-testcontainers.sh — builds the testcontainers Conan packages and uploads to Nexus.
#
# Builds:
# - testcontainers-native/0.1.0 (C shared library, wraps testcontainers-go)
# - testcontainers-sidecar/0.1.0 (Go binary sidecar for DBAL integration tests)
#
# Prerequisites:
# - Go 1.21+ (brew install go)
# - Conan 2.x (pip install conan)
# - Nexus running (docker compose -f deployment/docker-compose.nexus.yml up -d)
# - Nexus init (./deployment/nexus-init.sh)
#
# Usage:
# ./deployment/build-testcontainers.sh [--skip-native] [--skip-sidecar]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
RECIPES_DIR="$REPO_ROOT/dbal/production/build-config/conan-recipes"
# Nexus location and credentials are overridable from the environment.
NEXUS_URL="${NEXUS_URL:-http://localhost:8091/repository/conan-hosted/}"
NEXUS_USER="${NEXUS_USER:-admin}"
NEXUS_PASS="${NEXUS_PASS:-nexus}"
SKIP_NATIVE=false
SKIP_SIDECAR=false
for arg in "$@"; do
    case "$arg" in
        --skip-native) SKIP_NATIVE=true ;;
        --skip-sidecar) SKIP_SIDECAR=true ;;
    esac
done
log() { echo "[build-testcontainers] $*"; }
# ── Preflight checks ──────────────────────────────────────────────────────────
# `set -e` is active, so each check must supply its own failure branch.
log "Checking prerequisites..."
go version || { echo "Go not found. Install: https://go.dev/dl/"; exit 1; }
conan --version || { echo "Conan not found. Install: pip install conan"; exit 1; }
# ── Configure Nexus as Conan remote ───────────────────────────────────────────
# `|| true` makes re-runs idempotent when the remote already exists;
# the login must succeed, so it has no fallback.
log "Configuring Nexus Conan remote..."
conan remote add nexus "$NEXUS_URL" --force 2>/dev/null || true
conan remote login nexus "$NEXUS_USER" --password "$NEXUS_PASS"
# Ensure Nexus is before conancenter in priority (for future installs)
conan remote disable conancenter 2>/dev/null || true
conan remote enable conancenter 2>/dev/null || true
# Move nexus to index 0
conan remote update nexus --index 0 2>/dev/null || true
# ── Build + upload testcontainers-native ──────────────────────────────────────
if [ "$SKIP_NATIVE" = false ]; then
    log "Building testcontainers-native/0.1.0 (C shared library)..."
    log " Requires: Go + CMake + Docker"
    conan create "$RECIPES_DIR/testcontainers-native" \
        -s build_type=Release \
        -s compiler.cppstd=20 \
        --build=missing
    log "Uploading testcontainers-native to Nexus..."
    conan upload "testcontainers-native/0.1.0" --remote nexus --confirm
    log "testcontainers-native uploaded ✓"
else
    log "Skipping testcontainers-native (--skip-native)"
fi
# ── Build + upload testcontainers-sidecar ─────────────────────────────────────
if [ "$SKIP_SIDECAR" = false ]; then
    SIDECAR_SRC="$REPO_ROOT/dbal/testcontainers-sidecar"
    log "Building testcontainers-sidecar/0.1.0 (Go binary)..."
    log " Source: $SIDECAR_SRC"
    # Export TESTCONTAINERS_SIDECAR_SRC so the Conan recipe's build() can find it
    TESTCONTAINERS_SIDECAR_SRC="$SIDECAR_SRC" \
    conan create "$RECIPES_DIR/testcontainers-sidecar" \
        -s build_type=Release \
        -s compiler.cppstd=20 \
        --build=missing
    log "Uploading testcontainers-sidecar to Nexus..."
    conan upload "testcontainers-sidecar/0.1.0" --remote nexus --confirm
    log "testcontainers-sidecar uploaded ✓"
else
    log "Skipping testcontainers-sidecar (--skip-sidecar)"
fi
# Final banner with copy-pasteable follow-up commands.
log ""
log "══════════════════════════════════════════"
log " Conan packages in Nexus:"
log " http://localhost:8091/#browse/browse:conan-hosted"
log ""
log " To use in DBAL tests:"
log " conan remote add nexus $NEXUS_URL --force"
log " conan remote login nexus $NEXUS_USER --password $NEXUS_PASS"
log " cd dbal/production/_build"
log " conan install ../build-config/conanfile.tests.py \\"
log " --output-folder=. --build=missing --remote nexus \\"
log " -s build_type=Debug -s compiler.cppstd=20"
log " cmake .. -DBUILD_DAEMON=OFF -DBUILD_INTEGRATION_TESTS=ON \\"
log " -DCMAKE_TOOLCHAIN_FILE=./build/Debug/generators/conan_toolchain.cmake -G Ninja"
log " cmake --build . --target dbal_integration_tests --parallel"
log " ctest -R dbal_integration_tests --output-on-failure -V"
log "══════════════════════════════════════════"

View File

@@ -1,5 +0,0 @@
"""MetaBuilder Deployment CLI — modular command system powered by JSON config."""
from cli.loader import build_parser, dispatch
__all__ = ["build_parser", "dispatch"]

View File

@@ -1,118 +0,0 @@
"""Initialize Artifactory CE Conan2 local + remote + virtual repositories."""
import argparse
import os
import subprocess
import time
from cli.helpers import curl_status, run as run_proc
REPO_CONFIGS = [
("conan-local", """localRepositories:
conan-local:
key: conan-local
type: conan
packageType: conan
description: "Local Conan2 repository for private packages"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false"""),
("conan-remote", """remoteRepositories:
conan-remote:
key: conan-remote
type: conan
packageType: conan
url: "https://center2.conan.io"
description: "Proxy cache for Conan Center"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false"""),
("generic-local", """localRepositories:
generic-local:
key: generic-local
type: generic
packageType: generic
description: "Generic artifact storage"
repoLayoutRef: simple-default
handleReleases: true
handleSnapshots: false"""),
("conan-virtual", """virtualRepositories:
conan-virtual:
key: conan-virtual
type: conan
packageType: conan
description: "Virtual Conan2 repo — local packages + ConanCenter cache"
repositories:
- conan-local
- conan-remote
defaultDeploymentRepo: conan-local"""),
]
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Create the Conan local/remote/virtual and generic repos in Artifactory CE.

    Waits up to 60s for the Artifactory API, then PATCHes each missing
    repository definition from REPO_CONFIGS into the system configuration and
    verifies the repos answer over HTTP.

    Returns 0 on success, 1 if the API never becomes ready or a PATCH fails.
    """
    art_url = os.environ.get("ARTIFACTORY_URL", "http://artifactory:8081")
    admin_pass = os.environ.get("ARTIFACTORY_ADMIN_PASS", "password")
    auth = f"admin:{admin_pass}"
    api = f"{art_url}/artifactory/api"

    def alog(msg: str) -> None:
        print(f"[artifactory-init] {msg}")

    alog("Waiting for Artifactory API...")
    ready = False
    for _ in range(30):  # 30 polls x 2s = the 60s budget quoted in the error
        if curl_status(f"{api}/system/ping", auth) == 200:
            ready = True
            break
        time.sleep(2)
    if not ready:
        alog("ERROR: Artifactory API not ready after 60s")
        return 1
    alog("Artifactory API is ready")

    # Fetch existing repos once so re-runs are idempotent; assume none on failure.
    result = subprocess.run(
        ["curl", "-sf", "-u", auth, f"{api}/repositories"],
        capture_output=True, text=True,
    )
    existing = result.stdout if result.returncode == 0 else "[]"

    for repo_name, yaml_body in REPO_CONFIGS:
        if f'"{repo_name}"' in existing:
            alog(f"{repo_name} already exists, skipping")
            continue
        # PATCH the YAML fragment; "-w \n%{http_code}" appends the status code
        # as a final line so it can be split off from the response body.
        result = subprocess.run([
            "curl", "-s", "-w", "\n%{http_code}", "-X", "PATCH",
            f"{api}/system/configuration",
            "-u", auth, "-H", "Content-Type: application/yaml", "-d", yaml_body,
        ], capture_output=True, text=True)
        lines = result.stdout.strip().split("\n")
        code = lines[-1] if lines else "0"
        body = "\n".join(lines[:-1])
        if code == "200":
            # FIX: previously the repo name and response body were concatenated
            # with no separator.
            alog(f"Created {repo_name}: {body}")
        else:
            alog(f"ERROR: {repo_name} returned HTTP {code}: {body}")
            return 1

    # Verify each repo answers over HTTP (warn only — creation already succeeded).
    alog("Verifying repositories...")
    for repo_name in ["conan-local", "conan-remote", "conan-virtual", "generic-local"]:
        status = curl_status(f"{art_url}/artifactory/{repo_name}/", auth)
        alog(f" {'ok' if status == 200 else f'WARN (HTTP {status})'} {repo_name}")

    alog("")
    alog("=" * 38)
    alog(" Artifactory CE ready!")
    # Plain string (no placeholders) — the stray f-prefix was removed.
    alog(" Web UI : http://localhost:8092")
    alog(f" Login : admin / {admin_pass}")
    alog(f" Conan virtual : {art_url}/artifactory/api/conan/conan-virtual")
    alog("=" * 38)
    return 0


def run(args, config):
    """Module entry point invoked by cli.loader.dispatch()."""
    return run_cmd(args, config)

View File

@@ -1,114 +0,0 @@
"""Build application Docker images via docker compose."""
import argparse
import time
from cli.helpers import (
BASE_DIR, PROJECT_ROOT, GREEN, YELLOW, NC,
docker_compose, docker_image_exists, get_buildable_services, log_err, log_info, log_ok, log_warn,
pull_with_retry, resolve_services, run as run_proc,
)
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Build application Docker images via docker compose.

    Ensures the mandatory base-node-deps image exists (building it if needed),
    warns about missing optional base images, skips apps whose images already
    exist unless --force, pre-pulls external base images, then builds the
    requested services with up to five retries (parallel by default,
    sequential with --sequential).

    Returns 0 on success, 1 on any unrecoverable build failure.
    """
    defs = config["definitions"]
    base_images = defs["base_images"]

    # Ensure base-node-deps exists — every Node.js frontend builds FROM it.
    node_tag = base_images["node-deps"]["tag"]
    if not docker_image_exists(node_tag):
        log_warn(f"Building {node_tag} (required by all Node.js frontends)...")
        result = run_proc([
            "docker", "build",
            "-f", str(BASE_DIR / base_images["node-deps"]["dockerfile"]),
            "-t", node_tag, str(PROJECT_ROOT),
        ])
        if result.returncode != 0:
            log_err("Failed to build base-node-deps — cannot proceed")
            return 1
    else:
        log_ok(f"Base image {node_tag} exists")

    # Warn about optional bases (only needed for C++ daemons / dev container).
    optional = ["apt", "conan-deps", "pip-deps", "android-sdk"]
    missing = [base_images[k]["tag"] for k in optional if not docker_image_exists(base_images[k]["tag"])]
    if missing:
        log_warn("Optional base images not built (C++ daemons, dev container):")
        for img in missing:
            print(f" - {img}")
        print(f"{YELLOW}Build with:{NC} python3 deployment.py build base\n")

    buildable = get_buildable_services()
    targets = args.apps if args.apps else buildable
    services = resolve_services(targets, config)
    if services is None:
        return 1

    # Skip existing (unless --force) — targets/services stay parallel lists.
    if not args.force:
        needs_build, needs_names = [], []
        for t, svc in zip(targets, services):
            img = f"deployment-{svc}"
            if docker_image_exists(img):
                log_ok(f"Skipping {t} — image {img} already exists (use --force to rebuild)")
            else:
                needs_names.append(t)
                needs_build.append(svc)
        if not needs_build:
            print(f"\n{GREEN}All app images already built! Use --force to rebuild.{NC}")
            return 0
        targets, services = needs_names, needs_build

    print(f"{YELLOW}Building: {' '.join(targets)}{NC}\n")

    # Pre-pull base images referenced by app Dockerfiles. Best-effort: a pull
    # failure is not checked here — it surfaces during the build step instead.
    log_info("Pre-pulling base images for app builds...")
    for img in defs["external_images"]["build_bases"]:
        if not docker_image_exists(img):
            print(f" Pulling {img}...")
            pull_with_retry(img)

    # Build with retry — linear backoff of attempt*10 seconds between rounds.
    max_attempts = 5
    build_ok = False
    for attempt in range(1, max_attempts + 1):
        if attempt > 1:
            log_warn(f"Build attempt {attempt}/{max_attempts}...")
        if args.sequential:
            # One service at a time: slower but avoids RAM/bandwidth contention.
            all_ok = True
            for svc in services:
                log_info(f"Building {svc}...")
                result = run_proc(docker_compose("build", svc))
                if result.returncode != 0:
                    log_err(f"Failed: {svc}")
                    all_ok = False
                    break
                log_ok(f"Done: {svc}")
            if all_ok:
                build_ok = True
                break
        else:
            log_info("Parallel build (uses more RAM)...")
            result = run_proc(docker_compose("build", "--parallel", *services))
            if result.returncode == 0:
                build_ok = True
                break
        if attempt < max_attempts:
            wait = attempt * 10
            log_warn(f"Build failed (attempt {attempt}/{max_attempts}), retrying in {wait}s...")
            time.sleep(wait)

    if not build_ok:
        log_err(f"Build failed after {max_attempts} attempts")
        return 1

    print(f"\n{GREEN}Build complete!{NC}")
    print("Start with: python3 deployment.py stack up")
    return 0


# Module entry point — called by loader.dispatch()
# NOTE: must not shadow the imported `run` from cli.helpers
def run(args, config):
    return run_cmd(args, config)

View File

@@ -1,57 +0,0 @@
"""Build base Docker images (apt, node-deps, pip-deps, conan-deps, android-sdk, devcontainer)."""
import argparse
from cli.helpers import (
BASE_DIR, PROJECT_ROOT, GREEN, NC,
build_with_retry, docker_image_exists, docker_image_size,
log_ok, log_warn, log_err,
)
def run(args: argparse.Namespace, config: dict) -> int:
    """Build base Docker images in dependency order.

    Honors --list (print available images and exit), --force (rebuild even
    when the image exists), and an optional list of image names (default:
    all, in dependency order).

    Returns 0 when all requested images build or already exist, 1 on an
    unknown image name or any build failure.
    """
    defs = config["definitions"]
    build_order = defs["base_build_order"]
    base_images = defs["base_images"]

    # FIX: cli/commands.json declares a --list flag for this subcommand, but
    # it was parsed and then silently ignored. Handle it before building.
    if getattr(args, "list", False):
        print("Available base images:")
        for name in build_order:
            print(f"  {name} -> {base_images[name]['tag']}")
        return 0

    targets = args.images if args.images else list(build_order)
    for t in targets:
        if t not in base_images:
            log_err(f"Unknown base image: {t}")
            print(f"Available: {', '.join(build_order)}")
            return 1

    print(f"\nMetaBuilder Base Image Builder")
    print(f"Building: {' '.join(targets)}\n")

    failed = []
    for name in build_order:
        # Iterate in dependency order, acting only on requested targets.
        if name not in targets:
            continue
        img = base_images[name]
        tag = img["tag"]
        if not args.force and docker_image_exists(tag):
            # FIX: name and tag previously ran together with no separator.
            log_ok(f"Skipping {name} — {tag} already exists (use --force to rebuild)")
            continue
        # Some Dockerfiles build from base-images/, others from the repo root.
        context = str(BASE_DIR) if img.get("context") == "base-images" else str(PROJECT_ROOT)
        dockerfile = str(BASE_DIR / img["dockerfile"])
        # A failed image does not abort the run; remaining images still build.
        if not build_with_retry(tag, dockerfile, context):
            failed.append(name)
            log_warn("Continuing with remaining images...")
        print()

    if not failed:
        print(f"{GREEN}All base images built successfully!{NC}\n")
        for name in targets:
            tag = base_images[name]["tag"]
            if docker_image_exists(tag):
                # FIX: the status glyph between GREEN/NC was missing.
                print(f"  {GREEN}✓{NC} {tag} ({docker_image_size(tag)})")
        print(f"\nNow run: python3 deployment.py build apps")
        return 0

    log_err(f"Some images failed: {' '.join(failed)}")
    print(f"Re-run: python3 deployment.py build base {' '.join(failed)}")
    return 1

View File

@@ -1,57 +0,0 @@
"""Build testcontainers Conan packages (C shared library + Go sidecar) and upload to Nexus."""
import argparse
import os
import shutil
from cli.helpers import PROJECT_ROOT, log_err, log_info, log_ok, run as run_proc, run_check
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Build the testcontainers Conan packages and upload them to Nexus.

    Builds testcontainers-native (C shared library) unless --skip-native, and
    testcontainers-sidecar (Go binary) unless --skip-sidecar. The Nexus
    location and credentials come from NEXUS_URL / NEXUS_USER / NEXUS_PASS.

    Returns 0 on success, 1 when a required tool (go, conan) is missing;
    failed conan commands propagate via run_check.
    """
    nexus_url = os.environ.get("NEXUS_URL", "http://localhost:8091/repository/conan-hosted/")
    nexus_user = os.environ.get("NEXUS_USER", "admin")
    nexus_pass = os.environ.get("NEXUS_PASS", "nexus")
    recipes_dir = PROJECT_ROOT / "dbal" / "production" / "build-config" / "conan-recipes"

    log_info("Checking prerequisites...")
    for tool, install_msg in [("go", "https://go.dev/dl/"), ("conan", "pip install conan")]:
        if not shutil.which(tool):
            log_err(f"{tool} not found. Install: {install_msg}")
            return 1
        # `go version` vs `conan --version` — the two tools spell it differently.
        run_proc([tool, "version" if tool == "go" else "--version"])

    log_info("Configuring Nexus Conan remote...")
    run_proc(["conan", "remote", "add", "nexus", nexus_url, "--force"])
    run_check(["conan", "remote", "login", "nexus", nexus_user, "--password", nexus_pass])
    # Toggle conancenter to normalize remote state, then give nexus top priority.
    run_proc(["conan", "remote", "disable", "conancenter"])
    run_proc(["conan", "remote", "enable", "conancenter"])
    run_proc(["conan", "remote", "update", "nexus", "--index", "0"])

    if not args.skip_native:
        log_info("Building testcontainers-native/0.1.0 (C shared library)...")
        run_check(["conan", "create", str(recipes_dir / "testcontainers-native"),
                   "-s", "build_type=Release", "-s", "compiler.cppstd=20", "--build=missing"])
        log_info("Uploading testcontainers-native to Nexus...")
        run_check(["conan", "upload", "testcontainers-native/0.1.0", "--remote", "nexus", "--confirm"])
        log_ok("testcontainers-native uploaded")
    else:
        log_info("Skipping testcontainers-native (--skip-native)")

    if not args.skip_sidecar:
        sidecar_src = PROJECT_ROOT / "dbal" / "testcontainers-sidecar"
        log_info("Building testcontainers-sidecar/0.1.0 (Go binary)...")
        # The sidecar recipe's build() locates its Go sources via this env var.
        env = os.environ.copy()
        env["TESTCONTAINERS_SIDECAR_SRC"] = str(sidecar_src)
        run_check(["conan", "create", str(recipes_dir / "testcontainers-sidecar"),
                   "-s", "build_type=Release", "-s", "compiler.cppstd=20", "--build=missing"], env=env)
        log_info("Uploading testcontainers-sidecar to Nexus...")
        run_check(["conan", "upload", "testcontainers-sidecar/0.1.0", "--remote", "nexus", "--confirm"])
        log_ok("testcontainers-sidecar uploaded")
    else:
        log_info("Skipping testcontainers-sidecar (--skip-sidecar)")

    log_ok("Testcontainers build complete")
    return 0


def run(args, config):
    # Module entry point invoked by cli.loader.dispatch().
    return run_cmd(args, config)

View File

@@ -1,200 +0,0 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"description": "MetaBuilder Deployment CLI — command definitions (argparse from JSON)",
"version": "1.0.0",
"program": {
"prog": "deployment.py",
"description": "MetaBuilder Deployment CLI"
},
"definitions": {
"base_build_order": ["apt", "conan-deps", "android-sdk", "node-deps", "pip-deps", "devcontainer"],
"stack_commands": ["up", "start", "down", "stop", "build", "restart", "logs", "ps", "status", "clean"],
"base_images": {
"apt": { "dockerfile": "Dockerfile.apt", "tag": "metabuilder/base-apt:latest" },
"conan-deps": { "dockerfile": "Dockerfile.conan-deps", "tag": "metabuilder/base-conan-deps:latest" },
"node-deps": { "dockerfile": "Dockerfile.node-deps", "tag": "metabuilder/base-node-deps:latest" },
"pip-deps": { "dockerfile": "Dockerfile.pip-deps", "tag": "metabuilder/base-pip-deps:latest" },
"android-sdk": { "dockerfile": "Dockerfile.android-sdk", "tag": "metabuilder/base-android-sdk:latest", "context": "base-images" },
"devcontainer": { "dockerfile": "Dockerfile.devcontainer", "tag": "metabuilder/devcontainer:latest" }
},
"nexus_images": {
"base": [
{ "local": "metabuilder/base-apt:latest", "name": "base-apt", "size": "2.8GB" },
{ "local": "metabuilder/base-node-deps:latest", "name": "base-node-deps", "size": "5.5GB" },
{ "local": "metabuilder/base-pip-deps:latest", "name": "base-pip-deps", "size": "1.4GB" },
{ "local": "metabuilder/base-android-sdk:latest", "name": "base-android-sdk", "size": "6.1GB" }
],
"apps": [
{ "local": "deployment-dbal-init:latest", "name": "dbal-init", "size": "12MB" },
{ "local": "deployment-storybook:latest", "name": "storybook", "size": "112MB" },
{ "local": "deployment-nginx:latest", "name": "nginx", "size": "92MB" },
{ "local": "deployment-nginx-stream:latest", "name": "nginx-stream", "size": "92MB" },
{ "local": "deployment-pastebin-backend:latest", "name": "pastebin-backend", "size": "236MB" },
{ "local": "deployment-emailclient-app:latest", "name": "emailclient", "size": "350MB" },
{ "local": "deployment-email-service:latest", "name": "email-service", "size": "388MB" },
{ "local": "deployment-exploded-diagrams:latest", "name": "exploded-diagrams", "size": "315MB" },
{ "local": "deployment-pastebin:latest", "name": "pastebin", "size": "382MB" },
{ "local": "deployment-frontend-app:latest", "name": "frontend-app", "size": "361MB" },
{ "local": "deployment-workflowui:latest", "name": "workflowui", "size": "542MB" },
{ "local": "deployment-postgres-dashboard:latest", "name": "postgres-dashboard", "size": "508MB" },
{ "local": "deployment-smtp-relay:latest", "name": "smtp-relay", "size": "302MB" },
{ "local": "deployment-dbal:latest", "name": "dbal", "size": "3.0GB" },
{ "local": "deployment-codegen:latest", "name": "codegen", "size": "5.6GB" }
],
"heavy": [
{ "local": "metabuilder/base-conan-deps:latest", "name": "base-conan-deps", "size": "32GB" },
{ "local": "metabuilder/devcontainer:latest", "name": "devcontainer", "size": "41GB" }
],
"heavy_apps": [
{ "local": "deployment-media-daemon:latest", "name": "media-daemon", "size": "3.5GB" }
]
},
"npm_patches": {
"registry": ["minimatch@10.2.4", "tar@7.5.11"],
"local": [
{
"name": "@esbuild-kit/core-utils",
"version": "3.3.3-metabuilder.0",
"tarball": "esbuild-kit-core-utils-3.3.3-metabuilder.0.tgz"
}
]
},
"external_images": {
"core": [
"postgres:15-alpine", "redis:7-alpine",
"docker.elastic.co/elasticsearch/elasticsearch:8.11.0",
"mysql:8.0", "mongo:7.0", "phpmyadmin:latest",
"mongo-express:latest", "redis/redisinsight:latest",
"docker.elastic.co/kibana/kibana:8.11.0",
"boky/postfix:latest", "nginx:alpine"
],
"monitoring": [
"prom/prometheus:latest", "grafana/grafana:latest",
"grafana/loki:latest", "grafana/promtail:latest",
"prom/node-exporter:latest", "prometheuscommunity/postgres-exporter:latest",
"oliver006/redis_exporter:latest", "gcr.io/cadvisor/cadvisor:latest",
"prom/alertmanager:latest"
],
"media": ["libretime/icecast:2.4.4"],
"build_bases": ["node:20-alpine", "node:24-alpine", "python:3.11-slim", "python:3.12-slim", "alpine:3.19"]
}
},
"commands": {
"build": {
"help": "Build Docker images",
"subcommands": {
"base": {
"help": "Build base Docker images",
"module": "cli.build_base",
"arguments": [
{ "name": "--force", "action": "store_true", "help": "Rebuild even if images exist" },
{ "name": "--list", "action": "store_true", "help": "List available base images" },
{ "name": "images", "nargs": "*", "help": "Images to build (default: all)" }
]
},
"apps": {
"help": "Build application Docker images",
"module": "cli.build_apps",
"arguments": [
{ "name": "--force", "action": "store_true", "help": "Rebuild even if images exist" },
{ "name": "--sequential", "action": "store_true", "help": "Build sequentially (less RAM)" },
{ "name": "apps", "nargs": "*", "help": "Apps to build (default: all)" }
]
},
"testcontainers": {
"help": "Build testcontainers Conan packages",
"module": "cli.build_testcontainers",
"arguments": [
{ "name": "--skip-native", "action": "store_true", "help": "Skip C shared library" },
{ "name": "--skip-sidecar", "action": "store_true", "help": "Skip Go sidecar" }
]
}
}
},
"deploy": {
"help": "Build + deploy app(s)",
"module": "cli.deploy",
"arguments": [
{ "name": "apps", "nargs": "*", "help": "Apps to deploy" },
{ "name": "--all", "action": "store_true", "help": "Deploy all apps" },
{ "name": "--no-cache", "action": "store_true", "help": "Build without Docker cache" }
]
},
"stack": {
"help": "Manage the full MetaBuilder stack",
"module": "cli.stack",
"arguments": [
{ "name": "command", "nargs": "?", "default": "up", "help": "Stack command (default: up)" },
{ "name": "--monitoring", "action": "store_true", "help": "Include monitoring services" },
{ "name": "--media", "action": "store_true", "help": "Include media services" },
{ "name": "--all", "action": "store_true", "dest": "all_profiles", "help": "Include all profiles" }
]
},
"release": {
"help": "Bump version, commit, push, and deploy",
"module": "cli.release",
"arguments": [
{ "name": "app", "help": "App to release" },
{ "name": "bump", "nargs": "?", "default": "patch", "help": "patch, minor, major, or x.y.z" }
]
},
"nexus": {
"help": "Nexus registry management",
"subcommands": {
"init": {
"help": "Initialize Nexus repositories",
"module": "cli.nexus_init",
"arguments": [
{ "name": "--ci", "action": "store_true", "help": "Lightweight CI init (npm repos only)" }
]
},
"push": {
"help": "Push images to Nexus",
"module": "cli.nexus_push",
"arguments": [
{ "name": "--tag", "help": "Image tag (default: current git branch)" },
{ "name": "--src", "default": "ghcr.io", "help": "Source registry" },
{ "name": "--pull", "action": "store_true", "help": "Pull from remote first" }
]
},
"populate": {
"help": "Push all images to Nexus with :main + :latest tags",
"module": "cli.nexus_populate",
"arguments": [
{ "name": "--skip-heavy", "action": "store_true", "help": "Skip conan-deps, devcontainer, media-daemon" }
]
}
}
},
"npm": {
"help": "npm patch management",
"subcommands": {
"publish-patches": {
"help": "Publish patched npm packages to local registry",
"module": "cli.npm_patches",
"arguments": [
{ "name": "--nexus", "action": "store_true", "help": "Force Nexus on :8091" },
{ "name": "--verdaccio", "action": "store_true", "help": "Force Verdaccio on :4873" }
]
}
}
},
"artifactory": {
"help": "Artifactory management",
"subcommands": {
"init": {
"help": "Initialize Artifactory CE Conan repositories",
"module": "cli.artifactory_init",
"arguments": []
}
}
}
}
}

View File

@@ -1,80 +0,0 @@
"""Build + deploy one or more apps with health check polling."""
import argparse
import subprocess
import sys
import time
from cli.helpers import (
COMPOSE_FILE, GREEN, RED, YELLOW, BLUE, NC,
docker_compose, get_buildable_services, log_err, log_warn, resolve_services, run as run_proc,
)
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Build, deploy, and health-check the requested compose services.

    Returns 0 when every deployed service reports healthy, 1 otherwise.
    """
    buildable = get_buildable_services()
    targets = buildable if args.all else args.apps
    if not targets:
        log_err("Specify app(s) to deploy, or use --all")
        print(f"Available: {', '.join(buildable)}")
        return 1
    services = resolve_services(targets, config)
    if services is None:
        return 1

    banner = f"{BLUE}{'=' * 43}{NC}"
    print(f"\n{banner}")
    print(f"{BLUE} Deploy: {' '.join(targets)}{NC}")
    print(f"{banner}\n")

    # [1/3] Build the images (optionally without cache).
    print(f"{YELLOW}[1/3] Building...{NC}")
    extra = ["--no-cache"] if args.no_cache else []
    if run_proc(docker_compose("build", *extra, *services)).returncode != 0:
        log_err("Build failed")
        return 1

    # [2/3] Recreate the containers.
    print(f"\n{YELLOW}[2/3] Deploying...{NC}")
    if run_proc(docker_compose("up", "-d", "--force-recreate", *services)).returncode != 0:
        log_err("Deploy failed")
        return 1

    # [3/3] Poll each container's docker health state.
    print(f"\n{YELLOW}[3/3] Waiting for health checks...{NC}")

    def _poll(container: str) -> str:
        """Poll docker-inspect health for up to 30 tries (2s apart)."""
        state = "unknown"
        for _ in range(30):
            proc = subprocess.run(
                ["docker", "inspect", "--format", "{{.State.Health.Status}}", container],
                capture_output=True, text=True,
            )
            state = proc.stdout.strip() if proc.returncode == 0 else "missing"
            if state in ("healthy", "unhealthy"):
                break
            time.sleep(2)
        return state

    all_healthy = True
    for svc in services:
        sys.stdout.write(f" {svc}: ")
        sys.stdout.flush()
        status = _poll(f"metabuilder-{svc}")
        if status == "healthy":
            print(f"{GREEN}healthy{NC}")
        elif status == "unhealthy":
            print(f"{RED}unhealthy{NC}")
            all_healthy = False
        else:
            print(f"{YELLOW}timeout (status: {status}){NC}")
            all_healthy = False

    print()
    if all_healthy:
        print(f"{GREEN}All services deployed and healthy{NC}")
    else:
        log_warn(f"Some services not healthy — check: docker compose -f {COMPOSE_FILE} ps")
    return 0 if all_healthy else 1


run = run_cmd

View File

@@ -1,157 +0,0 @@
"""Shared helpers for all CLI command modules."""
from __future__ import annotations
import os
import subprocess
import sys
import time
from pathlib import Path
# ── Paths ────────────────────────────────────────────────────────────────────
SCRIPT_DIR = Path(__file__).resolve().parent.parent # deployment/
PROJECT_ROOT = SCRIPT_DIR.parent
BASE_DIR = SCRIPT_DIR / "base-images"
COMPOSE_FILE = SCRIPT_DIR / "metabuilder/compose.yml"
# ── Colors ───────────────────────────────────────────────────────────────────
RED = "\033[0;31m"
GREEN = "\033[0;32m"
YELLOW = "\033[1;33m"
BLUE = "\033[0;34m"
CYAN = "\033[0;36m"
NC = "\033[0m"


def _emit(color: str, msg: str) -> None:
    """Print a colorized '[deploy]'-prefixed log line."""
    print(f"{color}[deploy]{NC} {msg}")


def log_info(msg: str) -> None:
    """Informational log line (blue prefix)."""
    _emit(BLUE, msg)


def log_ok(msg: str) -> None:
    """Success log line (green prefix)."""
    _emit(GREEN, msg)


def log_warn(msg: str) -> None:
    """Warning log line (yellow prefix)."""
    _emit(YELLOW, msg)


def log_err(msg: str) -> None:
    """Error log line (red prefix)."""
    _emit(RED, msg)
# ── Command runners ─────────────────────────────────────────────────────────
def run(cmd: list[str], **kwargs) -> subprocess.CompletedProcess:
    """Echo *cmd* (' $ ...') then execute it; output streams to the caller."""
    echo_line = " $ " + " ".join(cmd)
    print(echo_line, flush=True)
    return subprocess.run(cmd, **kwargs)


def run_check(cmd: list[str], **kwargs) -> subprocess.CompletedProcess:
    """Like run(), but raises CalledProcessError on a non-zero exit."""
    return run(cmd, check=True, **kwargs)
# ── Docker helpers ──────────────────────────────────────────────────────────
def docker_image_exists(tag: str) -> bool:
    """True when *tag* resolves to an image in the local Docker daemon."""
    inspect = subprocess.run(["docker", "image", "inspect", tag], capture_output=True)
    return inspect.returncode == 0


def docker_compose(*args: str) -> list[str]:
    """Build a 'docker compose -f <stack compose file> ...' argv list."""
    return ["docker", "compose", "-f", str(COMPOSE_FILE), *args]
def curl_status(url: str, auth: str | None = None, timeout: int = 5) -> int:
    """Return the HTTP status code curl reports for *url*, or 0 on failure.

    *auth*, when given, is a 'user:password' string passed via curl -u.
    """
    argv = ["curl", "-s", "-o", os.devnull, "-w", "%{http_code}",
            "--connect-timeout", str(timeout)]
    if auth:
        argv += ["-u", auth]
    argv.append(url)
    proc = subprocess.run(argv, capture_output=True, text=True)
    try:
        return int(proc.stdout.strip())
    except (ValueError, AttributeError):
        # Empty/garbage output (e.g. connection refused) maps to 0.
        return 0
def pull_with_retry(image: str, max_attempts: int = 5) -> bool:
    """docker-pull *image*, retrying with exponential backoff (5s, 10s, ...).

    Returns True on the first successful pull, False after exhausting
    *max_attempts*.
    """
    backoff = 5
    for attempt in range(1, max_attempts + 1):
        if run(["docker", "pull", image]).returncode == 0:
            return True
        if attempt < max_attempts:
            log_warn(f"Pull failed (attempt {attempt}/{max_attempts}), retrying in {backoff}s...")
            time.sleep(backoff)
            backoff *= 2
    log_err(f"Failed to pull {image} after {max_attempts} attempts")
    return False
def build_with_retry(tag: str, dockerfile: str, context: str, max_attempts: int = 5) -> bool:
    """docker-build *tag* (plus a YYYYMMDD date tag), retrying on failure.

    The retry delay grows linearly: 15s, 30s, 45s, ...
    Returns True on success, False after *max_attempts* failures.
    """
    from datetime import datetime

    repo = tag.rsplit(":", 1)[0]
    date_tag = f"{repo}:{datetime.now().strftime('%Y%m%d')}"
    log_info(f"Building {tag} ...")
    for attempt in range(1, max_attempts + 1):
        proc = run([
            "docker", "build", "--network=host",
            "--file", dockerfile,
            "--tag", tag, "--tag", date_tag,
            context,
        ])
        if proc.returncode == 0:
            log_ok(f"{tag} built successfully")
            return True
        if attempt < max_attempts:
            pause = attempt * 15
            log_warn(f"Build failed (attempt {attempt}/{max_attempts}), retrying in {pause}s ...")
            time.sleep(pause)
    log_err(f"Failed to build {tag} after {max_attempts} attempts")
    return False
def get_buildable_services() -> list[str]:
    """Names of compose services that carry a build: section."""
    import yaml

    with open(COMPOSE_FILE) as fh:
        stack = yaml.safe_load(fh)
    services = stack.get("services", {})
    return [name for name, spec in services.items()
            if isinstance(spec, dict) and "build" in spec]
def resolve_services(targets: list[str], config: dict) -> list[str] | None:
    """Validate *targets* against buildable compose services.

    Returns the targets as a list, or None (after logging the first
    unknown name and the available choices) on any invalid target.
    """
    buildable = get_buildable_services()
    unknown = next((t for t in targets if t not in buildable), None)
    if unknown is not None:
        log_err(f"Unknown or non-buildable service: {unknown}")
        print(f"Available: {', '.join(buildable)}")
        return None
    return list(targets)
def docker_image_size(tag: str) -> str:
    """Human-readable size ('X.Y GB') of a local image, or '?' when unknown."""
    proc = subprocess.run(
        ["docker", "image", "inspect", tag, "--format", "{{.Size}}"],
        capture_output=True, text=True,
    )
    try:
        size_bytes = int(proc.stdout.strip())
    except ValueError:
        # inspect failed (missing image) -> empty/garbage stdout.
        return "?"
    return f"{size_bytes / 1073741824:.1f} GB"

View File

@@ -1,68 +0,0 @@
"""Load CLI structure from commands.json and dispatch to handler modules."""
import argparse
import importlib
import json
from pathlib import Path
# Declarative CLI definition living next to this module.
CONFIG_PATH = Path(__file__).parent / "commands.json"


def _load_config() -> dict:
    """Parse commands.json into a plain dict."""
    return json.loads(CONFIG_PATH.read_text())
def _add_arguments(parser: argparse.ArgumentParser, arguments: list[dict]) -> None:
"""Add arguments from JSON definitions to an argparse parser."""
for arg_def in arguments:
name = arg_def["name"]
kwargs = {k: v for k, v in arg_def.items() if k != "name"}
if name.startswith("-"):
parser.add_argument(name, **kwargs)
else:
parser.add_argument(name, **kwargs)
def _build_subcommands(
parent_sub: argparse._SubParsersAction,
commands: dict,
) -> None:
"""Recursively build subcommand parsers from JSON config."""
for cmd_name, cmd_def in commands.items():
parser = parent_sub.add_parser(cmd_name, help=cmd_def.get("help", ""))
if "module" in cmd_def:
parser.set_defaults(_module=cmd_def["module"])
if "arguments" in cmd_def:
_add_arguments(parser, cmd_def["arguments"])
if "subcommands" in cmd_def:
sub = parser.add_subparsers(dest=f"{cmd_name}_cmd")
_build_subcommands(sub, cmd_def["subcommands"])
def build_parser() -> tuple[argparse.ArgumentParser, dict]:
    """Construct the full CLI parser from commands.json.

    Returns (parser, config) so callers can hand the raw config on to
    command handlers.
    """
    config = _load_config()
    program = config["program"]
    parser = argparse.ArgumentParser(
        prog=program["prog"],
        description=program["description"],
    )
    sub = parser.add_subparsers(dest="command", help="Command group")
    _build_subcommands(sub, config["commands"])
    return parser, config
def dispatch(args: argparse.Namespace, config: dict) -> int:
    """Route parsed args to their handler module's run(args, config).

    Returns 0 when no handler was selected (e.g. a bare invocation with
    no subcommand).
    """
    module_path = getattr(args, "_module", None)
    if not module_path:
        return 0
    handler = importlib.import_module(module_path)
    return handler.run(args, config)

View File

@@ -1,134 +0,0 @@
"""Initialize Nexus repositories (Docker + npm, or npm-only for CI)."""
import argparse
import json
import os
import subprocess
from cli.helpers import curl_status, log_err, run as run_proc
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Bootstrap a Nexus instance: admin password, realms, repositories.

    Full mode creates the hosted Docker registry plus npm hosted/proxy/
    group repos; --ci mode only enables the npm token realm and npm repos.
    Returns 0 on success, 1 on any unrecoverable HTTP error.
    """
    nexus_url = os.environ.get("NEXUS_URL", "http://localhost:8091" if args.ci else "http://nexus:8081")
    new_pass = os.environ.get("NEXUS_ADMIN_NEW_PASS", "nexus")
    docker_port = os.environ.get("DOCKER_REPO_PORT", "5000")
    pass_file = "/tmp/nexus-data/admin.password" if args.ci else "/nexus-data/admin.password"
    prefix = "nexus-ci-init" if args.ci else "nexus-init"

    def nlog(msg: str) -> None:
        """Prefix log lines with the init mode."""
        print(f"[{prefix}] {msg}")

    def http_call(method: str, url: str, user: str, content_type: str, payload: str) -> str:
        """Fire a curl request and return the HTTP status code as a string."""
        proc = subprocess.run([
            "curl", "-s", "-o", os.devnull, "-w", "%{http_code}", "-X", method,
            url, "-u", user, "-H", f"Content-Type: {content_type}", "-d", payload,
        ], capture_output=True, text=True)
        return proc.stdout.strip()

    auth = f"admin:{new_pass}"

    # Resolve the admin password: either it was already rotated, or this is
    # the first run and the generated password file is still present.
    if curl_status(f"{nexus_url}/service/rest/v1/status", auth) == 200:
        nlog(f"Already initialised with password '{new_pass}'")
    elif os.path.exists(pass_file):
        with open(pass_file) as fh:
            init_pass = fh.read().strip()
        nlog("First run: changing admin password...")
        code = http_call("PUT", f"{nexus_url}/service/rest/v1/security/users/admin/change-password",
                         f"admin:{init_pass}", "text/plain", new_pass)
        if code == "204":
            nlog(f"Admin password set to '{new_pass}'")
        else:
            nlog(f"ERROR: password change returned HTTP {code}")
            return 1
    else:
        nlog("ERROR: cannot authenticate — is NEXUS_ADMIN_NEW_PASS correct?")
        return 1

    # Anonymous read access.
    run_proc(["curl", "-sf", "-X", "PUT", f"{nexus_url}/service/rest/v1/security/anonymous",
              "-u", auth, "-H", "Content-Type: application/json",
              "-d", '{"enabled":true,"userId":"anonymous","realmName":"NexusAuthorizingRealm"}'])
    nlog("Anonymous access enabled")

    if not args.ci:
        # Docker + npm token realms, then the hosted Docker registry.
        run_proc(["curl", "-sf", "-X", "PUT", f"{nexus_url}/service/rest/v1/security/realms/active",
                  "-u", auth, "-H", "Content-Type: application/json",
                  "-d", '["NexusAuthenticatingRealm","DockerToken","NpmToken"]'])
        nlog("Docker + npm Bearer Token realms enabled")
        docker_repo = json.dumps({
            "name": "local", "online": True,
            "storage": {"blobStoreName": "default", "strictContentTypeValidation": True, "writePolicy": "allow"},
            "docker": {"v1Enabled": False, "forceBasicAuth": False, "httpPort": int(docker_port)},
        })
        code = http_call("POST", f"{nexus_url}/service/rest/v1/repositories/docker/hosted",
                         auth, "application/json", docker_repo)
        if code == "201":
            nlog(f"Docker hosted repo 'local' created on port {docker_port}")
        elif code == "400":
            # 400 is what Nexus returns for an already-existing repo name.
            nlog("Docker repo 'local' already exists, skipping")
        else:
            nlog(f"ERROR: Docker repo creation returned HTTP {code}")
            return 1
    else:
        # CI only needs the npm token realm.
        run_proc(["curl", "-sf", "-X", "PUT", f"{nexus_url}/service/rest/v1/security/realms/active",
                  "-u", auth, "-H", "Content-Type: application/json",
                  "-d", '["NexusAuthenticatingRealm","NpmToken"]'])

    # npm repositories: hosted (publish target), proxy (npmjs cache), and a
    # group combining both.
    npm_repos = [
        ("npm/hosted", "npm-hosted", {
            "name": "npm-hosted", "online": True,
            "storage": {"blobStoreName": "default", "strictContentTypeValidation": True,
                        "writePolicy": "allow" if args.ci else "allow_once"},
        }),
        ("npm/proxy", "npm-proxy", {
            "name": "npm-proxy", "online": True,
            "storage": {"blobStoreName": "default", "strictContentTypeValidation": True},
            "proxy": {"remoteUrl": "https://registry.npmjs.org", "contentMaxAge": 1440, "metadataMaxAge": 1440},
            "httpClient": {"blocked": False, "autoBlock": True},
            "negativeCache": {"enabled": True, "timeToLive": 1440},
        }),
        ("npm/group", "npm-group", {
            "name": "npm-group", "online": True,
            "storage": {"blobStoreName": "default", "strictContentTypeValidation": True},
            "group": {"memberNames": ["npm-hosted", "npm-proxy"]},
        }),
    ]
    for repo_type, label, body in npm_repos:
        code = http_call("POST", f"{nexus_url}/service/rest/v1/repositories/{repo_type}",
                         auth, "application/json", json.dumps(body))
        if code == "201":
            nlog(f"{label} repo created")
        elif code == "400":
            nlog(f"{label} repo already exists, skipping")
        else:
            nlog(f"ERROR creating {label}: HTTP {code}")
            return 1

    if args.ci:
        nlog("Nexus CI init complete")
    else:
        nlog("")
        nlog("=" * 46)
        nlog(" Nexus ready!")
        nlog(f" Registry : localhost:{docker_port}")
        nlog(f" Web UI : http://localhost:8091")
        nlog(f" Login : admin / {new_pass}")
        nlog(f" npm group: {nexus_url}/repository/npm-group/")
        nlog("=" * 46)
    return 0


def run(args, config):
    return run_cmd(args, config)

View File

@@ -1,79 +0,0 @@
"""Push all locally-built images to Nexus with :main + :latest tags."""
import argparse
import pathlib
import yaml
from cli.helpers import (
BLUE, GREEN, NC,
docker_image_exists, log_err, log_info, log_ok, log_warn, run as run_proc,
)
COMPOSE_FILE = pathlib.Path(__file__).parent.parent / "metabuilder/compose.yml"


def _load_built_images() -> list[dict]:
    """Collect {local, name} entries for every compose service that builds.

    'local' is the compose image tag (or a 'deployment-<svc>:latest'
    fallback); 'name' is the short push name derived by stripping any
    registry prefix, tag, and the 'deployment-' prefix.
    """
    with open(COMPOSE_FILE) as fh:
        compose = yaml.safe_load(fh)
    entries = []
    for svc_name, svc in compose.get("services", {}).items():
        if "build" not in svc:
            continue
        local = svc.get("image", f"deployment-{svc_name}:latest")
        short = local.split("/")[-1].split(":")[0].removeprefix("deployment-")
        entries.append({"local": local, "name": short})
    return entries
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Tag and push every locally-built compose image to Nexus.

    Each image is pushed twice, as :main and :latest.
    Returns 1 if any push failed, else 0.
    """
    nexus = "localhost:5050"
    slug = "johndoe6345789/metabuilder-small"
    nexus_user, nexus_pass = "admin", "nexus"
    log_info(f"Logging in to {nexus}...")
    # Credentials go over stdin so they never appear in the process list.
    run_proc(["docker", "login", nexus, "-u", nexus_user, "--password-stdin"],
             input=nexus_pass.encode())
    images = _load_built_images()
    pushed = skipped = failed = 0

    def push_image(local: str, name: str) -> None:
        """Tag *local* as :main/:latest under the Nexus slug and push both."""
        nonlocal pushed, skipped, failed
        if not docker_image_exists(local):
            # Fixed: name and image tag were concatenated with no separator
            # ("SKIP webdeployment-web:latest ..."), making the message unreadable.
            log_warn(f"SKIP {name} — {local} not found locally")
            skipped += 1
            return
        dst_main = f"{nexus}/{slug}/{name}:main"
        dst_latest = f"{nexus}/{slug}/{name}:latest"
        log_info(f"Pushing {name}...")
        run_proc(["docker", "tag", local, dst_main])
        run_proc(["docker", "tag", local, dst_latest])
        r1 = run_proc(["docker", "push", dst_main])
        r2 = run_proc(["docker", "push", dst_latest])
        if r1.returncode == 0 and r2.returncode == 0:
            log_ok(f" {name} -> :main + :latest")
            pushed += 1
        else:
            log_err(f" {name} FAILED")
            failed += 1

    print(f"\n{BLUE}Registry : {nexus}{NC}")
    print(f"{BLUE}Slug : {slug}{NC}")
    print(f"{BLUE}Images : {len(images)} (parsed from compose.yml){NC}\n")
    for entry in images:
        push_image(entry["local"], entry["name"])
    print(f"\n{GREEN}{'=' * 46}{NC}")
    print(f"{GREEN} Done. pushed={pushed} skipped={skipped} failed={failed}{NC}")
    print(f"{GREEN}{'=' * 46}{NC}")
    return 1 if failed else 0


run = run_cmd

View File

@@ -1,113 +0,0 @@
"""Push locally-built images to local Nexus registry for act CI runner."""
import argparse
import pathlib
import re
import subprocess
import yaml
from cli.helpers import (
PROJECT_ROOT, GREEN, YELLOW, RED, NC,
docker_image_exists, log_info, run as run_proc,
)
COMPOSE_FILE = pathlib.Path(__file__).parent.parent / "metabuilder/compose.yml"
BASE_IMAGES_DIR = pathlib.Path(__file__).parent.parent / "base-images"
# Dockerfile.apt -> base-apt, Dockerfile.node-deps -> base-node-deps, etc.
_DOCKERFILE_RE = re.compile(r"^Dockerfile\.(.+)$")


def _load_image_names() -> list[str]:
    """All pushable image short-names.

    Base images come from base-images/Dockerfile.* (prefixed 'base-');
    app images come from compose services with a build: section, with any
    registry prefix, tag, and 'deployment-' prefix stripped.
    """
    names = [f"base-{m.group(1)}"
             for df in sorted(BASE_IMAGES_DIR.glob("Dockerfile.*"))
             if (m := _DOCKERFILE_RE.match(df.name))]
    with open(COMPOSE_FILE) as fh:
        compose = yaml.safe_load(fh)
    for svc_name, svc in compose.get("services", {}).items():
        if "build" not in svc:
            continue
        local = svc.get("image", f"deployment-{svc_name}:latest")
        names.append(local.split("/")[-1].split(":")[0].removeprefix("deployment-"))
    return names
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Mirror CI images into the local Nexus registry for the act runner.

    The repo slug comes from the git origin remote (with a hard-coded
    fallback), the tag from --tag or the current branch. With --pull,
    missing images are first fetched from the source registry.
    Returns 1 if any push failed, else 0.
    """
    local_registry = "localhost:5050"
    nexus_user, nexus_pass = "admin", "nexus"

    # Derive the <owner>/<repo> slug from the git remote, if possible.
    origin = subprocess.run(
        ["git", "-C", str(PROJECT_ROOT), "remote", "get-url", "origin"],
        capture_output=True, text=True,
    )
    slug = "johndoe6345789/metabuilder-small"
    if origin.returncode == 0:
        match = re.search(r"github\.com[:/]([^/]+/[^/.]+)", origin.stdout.strip())
        if match:
            slug = match.group(1).lower()

    source_registry = args.src
    if args.tag:
        tag = args.tag
    else:
        branch = subprocess.run(
            ["git", "-C", str(PROJECT_ROOT), "rev-parse", "--abbrev-ref", "HEAD"],
            capture_output=True, text=True,
        )
        tag = branch.stdout.strip() if branch.returncode == 0 else "main"

    images = _load_image_names()
    print(f"{YELLOW}Registry:{NC} {local_registry}")
    print(f"{YELLOW}Slug:{NC} {slug}")
    print(f"{YELLOW}Tag:{NC} {tag}")
    print(f"{YELLOW}Images:{NC} {len(images)} (base-images/ + compose.yml)\n")
    log_info(f"Logging in to {local_registry}...")
    run_proc(["docker", "login", local_registry, "-u", nexus_user, "--password-stdin"],
             input=nexus_pass.encode())

    pushed = skipped = failed = 0
    for image in images:
        src = f"{source_registry}/{slug}/{image}:{tag}"
        dst = f"{local_registry}/{slug}/{image}:{tag}"
        if args.pull:
            print(f" {YELLOW}pulling{NC} {src}...")
            if not docker_image_exists(src):
                if run_proc(["docker", "pull", src]).returncode != 0:
                    print(f" {YELLOW}skip{NC} {image} (not found in {source_registry})")
                    skipped += 1
                    continue
        if not docker_image_exists(src) and not docker_image_exists(dst):
            print(f" {YELLOW}skip{NC} {image} (not found locally)")
            skipped += 1
            continue
        # Retag only when the source-tagged image exists locally; otherwise
        # the destination tag is assumed to be present already.
        if docker_image_exists(src):
            run_proc(["docker", "tag", src, dst])
        print(f" {GREEN}push{NC} {dst}")
        if run_proc(["docker", "push", dst]).returncode == 0:
            pushed += 1
        else:
            print(f" {RED}FAILED{NC} {image}")
            failed += 1
    print(f"\n{GREEN}Done.{NC} pushed={pushed} skipped={skipped} failed={failed}")
    return 1 if failed else 0


run = run_cmd

View File

@@ -1,111 +0,0 @@
"""Publish patched npm packages to a local registry (Nexus or Verdaccio)."""
import argparse
import base64
import os
import subprocess
import tempfile
from pathlib import Path
from cli.helpers import (
SCRIPT_DIR, GREEN, NC,
curl_status, log_err, log_info, log_ok, log_warn, run,
)
# Capture the helper runner before the module-level `run = run_cmd` alias
# below rebinds the name: without this, the npm publish calls inside
# run_cmd would resolve `run` to run_cmd itself and recurse with a list
# argument instead of invoking cli.helpers.run.
_run_proc = run


def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Publish the project's patched npm tarballs to a local registry.

    Target selection: --nexus / --verdaccio force a registry; otherwise
    Nexus is probed first and Verdaccio used as the fallback. Local
    tarballs in deployment/npm-patches are published first, then the
    registry-sourced package specs are re-packed and published.
    Returns 0 on success, 1 on an unreachable registry or missing tarball.
    """
    nexus_url = os.environ.get("NEXUS_URL", "http://localhost:8091")
    nexus_user = os.environ.get("NEXUS_USER", "admin")
    nexus_pass = os.environ.get("NEXUS_PASS", "nexus")
    verdaccio_url = os.environ.get("VERDACCIO_URL", "http://localhost:4873")

    use_nexus = args.nexus
    use_verdaccio = args.verdaccio
    if not use_nexus and not use_verdaccio:
        # Auto-detect: prefer Nexus when it responds.
        if curl_status(f"{nexus_url}/service/rest/v1/status", f"{nexus_user}:{nexus_pass}") == 200:
            use_nexus = True
        else:
            use_verdaccio = True

    patches_def = config["definitions"]["npm_patches"]
    patches_dir = SCRIPT_DIR / "npm-patches"

    with tempfile.TemporaryDirectory() as work_dir:
        npmrc_path = Path(work_dir) / ".npmrc"
        if use_nexus:
            npm_hosted = f"{nexus_url}/repository/npm-hosted/"
            log_info(f"Using Nexus at {nexus_url}...")
            http = curl_status(f"{nexus_url}/service/rest/v1/status", f"{nexus_user}:{nexus_pass}")
            if http != 200:
                log_err(f"Cannot reach Nexus (HTTP {http}). Is it running?")
                return 1
            nexus_auth = base64.b64encode(f"{nexus_user}:{nexus_pass}".encode()).decode()
            host_part = npm_hosted.split("://", 1)[1]
            npmrc_path.write_text(f"//{host_part}:_auth={nexus_auth}\n")
            publish_args = ["--userconfig", str(npmrc_path)]
        else:
            log_info(f"Using Verdaccio at {verdaccio_url}...")
            http = curl_status(f"{verdaccio_url}/-/ping")
            if http != 200:
                log_err(f"Cannot reach Verdaccio (HTTP {http}). Start with: npx verdaccio --config deployment/verdaccio.yaml")
                return 1
            host_part = verdaccio_url.split("://", 1)[1]
            npmrc_path.write_text(f"registry={verdaccio_url}/\n//{host_part}/:_authToken=\n")
            publish_args = ["--registry", verdaccio_url, "--userconfig", str(npmrc_path)]

        published = skipped = 0

        def _publish(tarball_file: Path, pkg_name: str, pkg_version: str) -> None:
            """npm-publish one tarball under the 'patched' dist-tag."""
            nonlocal published, skipped
            proc = _run_proc(["npm", "publish", str(tarball_file), *publish_args, "--tag", "patched"])
            if proc.returncode == 0:
                log_ok(f"Published {pkg_name}@{pkg_version}")
                published += 1
            else:
                log_warn(f"{pkg_name}@{pkg_version} already exists or publish failed, skipping")
                skipped += 1

        # Locally-maintained patch tarballs.
        for patch in patches_def["local"]:
            pkg_name, pkg_version = patch["name"], patch["version"]
            log_info(f"Processing local patch {pkg_name}@{pkg_version}...")
            tarball = patches_dir / patch["tarball"]
            if not tarball.exists():
                log_err(f"Patched tarball not found: {tarball}")
                return 1
            _publish(tarball, pkg_name, pkg_version)

        # Upstream packages re-published under the 'patched' tag.
        for pkg_spec in patches_def["registry"]:
            # rsplit keeps scoped names ('@scope/pkg@1.2.3') intact.
            pkg_name, pkg_version = pkg_spec.rsplit("@", 1)
            log_info(f"Processing {pkg_name}@{pkg_version}...")
            pack = subprocess.run(
                ["npm", "pack", pkg_spec],
                capture_output=True, text=True, cwd=work_dir,
            )
            if pack.returncode != 0:
                log_err(f"Failed to download {pkg_spec}")
                return 1
            tarball_path = Path(work_dir) / pack.stdout.strip().split("\n")[-1]
            _publish(tarball_path, pkg_name, pkg_version)
            tarball_path.unlink(missing_ok=True)

    print(f"\n{GREEN}Done. published={published} skipped={skipped}{NC}")
    if use_nexus:
        print(f"Nexus npm-group: {nexus_url}/repository/npm-group/")
    else:
        print(f"Verdaccio registry: {verdaccio_url}")
    return 0


run = run_cmd

View File

@@ -1,70 +0,0 @@
"""Bump version, commit, push, and deploy an app."""
import argparse
import json
import os
import re
from cli.helpers import (
PROJECT_ROOT, CYAN, GREEN, YELLOW, NC,
docker_compose, log_err, log_ok, run_check,
)
def _next_version(current: str, bump: str) -> str | None:
    """Compute the next semver string, or None for an unknown bump type.

    *bump* may be an explicit 'x.y.z' version or one of patch/minor/major.
    Raises ValueError when *current* is not a plain x.y.z version.
    """
    if re.match(r"^\d+\.\d+\.\d+$", bump):
        return bump
    major, minor, patch = (int(x) for x in current.split("."))
    if bump == "major":
        return f"{major + 1}.0.0"
    if bump == "minor":
        return f"{major}.{minor + 1}.0"
    if bump == "patch":
        return f"{major}.{minor}.{patch + 1}"
    return None


def run(args: argparse.Namespace, config: dict) -> int:
    """Bump an app's package.json version, commit, push, and redeploy it.

    Returns 0 on success, 1 when the app or bump spec is invalid.
    """
    app = args.app
    bump = args.bump

    # Locate the app's package.json (frontends/<app>/ first, then <app>/).
    pkg_path = None
    for candidate in [
        PROJECT_ROOT / "frontends" / app / "package.json",
        PROJECT_ROOT / app / "package.json",
    ]:
        if candidate.exists():
            pkg_path = candidate
            break
    if not pkg_path:
        log_err(f"Cannot find package.json for '{app}'")
        return 1

    with open(pkg_path) as f:
        pkg = json.load(f)
    current = pkg["version"]

    # Robustness fix: a non-'x.y.z' current version used to raise an
    # uncaught ValueError; report it cleanly instead.
    try:
        next_ver = _next_version(current, bump)
    except ValueError:
        log_err(f"Cannot parse current version '{current}' as x.y.z")
        return 1
    if next_ver is None:
        log_err(f"Unknown bump type '{bump}'. Use patch, minor, major, or x.y.z")
        return 1

    print(f"{CYAN}Releasing {app}: {YELLOW}{current}{CYAN} -> {GREEN}{next_ver}{NC}")

    # Persist the bumped version (trailing newline matches npm's style).
    pkg["version"] = next_ver
    with open(pkg_path, "w") as f:
        json.dump(pkg, f, indent=2)
        f.write("\n")

    # Commit, push, and redeploy.
    os.chdir(PROJECT_ROOT)
    run_check(["git", "add", str(pkg_path)])
    run_check(["git", "commit", "-m",
               f"chore: bump {app} to v{next_ver}\n\n"
               f"Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>"])
    run_check(["git", "push", "origin", "main"])
    print(f"{CYAN}Building and deploying {app}...{NC}")
    run_check(docker_compose("up", "-d", "--build", app))
    log_ok(f"{app} v{next_ver} deployed")
    return 0

View File

@@ -1,131 +0,0 @@
"""Manage the full MetaBuilder stack (up, down, build, restart, logs, ps, clean)."""
import argparse
import subprocess
import sys
import time
from cli.helpers import (
GREEN, YELLOW, BLUE, RED, NC,
docker_compose, log_info, log_ok, log_warn, log_err, pull_with_retry,
run as run_shell,
)
def _pull_external_images(profiles: list[str], config: dict) -> None:
    """Pre-pull third-party images so 'compose up' never blocks on downloads.

    *profiles* is the raw ["--profile", NAME, ...] argv fragment; the
    monitoring/media image lists are appended when those profiles are active.
    """
    ext = config["definitions"]["external_images"]
    images = list(ext["core"])
    if "--profile" in profiles:
        active = [profiles[i + 1] for i in range(len(profiles)) if profiles[i] == "--profile"]
        if "monitoring" in active:
            images += ext["monitoring"]
        if "media" in active:
            images += ext["media"]
    log_info(f"Pre-pulling {len(images)} external images...")
    failures = 0
    for idx, img in enumerate(images, 1):
        print(f" [{idx}/{len(images)}] {img}")
        if not pull_with_retry(img):
            failures += 1
    if failures:
        log_warn(f"{failures} image(s) failed to pull. Stack may be incomplete.")
    else:
        log_ok("All images ready.")
def _wait_for_healthy(profiles: list[str], args: argparse.Namespace) -> None:
    """Poll 'compose ps' until the expected number of services is healthy.

    The expected count is 23 core services, plus 9 with the monitoring
    profile and 3 with media. Gives up after ~120s of polling.
    NOTE(review): health is detected by counting '"healthy"' substrings in
    the ps JSON output — presumably adequate, but parsing the JSON would
    be more robust; confirm before changing.
    """
    expected = 23
    profile_info = "core"
    if args.monitoring or args.all_profiles:
        expected += 9
        profile_info += " + monitoring"
    if args.media or args.all_profiles:
        expected += 3
        profile_info += " + media"
    print(f"{YELLOW}Waiting for services ({profile_info})...{NC}")
    max_wait = 120
    for elapsed in range(0, max_wait, 2):
        proc = subprocess.run(
            docker_compose(*profiles, "ps", "--format", "json"),
            capture_output=True, text=True,
        )
        healthy = proc.stdout.count('"healthy"')
        if healthy >= expected:
            print(f"\n{GREEN}All {expected} services healthy!{NC}")
            print(f"\nPortal: {BLUE}http://localhost{NC}\n")
            print("Quick commands:")
            print(" python3 deployment.py stack logs")
            print(" python3 deployment.py stack down")
            return
        sys.stdout.write(f"\r Services healthy: {healthy}/{expected} ({elapsed}s)")
        sys.stdout.flush()
        time.sleep(2)
    print(f"\n{YELLOW}Timeout waiting for all services.{NC}")
    print(" python3 deployment.py stack ps")
def run_cmd(args: argparse.Namespace, config: dict) -> int:
    """Entry point for 'stack': up/down/build/restart/logs/ps/clean.

    Profile flags (--monitoring / --media / --all) are translated into
    'docker compose --profile' arguments shared by every subcommand.
    Returns 0 on success, 1 for a missing docker compose or unknown command.
    """
    profiles: list[str] = []
    if args.monitoring or args.all_profiles:
        profiles += ["--profile", "monitoring"]
    if args.media or args.all_profiles:
        profiles += ["--profile", "media"]
    command = args.command or "up"

    # docker compose v2 must be available before anything else runs.
    if subprocess.run(["docker", "compose", "version"], capture_output=True).returncode != 0:
        log_err("docker compose not found")
        return 1

    if command in ("down", "stop"):
        log_info("Stopping MetaBuilder stack...")
        run_shell(docker_compose(*profiles, "down"))
        log_ok("Stack stopped")
        return 0
    if command == "restart":
        run_shell(docker_compose(*profiles, "restart"))
        log_ok("Stack restarted")
        return 0
    if command == "logs":
        run_shell(docker_compose(*profiles, "logs", "-f"))
        return 0
    if command in ("ps", "status"):
        run_shell(docker_compose(*profiles, "ps"))
        return 0
    if command == "clean":
        # Destructive: removes volumes too, so require a literal "yes".
        reply = input(f"{RED}This will remove all containers and volumes! Are you sure? (yes/no): {NC}")
        if reply.strip() == "yes":
            run_shell(docker_compose(*profiles, "down", "-v"))
            log_ok("Stack cleaned")
        return 0
    if command == "build":
        log_info("Building MetaBuilder stack...")
        _pull_external_images(profiles, config)
        run_shell(docker_compose(*profiles, "up", "-d", "--build"))
        log_ok("Stack built and started")
        return 0
    if command in ("up", "start"):
        log_info("Starting MetaBuilder stack...")
        _pull_external_images(profiles, config)
        run_shell(docker_compose(*profiles, "up", "-d"))
        print(f"\n{GREEN}Stack started!{NC}\n")
        _wait_for_healthy(profiles, args)
        return 0
    log_err(f"Unknown command: {command}")
    return 1


run = run_cmd

View File

@@ -2,7 +2,7 @@
#
# Primary adapter is configured here. Additional backends (cache, search,
# secondary databases) are configured via environment variables in
# compose.yml:
# docker-compose.stack.yml:
#
# DBAL_ADAPTER Primary adapter type (postgres, mysql, sqlite, etc.)
# DATABASE_URL Primary database connection string

View File

@@ -1,12 +0,0 @@
# Grafana dashboard provisioning: a file provider that loads dashboard JSON
# from the provisioning directory into the "MetaBuilder" folder.
apiVersion: 1
providers:
  - name: MetaBuilder
    orgId: 1
    folder: MetaBuilder           # Grafana UI folder the dashboards appear in
    type: file                    # file provider: dashboards are read from disk
    disableDeletion: false        # dashboards may be deleted from the UI
    editable: true                # dashboards may be edited from the UI
    options:
      path: /etc/grafana/provisioning/dashboards
      foldersFromFilesStructure: false

View File

@@ -1,804 +0,0 @@
{
"__inputs": [
{
"name": "DS_PROMETHEUS",
"label": "Prometheus",
"description": "Prometheus datasource for DBAL metrics",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "10.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"description": "Overview dashboard for the MetaBuilder DBAL C++ daemon",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 1,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
"id": 100,
"title": "Key Metrics",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 },
"id": 1,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "dbal_uptime_seconds",
"legendFormat": "Uptime",
"range": true,
"refId": "A"
}
],
"title": "Uptime",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "blue", "value": null }
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 },
"id": 2,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "dbal_requests_total",
"legendFormat": "Total Requests",
"range": true,
"refId": "A"
}
],
"title": "Total Requests",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 0.01 },
{ "color": "red", "value": 0.05 }
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 },
"id": 3,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(dbal_errors_total[5m])",
"legendFormat": "Error Rate",
"range": true,
"refId": "A"
}
],
"title": "Error Rate",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 50 },
{ "color": "red", "value": 100 }
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 12, "y": 1 },
"id": 4,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "dbal_active_connections",
"legendFormat": "Active Connections",
"range": true,
"refId": "A"
}
],
"title": "Active Connections",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "blue", "value": null }
]
}
},
"overrides": []
},
"gridPos": { "h": 4, "w": 8, "x": 16, "y": 1 },
"id": 5,
"options": {
"colorMode": "none",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "/^version$/",
"values": false
},
"textMode": "value"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "dbal_info",
"legendFormat": "{{version}}",
"range": true,
"refId": "A"
}
],
"title": "Version Info",
"type": "stat"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 5 },
"id": 101,
"title": "Request Rates",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "req/s",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "auto",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 6 },
"id": 6,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(dbal_requests_by_method_total[5m])",
"legendFormat": "{{method}}",
"range": true,
"refId": "A"
}
],
"title": "Request Rate by Method",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "req/s",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "auto",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "reqps"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 6 },
"id": 7,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(dbal_responses_by_status_total[5m])",
"legendFormat": "{{status}}",
"range": true,
"refId": "A"
}
],
"title": "Response Status Codes",
"type": "timeseries"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 14 },
"id": 102,
"title": "Infrastructure",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "CPU",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 20,
"gradientMode": "scheme",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "red", "value": 0.9 }
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 15 },
"id": 8,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(container_cpu_usage_seconds_total{name=\"metabuilder-dbal\"}[5m])",
"legendFormat": "DBAL CPU",
"range": true,
"refId": "A"
}
],
"title": "Container CPU Usage",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "Memory",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 20,
"gradientMode": "scheme",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "red", "value": 536870912 }
]
},
"unit": "bytes"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 15 },
"id": 9,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "container_memory_usage_bytes{name=\"metabuilder-dbal\"}",
"legendFormat": "DBAL Memory",
"range": true,
"refId": "A"
}
],
"title": "Container Memory Usage",
"type": "timeseries"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 23 },
"id": 103,
"title": "Database",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "Connections",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "auto",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "red", "value": 80 }
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 24 },
"id": 10,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "pg_stat_activity_count",
"legendFormat": "{{datname}} - {{state}}",
"range": true,
"refId": "A"
}
],
"title": "PostgreSQL Active Connections",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "Memory",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 20,
"gradientMode": "scheme",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "red", "value": 268435456 }
]
},
"unit": "bytes"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 24 },
"id": 11,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "redis_memory_used_bytes",
"legendFormat": "Redis Memory",
"range": true,
"refId": "A"
}
],
"title": "Redis Memory Usage",
"type": "timeseries"
}
],
"refresh": "15s",
"schemaVersion": 39,
"style": "dark",
"tags": ["dbal", "metabuilder"],
"templating": {
"list": []
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"refresh_intervals": ["5s", "10s", "15s", "30s", "1m", "5m"],
"time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d"]
},
"timezone": "browser",
"title": "DBAL Daemon Overview",
"uid": "dbal-overview",
"version": 1,
"weekStart": ""
}

View File

@@ -1,820 +0,0 @@
{
"__inputs": [
{
"name": "DS_PROMETHEUS",
"label": "Prometheus",
"description": "Prometheus datasource for infrastructure metrics",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "10.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"description": "Infrastructure overview for the MetaBuilder stack",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 1,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
"id": 100,
"title": "Service Health",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 6, "x": 0, "y": 1 },
"id": 1,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "count(up == 1)",
"legendFormat": "Services Up",
"instant": true,
"refId": "A"
}
],
"title": "Services Up",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "red", "value": 1 }
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 6, "x": 6, "y": 1 },
"id": 2,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "count(up == 0)",
"legendFormat": "Services Down",
"instant": true,
"refId": "A"
}
],
"title": "Services Down",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 60 },
{ "color": "red", "value": 85 }
]
},
"max": 100,
"min": 0,
"unit": "percent"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 6, "x": 12, "y": 1 },
"id": 3,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "100 - (avg(rate(node_cpu_seconds_total{mode=\"idle\"}[5m])) * 100)",
"legendFormat": "CPU %",
"instant": true,
"refId": "A"
}
],
"title": "Host CPU",
"type": "gauge"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 60 },
{ "color": "red", "value": 85 }
]
},
"max": 100,
"min": 0,
"unit": "percent"
},
"overrides": []
},
"gridPos": { "h": 4, "w": 6, "x": 18, "y": 1 },
"id": 4,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "(1 - node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) * 100",
"legendFormat": "Memory %",
"instant": true,
"refId": "A"
}
],
"title": "Host Memory",
"type": "gauge"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 5 },
"id": 101,
"title": "Host Resources",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "CPU %",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 30,
"gradientMode": "opacity",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "normal" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 6 },
"id": 5,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "avg by (mode) (rate(node_cpu_seconds_total{mode!=\"idle\"}[5m]))",
"legendFormat": "{{mode}}",
"range": true,
"refId": "A"
}
],
"title": "CPU Usage Over Time",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "Memory",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 20,
"gradientMode": "scheme",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "bytes"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 6 },
"id": 6,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "node_memory_MemTotal_bytes",
"legendFormat": "Total",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "node_memory_MemTotal_bytes - node_memory_MemAvailable_bytes",
"legendFormat": "Used",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "node_memory_MemAvailable_bytes",
"legendFormat": "Available",
"range": true,
"refId": "C"
}
],
"title": "Memory Usage Over Time",
"type": "timeseries"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 14 },
"id": 102,
"title": "Containers",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "CPU",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 15 },
"id": 7,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "topk(10, rate(container_cpu_usage_seconds_total{name!=\"\"}[5m]))",
"legendFormat": "{{name}}",
"range": true,
"refId": "A"
}
],
"title": "Container CPU (Top 10)",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "Memory",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "bytes"
},
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 15 },
"id": 8,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "topk(10, container_memory_usage_bytes{name!=\"\"})",
"legendFormat": "{{name}}",
"range": true,
"refId": "A"
}
],
"title": "Container Memory (Top 10)",
"type": "timeseries"
},
{
"collapsed": false,
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 23 },
"id": 103,
"title": "Network & Disk",
"type": "row"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "Bytes/s",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "Bps"
},
"overrides": [
{
"matcher": { "id": "byRegexp", "options": "Transmit.*" },
"properties": [
{
"id": "custom.transform",
"value": "negative-Y"
}
]
}
]
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 24 },
"id": 9,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(node_network_receive_bytes_total{device!~\"lo|veth.*|br-.*\"}[5m])",
"legendFormat": "Receive {{device}}",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(node_network_transmit_bytes_total{device!~\"lo|veth.*|br-.*\"}[5m])",
"legendFormat": "Transmit {{device}}",
"range": true,
"refId": "B"
}
],
"title": "Network I/O",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "Bytes/s",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 15,
"gradientMode": "none",
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null }
]
},
"unit": "Bps"
},
"overrides": [
{
"matcher": { "id": "byRegexp", "options": "Write.*" },
"properties": [
{
"id": "custom.transform",
"value": "negative-Y"
}
]
}
]
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 24 },
"id": 10,
"options": {
"legend": {
"calcs": ["mean", "max"],
"displayMode": "table",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "10.0.0",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(node_disk_read_bytes_total[5m])",
"legendFormat": "Read {{device}}",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "prometheus"
},
"editorMode": "code",
"expr": "rate(node_disk_written_bytes_total[5m])",
"legendFormat": "Write {{device}}",
"range": true,
"refId": "B"
}
],
"title": "Disk I/O",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 39,
"style": "dark",
"tags": ["infrastructure", "metabuilder"],
"templating": {
"list": []
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"refresh_intervals": ["5s", "10s", "15s", "30s", "1m", "5m"],
"time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d"]
},
"timezone": "browser",
"title": "Infrastructure Overview",
"uid": "infra-overview",
"version": 1,
"weekStart": ""
}

View File

@@ -1,163 +0,0 @@
{
"annotations": { "list": [] },
"description": "Starlink dish metrics via danopstech/starlink_exporter",
"editable": true,
"graphTooltip": 1,
"id": null,
"links": [],
"panels": [
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "ms", "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }, { "color": "yellow", "value": 40 }, { "color": "red", "value": 100 }] } },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 0, "y": 0 },
"id": 1,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
"title": "Ping Latency",
"type": "stat",
"targets": [{ "expr": "starlink_dish_pop_ping_latency_ms", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "bps", "thresholds": { "mode": "absolute", "steps": [{ "color": "red", "value": null }, { "color": "yellow", "value": 50000000 }, { "color": "green", "value": 100000000 }] } },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 4, "y": 0 },
"id": 2,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
"title": "Download Speed",
"type": "stat",
"targets": [{ "expr": "starlink_dish_downlink_throughput_bps", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "bps", "thresholds": { "mode": "absolute", "steps": [{ "color": "red", "value": null }, { "color": "yellow", "value": 5000000 }, { "color": "green", "value": 10000000 }] } },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 8, "y": 0 },
"id": 3,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
"title": "Upload Speed",
"type": "stat",
"targets": [{ "expr": "starlink_dish_uplink_throughput_bps", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "s", "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] } },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 12, "y": 0 },
"id": 4,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "none" },
"title": "Uptime",
"type": "stat",
"targets": [{ "expr": "starlink_dish_uptime_seconds", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "percent", "min": 0, "max": 100, "thresholds": { "mode": "absolute", "steps": [{ "color": "red", "value": null }, { "color": "yellow", "value": 90 }, { "color": "green", "value": 99 }] } },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 16, "y": 0 },
"id": 5,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
"title": "Signal Quality",
"type": "stat",
"targets": [{ "expr": "starlink_dish_pop_ping_drop_rate * 100", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "none" },
"overrides": []
},
"gridPos": { "h": 4, "w": 4, "x": 20, "y": 0 },
"id": 6,
"options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
"title": "Currently Connected",
"type": "stat",
"targets": [{ "expr": "starlink_dish_currently_obstructed", "legendFormat": "" }]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "bps", "custom": { "lineWidth": 2, "fillOpacity": 15 } },
"overrides": [
{ "matcher": { "id": "byName", "options": "Download" }, "properties": [{ "id": "color", "value": { "fixedColor": "#73BF69", "mode": "fixed" } }] },
{ "matcher": { "id": "byName", "options": "Upload" }, "properties": [{ "id": "color", "value": { "fixedColor": "#5794F2", "mode": "fixed" } }] }
]
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 4 },
"id": 7,
"options": { "tooltip": { "mode": "multi" } },
"title": "Throughput Over Time",
"type": "timeseries",
"targets": [
{ "expr": "starlink_dish_downlink_throughput_bps", "legendFormat": "Download" },
{ "expr": "starlink_dish_uplink_throughput_bps", "legendFormat": "Upload" }
]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "ms", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 4 },
"id": 8,
"options": { "tooltip": { "mode": "multi" } },
"title": "Ping Latency Over Time",
"type": "timeseries",
"targets": [
{ "expr": "starlink_dish_pop_ping_latency_ms", "legendFormat": "Latency" },
{ "expr": "starlink_dish_pop_ping_drop_rate * 1000", "legendFormat": "Drop Rate (x1000)" }
]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "percent", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 12 },
"id": 9,
"options": { "tooltip": { "mode": "multi" } },
"title": "Obstruction Over Time",
"type": "timeseries",
"targets": [
{ "expr": "starlink_dish_fraction_obstructed * 100", "legendFormat": "Obstruction %" }
]
},
{
"datasource": { "type": "prometheus", "uid": "PBFA97CFB590B2093" },
"fieldConfig": {
"defaults": { "unit": "none", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
"overrides": []
},
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 12 },
"id": 10,
"options": { "tooltip": { "mode": "multi" } },
"title": "SNR / Signal Quality",
"type": "timeseries",
"targets": [
{ "expr": "starlink_dish_snr", "legendFormat": "SNR" }
]
}
],
"refresh": "15s",
"schemaVersion": 39,
"tags": ["starlink", "network"],
"templating": { "list": [] },
"time": { "from": "now-6h", "to": "now" },
"timepicker": {},
"timezone": "browser",
"title": "Starlink Dish",
"uid": "starlink-dish",
"version": 1
}

View File

@@ -0,0 +1,58 @@
##
## nginx-smoke — Gateway for deployment smoke tests in CI.
##
## Real apps (workflowui, pastebin) are proxied to playwright's webServer
## processes running on the host (reached via host.docker.internal).
## Remaining apps (codegen, emailclient, etc.) return stub 200 responses
## since they are not started as dev servers in CI.
##
server {
    listen 80;
    server_name localhost;

    # ── Real apps — proxied to playwright webServer processes on host ─────
    location /workflowui {
        proxy_pass http://host.docker.internal:3000;
        proxy_set_header Host localhost;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_read_timeout 120s;
    }
    location /pastebin {
        proxy_pass http://host.docker.internal:3001;
        proxy_set_header Host localhost;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_read_timeout 120s;
    }

    # ── DBAL API — proxied to real C++ daemon ─────────────────────────────
    # Trailing slash on proxy_pass strips the /api/ prefix before forwarding.
    location /api/ {
        proxy_pass http://dbal:8080/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_read_timeout 30s;
    }

    # ── Portal — must contain "MetaBuilder" ───────────────────────────────
    location = / {
        # default_type (not add_header) so the stub is served with a single
        # text/html Content-Type header; add_header Content-Type would be
        # appended ALONGSIDE nginx's default_type value, producing two
        # Content-Type headers in the response.
        default_type text/html;
        return 200 '<html><body><h1>MetaBuilder Portal</h1></body></html>';
    }

    # ── Postgres redirect ─────────────────────────────────────────────────
    location = /postgres {
        return 307 /postgres/dashboard;
    }

    # ── Remaining apps — stub (codegen, emailclient, diagrams, etc.) ──────
    location / {
        default_type text/html;
        return 200 '<html><body>MetaBuilder App</body></html>';
    }
}

View File

@@ -46,11 +46,6 @@ http {
"~*/app" frontend-app;
"~*/postgres" postgres-dashboard;
"~*/emailclient" emailclient-app;
"~*/dbal" dbal-frontend;
"~*/terminal" dockerterminal;
"~*/packagerepo" packagerepo;
"~*/repoforge" repoforge;
"~*/caproverforge" caproverforge;
}
server {
@@ -70,18 +65,6 @@ http {
add_header Content-Type application/json;
}
# ================================================================
# Root favicon
# ================================================================
location = /favicon.ico {
alias /usr/share/nginx/portal/favicon.ico;
access_log off;
}
location = /favicon.svg {
alias /usr/share/nginx/portal/favicon.svg;
access_log off;
}
# ================================================================
# Welcome portal (static HTML)
# ================================================================
@@ -190,79 +173,6 @@ http {
return 301 /storybook/;
}
# DBAL Frontend - Daemon overview + query console
location /dbal {
set $upstream_dbal_frontend dbal-frontend;
proxy_pass http://$upstream_dbal_frontend:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# Docker Terminal
location /terminal {
set $upstream_terminal dockerterminal;
proxy_pass http://$upstream_terminal:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# Docker Terminal Backend API — strips /terminal-api/ prefix
location /terminal-api/ {
set $upstream_terminal_backend dockerterminal-backend;
rewrite ^/terminal-api/(.*)$ /$1 break;
proxy_pass http://$upstream_terminal_backend:5000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
# Package Repo
location /packagerepo {
set $upstream_packagerepo packagerepo;
proxy_pass http://$upstream_packagerepo:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# RepoForge - Android app download portal
location /repoforge {
set $upstream_repoforge repoforge;
proxy_pass http://$upstream_repoforge:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
# CaproverForge - Android app download portal
location /caproverforge {
set $upstream_caproverforge caproverforge;
proxy_pass http://$upstream_caproverforge:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
# Frontend App
location /app {
set $upstream_app frontend-app;
@@ -334,43 +244,6 @@ http {
proxy_buffering off;
}
# ================================================================
# Monitoring (optional, requires --profile monitoring)
# ================================================================
# Grafana - Metrics visualization
# GF_SERVER_ROOT_URL + GF_SERVER_SERVE_FROM_SUB_PATH handles path internally
location /grafana/ {
set $upstream_grafana grafana;
proxy_pass http://$upstream_grafana:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_buffering off;
}
location = /grafana {
return 301 /grafana/;
}
# Prometheus - Metrics scraping
# --web.external-url=/prometheus/ means it expects the prefix in requests
location /prometheus/ {
set $upstream_prometheus prometheus;
proxy_pass http://$upstream_prometheus:9090;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location = /prometheus {
return 301 /prometheus/;
}
# ================================================================
# Backend APIs
# ================================================================

View File

@@ -41,27 +41,3 @@ groups:
severity: warning
annotations:
summary: "Redis memory usage above 90%"
- alert: DBALHighErrorRate
expr: rate(dbal_errors_total[5m]) > 0.1
for: 5m
labels:
severity: warning
annotations:
summary: "DBAL daemon error rate above 0.1/s"
- alert: StarlinkHighLatency
expr: starlink_dish_pop_ping_latency_ms > 100
for: 10m
labels:
severity: warning
annotations:
summary: "Starlink latency above 100ms for 10 minutes"
- alert: StarlinkObstructed
expr: starlink_dish_currently_obstructed == 1
for: 5m
labels:
severity: warning
annotations:
summary: "Starlink dish is obstructed"

View File

@@ -59,8 +59,3 @@ scrape_configs:
metrics_path: '/status-json.xsl'
static_configs:
- targets: ['icecast:8000']
- job_name: 'starlink'
metrics_path: '/metrics'
static_configs:
- targets: ['starlink-exporter:9817']

128
deployment/deploy.sh Executable file
View File

@@ -0,0 +1,128 @@
#!/usr/bin/env bash
# deploy.sh — Quick build + deploy for one or more apps.
#
# Combines "docker compose build" and "docker compose up -d --force-recreate"
# into a single command for the most common workflow, then waits for each
# deployed container to report a healthy status.
#
# Usage:
#   ./deploy.sh codegen               Build and deploy codegen
#   ./deploy.sh codegen pastebin      Build and deploy multiple apps
#   ./deploy.sh --all                 Build and deploy all apps
#   ./deploy.sh --no-cache codegen    Rebuild codegen without the build cache
#
# This replaces the manual workflow of:
#   docker compose build --no-cache codegen
#   docker compose up -d --force-recreate codegen
set -e

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.stack.yml"

ALL_APPS=(workflowui codegen pastebin postgres emailclient exploded-diagrams storybook frontend-app dbal)

# Map friendly app name → compose service name. Echoes "" for unknown names
# so the caller can fail fast with a helpful message.
resolve_service() {
    case "$1" in
        workflowui)        echo "workflowui" ;;
        codegen)           echo "codegen" ;;
        pastebin)          echo "pastebin" ;;
        postgres)          echo "postgres-dashboard" ;;
        emailclient)       echo "emailclient-app" ;;
        exploded-diagrams) echo "exploded-diagrams" ;;
        storybook)         echo "storybook" ;;
        frontend-app)      echo "frontend-app" ;;
        dbal)              echo "dbal" ;;
        *)                 echo "" ;;
    esac
}

if [ $# -eq 0 ]; then
    echo "Usage: ./deploy.sh <app> [app2 ...] | --all"
    echo ""
    echo "Available apps: ${ALL_APPS[*]}"
    exit 1
fi

# Parse arguments: flags may appear anywhere; everything else is an app name.
TARGETS=()
NO_CACHE=false
for arg in "$@"; do
    case "$arg" in
        --all)      TARGETS=("${ALL_APPS[@]}") ;;
        --no-cache) NO_CACHE=true ;;
        *)          TARGETS+=("$arg") ;;
    esac
done

# BUG FIX: previously "./deploy.sh --no-cache" (flags only, no app names)
# left TARGETS empty, and "docker compose build"/"up" invoked with zero
# service arguments silently operate on EVERY service in the stack.
# Require at least one resolved target instead.
if [ ${#TARGETS[@]} -eq 0 ]; then
    echo -e "${RED}No apps specified.${NC}"
    echo "Usage: ./deploy.sh <app> [app2 ...] | --all"
    echo "Available apps: ${ALL_APPS[*]}"
    exit 1
fi

# Resolve friendly names to compose service names, failing fast on typos.
SERVICES=()
for target in "${TARGETS[@]}"; do
    service="$(resolve_service "$target")"
    if [ -z "$service" ]; then
        echo -e "${RED}Unknown app: $target${NC}"
        echo "Available: ${ALL_APPS[*]}"
        exit 1
    fi
    SERVICES+=("$service")
done

echo -e "${BLUE}═══════════════════════════════════════════${NC}"
echo -e "${BLUE}  Deploy: ${TARGETS[*]}${NC}"
echo -e "${BLUE}═══════════════════════════════════════════${NC}"
echo ""

# Step 1: Build the requested services (optionally bypassing the cache).
echo -e "${YELLOW}[1/3] Building...${NC}"
BUILD_ARGS=()
if [ "$NO_CACHE" = true ]; then
    BUILD_ARGS+=("--no-cache")
fi
docker compose -f "$COMPOSE_FILE" build "${BUILD_ARGS[@]}" "${SERVICES[@]}"
echo ""

# Step 2: Recreate containers so the freshly built images are picked up.
echo -e "${YELLOW}[2/3] Deploying...${NC}"
docker compose -f "$COMPOSE_FILE" up -d --force-recreate "${SERVICES[@]}"
echo ""

# Step 3: Poll docker health status for each container (up to ~60s each).
echo -e "${YELLOW}[3/3] Waiting for health checks...${NC}"
HEALTHY=true
for service in "${SERVICES[@]}"; do
    # Container names follow the metabuilder-<service> convention.
    container="metabuilder-${service}"
    echo -n "  $service: "
    status="missing"
    for _ in $(seq 1 30); do
        # "missing" covers both a nonexistent container and one without a
        # healthcheck configured.
        status=$(docker inspect --format='{{.State.Health.Status}}' "$container" 2>/dev/null || echo "missing")
        if [ "$status" = "healthy" ]; then
            echo -e "${GREEN}healthy${NC}"
            break
        elif [ "$status" = "unhealthy" ]; then
            echo -e "${RED}unhealthy${NC}"
            HEALTHY=false
            break
        fi
        sleep 2
    done
    # Neither healthy nor unhealthy after 30 polls → report a timeout.
    if [ "$status" != "healthy" ] && [ "$status" != "unhealthy" ]; then
        echo -e "${YELLOW}timeout (status: $status)${NC}"
        HEALTHY=false
    fi
done
echo ""

if [ "$HEALTHY" = true ]; then
    echo -e "${GREEN}✓ All services deployed and healthy${NC}"
else
    echo -e "${YELLOW}⚠ Some services are not healthy — check with: docker compose -f $COMPOSE_FILE ps${NC}"
fi

View File

@@ -1,48 +0,0 @@
#!/usr/bin/env python3
"""MetaBuilder Deployment CLI — JSON-powered, modular command system.
All command definitions live in cli/commands.json.
Each command dispatches to a Python module in cli/*.py.
Usage:
python3 deployment.py --help
python3 deployment.py build base [--force] [apt] [node-deps] ...
python3 deployment.py build apps [--force] [--sequential] [codegen] ...
python3 deployment.py build testcontainers [--skip-native] [--skip-sidecar]
python3 deployment.py deploy <app> [--all] [--no-cache]
python3 deployment.py stack up|down|build|logs|ps|clean [--monitoring] [--media]
python3 deployment.py release <app> [patch|minor|major|x.y.z]
python3 deployment.py nexus init|push|populate
python3 deployment.py npm publish-patches [--nexus] [--verdaccio]
python3 deployment.py artifactory init
"""
import sys
from cli.loader import build_parser, dispatch
def main() -> int:
    """Entry point for the deployment CLI.

    Builds the argparse parser from cli/commands.json, then either prints
    help, handles the special ``build base --list`` query, or dispatches
    to the Python module registered for the parsed subcommand.

    Returns:
        Process exit code (0 on success, or whatever dispatch() returns).
    """
    parser, config = build_parser()
    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return 0

    # Special case: "build base --list" just enumerates known base images.
    wants_base_list = (
        args.command == "build"
        and getattr(args, "build_type", None) == "base"
        and getattr(args, "list", False)
    )
    if wants_base_list:
        for name, img in config["definitions"]["base_images"].items():
            print(f" {name} -> {img['tag']}")
        return 0

    # Each runnable subcommand carries a _module attribute naming its
    # implementation; without one, fall back to the group's help text.
    module_path = getattr(args, "_module", None)
    if not module_path:
        parser.parse_args([args.command, "--help"])
        return 0

    return dispatch(args, config)


if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,124 @@
# Local Package Registries — Nexus (Docker + npm) + Artifactory CE (Conan2)
#
# Nexus (Sonatype):
# 8091 → Nexus web UI (admin / nexus)
# 5050 → Docker hosted repository (push + pull)
# Repos: Docker (local), npm (hosted + proxy + group)
#
# Artifactory CE (JFrog):
# 8092 → Artifactory web UI (admin / password)
# Repos: Conan2 (local + remote), generic
#
# Prerequisites — Docker Desktop must trust the insecure local registry:
# Docker Desktop → Settings → Docker Engine → add to "insecure-registries":
# "insecure-registries": ["localhost:5050"]
# Then restart Docker Desktop.
#
# Note: Nexus ships amd64-only. On Apple Silicon it runs via Rosetta (works fine).
#
# Usage:
# docker compose -f docker-compose.nexus.yml up -d
# # Wait ~2 min for init containers to finish, then:
# ./push-to-nexus.sh # Docker images → Nexus
# ./publish-npm-patches.sh # Patched npm packages → Nexus
# conan remote add artifactory http://localhost:8092/artifactory/api/conan/conan-local
#
# URLs:
# Nexus: http://localhost:8091 (admin / nexus)
# Artifactory: http://localhost:8092 (admin / password)
# npm group: http://localhost:8091/repository/npm-group/
# Conan2: http://localhost:8092/artifactory/api/conan/conan-local
services:
# ============================================================================
# Sonatype Nexus — Docker registry + npm packages
# ============================================================================
nexus:
image: sonatype/nexus3:3.75.0
platform: linux/amd64 # Nexus has no ARM64 image; runs via Rosetta on Apple Silicon
container_name: nexus-registry
restart: unless-stopped
ports:
- "8091:8081" # Web UI + REST API
- "5050:5050" # Docker hosted repository
volumes:
- nexus-data:/nexus-data
environment:
INSTALL4J_ADD_VM_PARAMS: "-Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g"
healthcheck:
test: ["CMD-SHELL", "curl -sf http://localhost:8081/service/rest/v1/status || exit 1"]
interval: 30s
timeout: 10s
retries: 15
start_period: 120s
networks:
- registry-net
nexus-init:
image: alpine:3.21
container_name: nexus-init
depends_on:
nexus:
condition: service_healthy
volumes:
- nexus-data:/nexus-data:ro
- ./nexus-init.sh:/nexus-init.sh:ro
entrypoint: ["/bin/sh", "-c", "apk add --no-cache curl -q && sh /nexus-init.sh"]
environment:
NEXUS_URL: "http://nexus:8081"
NEXUS_ADMIN_NEW_PASS: "nexus"
DOCKER_REPO_PORT: "5050"
restart: "no"
networks:
- registry-net
# ============================================================================
# JFrog Artifactory CE — Conan2 package repository
# ============================================================================
artifactory:
image: releases-docker.jfrog.io/jfrog/artifactory-cpp-ce:latest
container_name: artifactory-ce
restart: unless-stopped
ports:
- "8092:8081" # Web UI + REST API
- "8093:8082" # Service port (router/health)
volumes:
- artifactory-data:/var/opt/jfrog/artifactory
environment:
JF_SHARED_DATABASE_TYPE: derby
JF_SHARED_DATABASE_ALLOWNONPOSTGRESQL: "true"
EXTRA_JAVA_OPTIONS: "-Xms512m -Xmx2g"
healthcheck:
test: ["CMD-SHELL", "curl -sf http://localhost:8082/router/api/v1/system/health || exit 1"]
interval: 30s
timeout: 10s
retries: 15
start_period: 120s
networks:
- registry-net
artifactory-init:
image: alpine:3.21
container_name: artifactory-init
depends_on:
artifactory:
condition: service_healthy
volumes:
- ./artifactory-init.sh:/artifactory-init.sh:ro
entrypoint: ["/bin/sh", "-c", "apk add --no-cache curl -q && sh /artifactory-init.sh"]
environment:
ARTIFACTORY_URL: "http://artifactory:8081"
ARTIFACTORY_ADMIN_PASS: "password"
restart: "no"
networks:
- registry-net
volumes:
nexus-data:
name: nexus-data
artifactory-data:
name: artifactory-data
networks:
registry-net:
name: registry-net

View File

@@ -0,0 +1,221 @@
# docker-compose.smoke.yml — Smoke test stack for CI.
#
# Includes a real DBAL daemon backed by PostgreSQL so E2E tests can seed
# and query data. Admin tools (phpMyAdmin, Mongo Express, RedisInsight)
# and an nginx gateway round out the stack.
#
# Usage:
# docker compose -f deployment/docker-compose.smoke.yml up -d --wait
# PLAYWRIGHT_BASE_URL=http://localhost/workflowui/ npx playwright test e2e/deployment-smoke.spec.ts
# docker compose -f deployment/docker-compose.smoke.yml down -v
services:
# ── Gateway stub ──────────────────────────────────────────────────────────
nginx:
image: nginx:alpine
ports:
- "80:80"
volumes:
- ./config/nginx-smoke/default.conf:/etc/nginx/conf.d/default.conf:ro
# host.docker.internal lets nginx proxy to playwright's webServer processes
# running on the host (workflowui :3000, pastebin :3001).
# On Linux (GitHub Actions) this requires the host-gateway extra_host.
extra_hosts:
- "host.docker.internal:host-gateway"
depends_on:
dbal:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1/"]
interval: 5s
timeout: 3s
retries: 10
networks:
- smoke
# ── DBAL + PostgreSQL ────────────────────────────────────────────────────
postgres:
image: postgres:15-alpine
environment:
POSTGRES_USER: metabuilder
POSTGRES_PASSWORD: metabuilder
POSTGRES_DB: metabuilder
tmpfs:
- /var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U metabuilder"]
interval: 5s
timeout: 3s
retries: 10
networks:
- smoke
dbal-init:
image: ${DBAL_INIT_IMAGE:-ghcr.io/johndoe6345789/metabuilder/dbal-init:latest}
build:
context: ..
dockerfile: deployment/config/dbal/Dockerfile.init
volumes:
- dbal-schemas:/target/schemas/entities
- dbal-templates:/target/templates/sql
networks:
- smoke
dbal:
image: ${DBAL_IMAGE:-ghcr.io/johndoe6345789/metabuilder/dbal:latest}
build:
context: ../dbal
dockerfile: production/build-config/Dockerfile
ports:
- "8080:8080"
environment:
DBAL_ADAPTER: postgres
DATABASE_URL: "postgresql://metabuilder:metabuilder@postgres:5432/metabuilder"
DBAL_SCHEMA_DIR: /app/schemas/entities
DBAL_TEMPLATE_DIR: /app/templates/sql
DBAL_SEED_DIR: /app/seeds/database
DBAL_SEED_ON_STARTUP: "true"
DBAL_BIND_ADDRESS: 0.0.0.0
DBAL_PORT: 8080
DBAL_MODE: production
DBAL_DAEMON: "true"
DBAL_LOG_LEVEL: info
DBAL_AUTO_CREATE_TABLES: "true"
DBAL_ENABLE_HEALTH_CHECK: "true"
DBAL_ADMIN_TOKEN: "smoke-test-admin-token"
DBAL_CORS_ORIGIN: "*"
JWT_SECRET_KEY: "test-secret"
volumes:
- dbal-schemas:/app/schemas/entities:ro
- dbal-templates:/app/templates/sql:ro
depends_on:
dbal-init:
condition: service_completed_successfully
postgres:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/health"]
interval: 5s
timeout: 3s
retries: 15
start_period: 10s
networks:
- smoke
# ── Infrastructure (stock images) ─────────────────────────────────────────
mysql:
image: mysql:8.0
environment:
MYSQL_ROOT_PASSWORD: metabuilder
MYSQL_USER: metabuilder
MYSQL_PASSWORD: metabuilder
MYSQL_DATABASE: metabuilder
command: --default-authentication-plugin=mysql_native_password
tmpfs:
- /var/lib/mysql
healthcheck:
test: ["CMD", "mysqladmin", "ping", "-h", "127.0.0.1", "-u", "root", "-pmetabuilder"]
interval: 5s
timeout: 5s
retries: 15
start_period: 20s
networks:
- smoke
mongodb:
image: mongo:7.0
environment:
MONGO_INITDB_ROOT_USERNAME: metabuilder
MONGO_INITDB_ROOT_PASSWORD: metabuilder
tmpfs:
- /data/db
healthcheck:
test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')", "--quiet"]
interval: 5s
timeout: 5s
retries: 15
networks:
- smoke
redis:
image: redis:7-alpine
tmpfs:
- /data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 3s
timeout: 3s
retries: 10
networks:
- smoke
# ── Admin tools (real containers, specific ports to match smoke tests) ────
phpmyadmin:
image: phpmyadmin:latest
ports:
- "8081:80"
environment:
PMA_HOST: mysql
PMA_PORT: "3306"
PMA_USER: metabuilder
PMA_PASSWORD: metabuilder
MYSQL_ROOT_PASSWORD: metabuilder
depends_on:
mysql:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-sf", "http://127.0.0.1/"]
interval: 10s
timeout: 5s
retries: 10
start_period: 15s
networks:
- smoke
mongo-express:
image: mongo-express:latest
ports:
- "8082:8081"
environment:
ME_CONFIG_MONGODB_ADMINUSERNAME: metabuilder
ME_CONFIG_MONGODB_ADMINPASSWORD: metabuilder
ME_CONFIG_MONGODB_URL: mongodb://metabuilder:metabuilder@mongodb:27017/?authSource=admin
ME_CONFIG_BASICAUTH: "false"
depends_on:
mongodb:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:8081/"]
interval: 10s
timeout: 5s
retries: 10
start_period: 20s
networks:
- smoke
redisinsight:
image: redis/redisinsight:latest
ports:
- "8083:5540"
depends_on:
redis:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:5540/api/health"]
interval: 10s
timeout: 5s
retries: 10
start_period: 10s
networks:
- smoke
volumes:
dbal-schemas:
driver: local
dbal-templates:
driver: local
networks:
smoke:
driver: bridge

View File

@@ -2,9 +2,7 @@
#
# Gateway: nginx on port 80 with welcome portal + subpath routing
# Apps: WorkflowUI, CodeForge, Pastebin, Postgres Dashboard,
# Email Client, Exploded Diagrams, Storybook, Frontend App,
# DBAL Frontend, Docker Terminal, Package Repo,
# RepoForge Portal, CaproverForge Portal
# Email Client, Exploded Diagrams, Storybook, Frontend App
# Backend: DBAL C++, Email Service (Flask), Pastebin Backend (Flask)
# Infra: PostgreSQL, MySQL, MongoDB, Redis, Elasticsearch,
# phpMyAdmin, Mongo Express, RedisInsight, Kibana
@@ -15,10 +13,10 @@
# --profile media Media daemon (FFmpeg/radio/retro), native HTTP streaming, HLS
#
# Usage:
# docker compose -f compose.yml up -d
# docker compose -f compose.yml --profile monitoring up -d
# docker compose -f compose.yml --profile media up -d
# docker compose -f compose.yml --profile monitoring --profile media up -d
# docker compose -f docker-compose.stack.yml up -d
# docker compose -f docker-compose.stack.yml --profile monitoring up -d
# docker compose -f docker-compose.stack.yml --profile media up -d
# docker compose -f docker-compose.stack.yml --profile monitoring --profile media up -d
#
# Access:
# http://localhost Welcome portal
@@ -30,13 +28,6 @@
# http://localhost/diagrams Exploded Diagrams
# http://localhost/storybook Storybook
# http://localhost/app Frontend App
# http://localhost/dbal DBAL Frontend
# http://localhost/terminal Docker Terminal
# http://localhost/packagerepo Package Repo
# http://localhost/repoforge RepoForge APK Portal
# http://localhost/caproverforge CaproverForge APK Portal
# http://localhost/grafana Grafana (monitoring profile)
# http://localhost/prometheus Prometheus (monitoring profile)
# http://localhost/api DBAL API
# http://localhost/pastebin-api Pastebin Flask API
# http://localhost/phpmyadmin/ phpMyAdmin (MySQL admin)
@@ -56,8 +47,8 @@ services:
ports:
- "4873:4873"
volumes:
- ../verdaccio.yaml:/verdaccio/conf/config.yaml:ro
- ../npm-patches:/verdaccio/patches:ro
- ./verdaccio.yaml:/verdaccio/conf/config.yaml:ro
- ./npm-patches:/verdaccio/patches:ro
- verdaccio-storage:/verdaccio/storage
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:4873/-/ping"]
@@ -283,7 +274,7 @@ services:
# DBAL config seeder — copies schemas and SQL templates into named volumes
dbal-init:
build:
context: ../..
context: ..
dockerfile: deployment/config/dbal/Dockerfile.init
container_name: metabuilder-dbal-init
volumes:
@@ -295,7 +286,7 @@ services:
# DBAL Daemon - C++ backend
dbal:
build:
context: ../../dbal
context: ../dbal
dockerfile: production/build-config/Dockerfile
args:
BUILD_TYPE: Release
@@ -367,7 +358,7 @@ services:
# WorkflowUI - Visual workflow editor (n8n-style)
workflowui:
build:
context: ../..
context: ..
dockerfile: frontends/workflowui/Dockerfile
args:
NODE_ENV: production
@@ -418,7 +409,7 @@ services:
# Dovecot - IMAP/POP3 server with TLS
dovecot:
build:
context: ../../frontends/emailclient/deployment/docker/dovecot
context: ../frontends/emailclient/deployment/docker/dovecot
dockerfile: Dockerfile
container_name: metabuilder-dovecot
restart: unless-stopped
@@ -441,7 +432,7 @@ services:
# SMTP Relay - Twisted relay pointing at local Postfix
smtp-relay:
build:
context: ../../services/smtprelay
context: ../services/smtprelay
dockerfile: Dockerfile
container_name: metabuilder-smtp-relay
restart: unless-stopped
@@ -476,7 +467,7 @@ services:
# Email Service - Flask backend for IMAP/SMTP operations
email-service:
build:
context: ../../frontends/emailclient
context: ../frontends/emailclient
dockerfile: deployment/docker/email-service/Dockerfile
container_name: metabuilder-email-service
restart: unless-stopped
@@ -524,7 +515,7 @@ services:
# nginx - Reverse proxy + welcome portal (config baked into image)
nginx:
build:
context: ../..
context: ..
dockerfile: deployment/config/nginx/Dockerfile
container_name: metabuilder-nginx
restart: unless-stopped
@@ -545,7 +536,7 @@ services:
# CodeForge IDE - Next.js + Monaco code editor
codegen:
build:
context: ../..
context: ..
dockerfile: frontends/codegen/Dockerfile
container_name: metabuilder-codegen
restart: unless-stopped
@@ -567,7 +558,7 @@ services:
# Pastebin - Code snippet sharing (dev mode: Turbopack + hot reload)
pastebin:
build:
context: ../..
context: ..
dockerfile: frontends/pastebin/Dockerfile.dev
container_name: metabuilder-pastebin
restart: unless-stopped
@@ -595,7 +586,7 @@ services:
# Pastebin Flask Backend - REST API for snippet storage
pastebin-backend:
build:
context: ../..
context: ..
dockerfile: frontends/pastebin/backend/Dockerfile
container_name: metabuilder-pastebin-backend
restart: unless-stopped
@@ -630,7 +621,7 @@ services:
# Postgres Dashboard - Database admin
postgres-dashboard:
build:
context: ../..
context: ..
dockerfile: frontends/postgres/Dockerfile
container_name: metabuilder-postgres-dashboard
restart: unless-stopped
@@ -640,7 +631,6 @@ services:
NODE_ENV: production
PORT: 3000
DATABASE_URL: "postgresql://${POSTGRES_USER:-metabuilder}:${POSTGRES_PASSWORD:-metabuilder}@postgres:5432/${POSTGRES_DB:-metabuilder}"
JWT_SECRET: "${JWT_SECRET:-changeme-in-production}"
depends_on:
postgres:
condition: service_healthy
@@ -656,7 +646,7 @@ services:
# Email Client - Email management UI
emailclient-app:
build:
context: ../..
context: ..
dockerfile: frontends/emailclient/Dockerfile
container_name: metabuilder-emailclient-app
restart: unless-stopped
@@ -681,7 +671,7 @@ services:
# Exploded Diagrams - Interactive 3D viewer
exploded-diagrams:
build:
context: ../..
context: ..
dockerfile: frontends/exploded-diagrams/Dockerfile
container_name: metabuilder-exploded-diagrams
restart: unless-stopped
@@ -702,9 +692,8 @@ services:
# Storybook - Component library
storybook:
profiles: ["storybook"]
build:
context: ../..
context: ..
dockerfile: storybook/Dockerfile
container_name: metabuilder-storybook
restart: unless-stopped
@@ -722,7 +711,7 @@ services:
# Frontend App - Main Next.js application
frontend-app:
build:
context: ../..
context: ..
dockerfile: frontends/nextjs/Dockerfile
container_name: metabuilder-frontend-app
restart: unless-stopped
@@ -746,169 +735,6 @@ services:
networks:
- metabuilder
# DBAL Frontend - Daemon overview + query console
dbal-frontend:
build:
context: ../..
dockerfile: frontends/dbal/Dockerfile
args:
DBAL_DAEMON_URL: http://dbal:8080
container_name: metabuilder-dbal-frontend
restart: unless-stopped
ports:
- "3015:3000"
environment:
NODE_ENV: production
PORT: 3000
HOSTNAME: "0.0.0.0"
DBAL_DAEMON_URL: http://dbal:8080
depends_on:
dbal:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:3000/dbal"]
interval: 15s
timeout: 5s
retries: 3
start_period: 30s
networks:
- metabuilder
# Docker Terminal - Container management UI + Flask backend
dockerterminal-backend:
build:
context: ../../frontends/dockerterminal/backend
dockerfile: Dockerfile
container_name: metabuilder-dockerterminal-backend
restart: unless-stopped
environment:
FLASK_ENV: production
volumes:
- /var/run/docker.sock:/var/run/docker.sock
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:5000/health', timeout=3)"]
interval: 30s
timeout: 5s
retries: 3
start_period: 10s
networks:
- metabuilder
dockerterminal:
build:
context: ../../frontends/dockerterminal/frontend
dockerfile: Dockerfile
args:
NEXT_PUBLIC_API_URL: /terminal-api
container_name: metabuilder-dockerterminal
restart: unless-stopped
ports:
- "3010:3000"
environment:
NODE_ENV: production
PORT: 3000
HOSTNAME: "0.0.0.0"
depends_on:
- dockerterminal-backend
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:3000/terminal"]
interval: 15s
timeout: 5s
retries: 3
start_period: 30s
networks:
- metabuilder
# Package Repo - Package repository UI + Flask backend
packagerepo-backend:
build:
context: ../../frontends/packagerepo
dockerfile: backend/Dockerfile
container_name: metabuilder-packagerepo-backend
restart: unless-stopped
environment:
DATA_DIR: /data
FLASK_APP: app.py
volumes:
- packagerepo-data:/data
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:5000/health', timeout=3)"]
interval: 30s
timeout: 5s
retries: 3
start_period: 10s
networks:
- metabuilder
packagerepo:
build:
context: ../../frontends/packagerepo/frontend
dockerfile: Dockerfile
args:
BACKEND_URL: http://packagerepo-backend:5000
container_name: metabuilder-packagerepo
restart: unless-stopped
ports:
- "3011:3000"
environment:
NODE_ENV: production
PORT: 3000
HOSTNAME: "0.0.0.0"
depends_on:
- packagerepo-backend
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:3000/packagerepo"]
interval: 15s
timeout: 5s
retries: 3
start_period: 30s
networks:
- metabuilder
# RepoForge - Android app download portal
repoforge:
build:
context: ../../frontends/repoforge/portal
dockerfile: Dockerfile
container_name: metabuilder-repoforge
restart: unless-stopped
ports:
- "3012:3000"
environment:
NODE_ENV: production
PORT: 3000
HOSTNAME: "0.0.0.0"
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:3000/repoforge"]
interval: 15s
timeout: 5s
retries: 3
start_period: 20s
networks:
- metabuilder
# CaproverForge - Android app download portal
caproverforge:
build:
context: ../../frontends/caproverforge/portal
dockerfile: Dockerfile
container_name: metabuilder-caproverforge
restart: unless-stopped
ports:
- "3013:3000"
environment:
NODE_ENV: production
PORT: 3000
HOSTNAME: "0.0.0.0"
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1:3000/caproverforge"]
interval: 15s
timeout: 5s
retries: 3
start_period: 20s
networks:
- metabuilder
# ============================================================================
# Monitoring (--profile monitoring)
# ============================================================================
@@ -916,9 +742,8 @@ services:
# Prometheus - Metrics collection (config baked into image)
prometheus:
build:
context: ../..
context: ..
dockerfile: deployment/config/prometheus/Dockerfile
image: deployment-prometheus:latest
container_name: metabuilder-prometheus
restart: unless-stopped
profiles: [monitoring]
@@ -929,7 +754,6 @@ services:
- '--storage.tsdb.path=/prometheus'
- '--storage.tsdb.retention.time=30d'
- '--web.enable-lifecycle'
- '--web.external-url=/prometheus/'
volumes:
- prometheus-data:/prometheus
healthcheck:
@@ -948,21 +772,18 @@ services:
# Grafana - Metrics visualization (config baked into image)
grafana:
build:
context: ../..
context: ..
dockerfile: deployment/config/grafana/Dockerfile
image: deployment-grafana:latest
container_name: metabuilder-grafana
restart: unless-stopped
profiles: [monitoring]
ports:
- "3014:3000"
- "3009:3000"
environment:
GF_SECURITY_ADMIN_USER: ${GRAFANA_ADMIN_USER:-admin}
GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_ADMIN_PASSWORD:-admin}
GF_INSTALL_PLUGINS: grafana-clock-panel,grafana-simple-json-datasource
GF_AUTH_ANONYMOUS_ENABLED: "false"
GF_SERVER_ROOT_URL: "%(protocol)s://%(domain)s/grafana/"
GF_SERVER_SERVE_FROM_SUB_PATH: "true"
volumes:
- grafana-data:/var/lib/grafana
depends_on:
@@ -984,9 +805,8 @@ services:
# Loki - Log aggregation (config baked into image)
loki:
build:
context: ../..
context: ..
dockerfile: deployment/config/loki/Dockerfile
image: deployment-loki:latest
container_name: metabuilder-loki
restart: unless-stopped
profiles: [monitoring]
@@ -1009,9 +829,8 @@ services:
# Promtail - Log shipper (config baked into image; host log paths are system mounts)
promtail:
build:
context: ../..
context: ..
dockerfile: deployment/config/promtail/Dockerfile
image: deployment-promtail:latest
container_name: metabuilder-promtail
restart: unless-stopped
profiles: [monitoring]
@@ -1113,28 +932,6 @@ services:
cpus: '0.25'
memory: 128M
# Starlink Exporter - Starlink dish metrics
starlink-exporter:
image: danopstech/starlink_exporter:latest
container_name: metabuilder-starlink-exporter
restart: unless-stopped
profiles: [monitoring]
ports:
- "9817:9817"
healthcheck:
test: ["CMD-SHELL", "wget --quiet --tries=1 --spider http://127.0.0.1:9817/metrics || exit 1"]
interval: 30s
timeout: 5s
retries: 3
start_period: 10s
networks:
- metabuilder
deploy:
resources:
limits:
cpus: '0.25'
memory: 128M
# cAdvisor - Container metrics
cadvisor:
image: gcr.io/cadvisor/cadvisor:latest
@@ -1167,9 +964,8 @@ services:
# Alertmanager - Alert routing (config baked into image)
alertmanager:
build:
context: ../..
context: ..
dockerfile: deployment/config/alertmanager/Dockerfile
image: deployment-alertmanager:latest
container_name: metabuilder-alertmanager
restart: unless-stopped
profiles: [monitoring]
@@ -1200,9 +996,8 @@ services:
# Media Processing Daemon - FFmpeg, Radio, Retro Gaming
media-daemon:
build:
context: ../../services/media_daemon
context: ../services/media_daemon
dockerfile: Dockerfile
image: deployment-media-daemon:latest
container_name: metabuilder-media-daemon
restart: unless-stopped
profiles: [media]
@@ -1266,9 +1061,8 @@ services:
# HLS/DASH Streaming Server (config baked into image)
nginx-stream:
build:
context: ../..
context: ..
dockerfile: deployment/config/nginx/Dockerfile.stream
image: deployment-nginx-stream:latest
container_name: metabuilder-nginx-stream
restart: unless-stopped
profiles: [media]
@@ -1287,101 +1081,6 @@ services:
networks:
- metabuilder
# ============================================================================
# Package Registries (--profile registry) — Nexus + Artifactory
#
# Run with:
# docker compose -f compose.yml --profile registry up -d
# # Wait ~2 min for init containers, then:
# python3 deployment.py nexus push
# python3 deployment.py npm publish-patches
#
# Access:
# Nexus: http://localhost:8091 (admin / nexus)
# Artifactory: http://localhost:8092 (admin / password)
# ============================================================================
nexus:
image: sonatype/nexus3:3.75.0
platform: linux/amd64
profiles: [registry]
container_name: nexus-registry
restart: unless-stopped
ports:
- "8091:8081"
- "5050:5050"
volumes:
- nexus-data:/nexus-data
environment:
INSTALL4J_ADD_VM_PARAMS: "-Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g"
healthcheck:
test: ["CMD-SHELL", "curl -sf http://localhost:8081/service/rest/v1/status || exit 1"]
interval: 30s
timeout: 10s
retries: 15
start_period: 120s
networks:
- metabuilder
nexus-init:
image: python:3.12-alpine
profiles: [registry]
container_name: nexus-init
depends_on:
nexus:
condition: service_healthy
volumes:
- nexus-data:/nexus-data:ro
- ./nexus-init.py:/nexus-init.py:ro
entrypoint: ["/bin/sh", "-c", "pip install -q requests && python3 /nexus-init.py"]
environment:
NEXUS_URL: "http://nexus:8081"
NEXUS_ADMIN_NEW_PASS: "nexus"
DOCKER_REPO_PORT: "5050"
restart: "no"
networks:
- metabuilder
artifactory:
image: releases-docker.jfrog.io/jfrog/artifactory-cpp-ce:latest
profiles: [registry]
container_name: artifactory-ce
restart: unless-stopped
ports:
- "8092:8081"
- "8093:8082"
volumes:
- artifactory-data:/var/opt/jfrog/artifactory
environment:
JF_SHARED_DATABASE_TYPE: derby
JF_SHARED_DATABASE_ALLOWNONPOSTGRESQL: "true"
EXTRA_JAVA_OPTIONS: "-Xms512m -Xmx2g"
healthcheck:
test: ["CMD-SHELL", "curl -sf http://localhost:8082/router/api/v1/system/health || exit 1"]
interval: 30s
timeout: 10s
retries: 15
start_period: 120s
networks:
- metabuilder
artifactory-init:
image: python:3.12-alpine
profiles: [registry]
container_name: artifactory-init
depends_on:
artifactory:
condition: service_healthy
volumes:
- ./artifactory-init.py:/artifactory-init.py:ro
entrypoint: ["/bin/sh", "-c", "pip install -q requests && python3 /artifactory-init.py"]
environment:
ARTIFACTORY_URL: "http://artifactory:8081"
ARTIFACTORY_ADMIN_PASS: "password"
restart: "no"
networks:
- metabuilder
# ============================================================================
# Volumes
# ============================================================================
@@ -1442,13 +1141,6 @@ volumes:
driver: local
pastebin-backend-data:
driver: local
packagerepo-data:
driver: local
# Registry
nexus-data:
name: nexus-data
artifactory-data:
name: artifactory-data
# ============================================================================
# Networks

View File

@@ -0,0 +1,53 @@
# docker-compose.test.yml — Lightweight DB containers for DBAL integration tests.
#
# DEPRECATED: testcontainers-sidecar now starts/stops containers automatically.
# Keep as fallback for environments without Docker daemon access (e.g. some CI configs).
# Preferred: build with BUILD_INTEGRATION_TESTS=ON and let testcontainers manage lifecycle.
#
# Uses non-conflicting ports (5433, 3307) so it can run alongside the dev stack.
# tmpfs mounts make containers fast and ephemeral (data discarded on stop).
#
# Usage:
#   docker compose -f deployment/docker-compose.test.yml up -d
#   export DBAL_TEST_POSTGRES_URL=postgresql://testuser:testpass@localhost:5433/dbal_test
#   export DBAL_TEST_MYSQL_URL=mysql://root:testpass@localhost:3307/dbal_test
#   cd dbal/production/_build && ctest -R dbal_integration_tests --output-on-failure
#   docker compose -f deployment/docker-compose.test.yml down
services:
  postgres-test:
    image: postgres:16-alpine
    container_name: dbal-test-postgres
    environment:
      POSTGRES_PASSWORD: testpass
      POSTGRES_USER: testuser
      POSTGRES_DB: dbal_test
    ports:
      - "5433:5432"  # 5433 avoids conflict with dev stack on 5432
    tmpfs:
      - /var/lib/postgresql/data  # in-memory storage, instant teardown
    healthcheck:
      # pg_isready exits 0 once the server accepts connections for this user/db.
      test: ["CMD-SHELL", "pg_isready -U testuser -d dbal_test"]
      interval: 2s
      timeout: 5s
      retries: 10
  mysql-test:
    # -oracle variant is multi-arch (works on arm64 hosts as well as amd64).
    image: mysql:8-oracle
    container_name: dbal-test-mysql
    environment:
      MYSQL_ROOT_PASSWORD: testpass
      MYSQL_DATABASE: dbal_test
      MYSQL_USER: testuser
      MYSQL_PASSWORD: testpass
    ports:
      - "3307:3306"  # 3307 avoids conflict with dev stack on 3306
    tmpfs:
      - /var/lib/mysql  # in-memory storage, instant teardown
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost", "-u", "root", "-ptestpass"]
      interval: 5s
      timeout: 10s
      retries: 10
      start_period: 20s  # MySQL first-boot init is slower than Postgres

View File

@@ -1,143 +0,0 @@
#!/usr/bin/env python3
"""One-shot Artifactory CE initialisation — runs inside the artifactory-init container.
Creates Conan2 local + remote + virtual repositories via YAML config API.
NOTE: The JSON REST repository API (PUT /api/repositories/) requires Pro license.
Artifactory CE supports YAML config patching instead:
PATCH /api/system/configuration (Content-Type: application/yaml)
"""
import os
import sys
import time
import requests
ARTIFACTORY_URL = os.environ.get("ARTIFACTORY_URL", "http://artifactory:8081")
ADMIN_PASS = os.environ.get("ARTIFACTORY_ADMIN_PASS", "password")
API = f"{ARTIFACTORY_URL}/artifactory/api"
REPO_CONFIGS = [
("conan-local", """\
localRepositories:
conan-local:
key: conan-local
type: conan
packageType: conan
description: "Local Conan2 repository for private packages"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false"""),
("conan-remote", """\
remoteRepositories:
conan-remote:
key: conan-remote
type: conan
packageType: conan
url: "https://center2.conan.io"
description: "Proxy cache for Conan Center"
repoLayoutRef: conan-default
handleReleases: true
handleSnapshots: false"""),
("generic-local", """\
localRepositories:
generic-local:
key: generic-local
type: generic
packageType: generic
description: "Generic artifact storage"
repoLayoutRef: simple-default
handleReleases: true
handleSnapshots: false"""),
("conan-virtual", """\
virtualRepositories:
conan-virtual:
key: conan-virtual
type: conan
packageType: conan
description: "Virtual Conan2 repo - local packages + ConanCenter cache"
repositories:
- conan-local
- conan-remote
defaultDeploymentRepo: conan-local"""),
]
def log(msg: str) -> None:
    """Emit a namespaced status line; flush so container logs stream live."""
    print("[artifactory-init] {}".format(msg), flush=True)
def main() -> int:
    """Initialise Artifactory CE: wait for the API, create Conan2/generic repos,
    then verify and print connection instructions.

    Returns:
        0 on success, 1 on any failure (suitable for sys.exit).
    Idempotent: repos already present in the configuration are skipped.
    """
    auth = ("admin", ADMIN_PASS)
    # Wait for Artifactory API to be ready: poll /system/ping up to 30 times
    # with a 2 s sleep between attempts (~60 s budget).
    log("Waiting for Artifactory API...")
    ready = False
    for _ in range(30):
        try:
            r = requests.get(f"{API}/system/ping", auth=auth, timeout=5)
            if r.status_code == 200:
                ready = True
                break
        except requests.ConnectionError:
            pass  # server not accepting connections yet — keep polling
        time.sleep(2)
    if not ready:
        log("ERROR: Artifactory API not ready after 60s")
        return 1
    log("Artifactory API is ready")
    # Check existing repos. NOTE(review): existence is detected by a raw
    # substring match of '"<name>"' against the JSON response text below —
    # assumes repo names never appear as substrings of other values; confirm.
    try:
        existing = requests.get(f"{API}/repositories", auth=auth, timeout=10).text
    except requests.RequestException:
        existing = "[]"  # treat as "no repos" and attempt creation
    for repo_name, yaml_body in REPO_CONFIGS:
        if f'"{repo_name}"' in existing:
            log(f"{repo_name} already exists, skipping")
            continue
        # CE supports YAML config patching only (Pro has the JSON repo API).
        r = requests.patch(f"{API}/system/configuration", auth=auth,
                           headers={"Content-Type": "application/yaml"},
                           data=yaml_body, timeout=30)
        if r.status_code == 200:
            log(f"Create {repo_name}{r.text.strip()}")
        else:
            log(f"ERROR: {repo_name} returned HTTP {r.status_code}: {r.text.strip()}")
            return 1
    # Verify repos are accessible — warnings only, never fatal.
    log("Verifying repositories...")
    for repo_name in ["conan-local", "conan-remote", "conan-virtual", "generic-local"]:
        try:
            r = requests.get(f"{ARTIFACTORY_URL}/artifactory/{repo_name}/",
                             auth=auth, timeout=5)
            indicator = "ok" if r.status_code == 200 else f"WARN (HTTP {r.status_code})"
        except requests.RequestException:
            indicator = "WARN (unreachable)"
        log(f" {indicator} {repo_name}")
    # Final banner with copy-pasteable client setup commands.
    log("")
    log("=" * 38)
    log(" Artifactory CE ready!")
    log(f" Web UI : http://localhost:8092")
    log(f" Login : admin / {ADMIN_PASS}")
    log("")
    log(" Conan2 repos:")
    log(f" Local : {ARTIFACTORY_URL}/artifactory/api/conan/conan-local")
    log(f" Remote : {ARTIFACTORY_URL}/artifactory/api/conan/conan-remote")
    log(f" Virtual : {ARTIFACTORY_URL}/artifactory/api/conan/conan-virtual")
    log("")
    log(" Conan client setup:")
    log(" conan remote add artifactory http://localhost:8092/artifactory/api/conan/conan-virtual")
    log(f" conan remote login artifactory admin -p {ADMIN_PASS}")
    log("=" * 38)
    return 0


if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,124 +0,0 @@
#!/usr/bin/env python3
"""One-shot Nexus initialisation — runs inside the nexus-init container.
Creates Docker + npm repositories. Conan2 is handled by Artifactory CE.
"""
import os
import sys
import requests
# Connection settings — overridable via environment (set by compose).
NEXUS_URL = os.environ.get("NEXUS_URL", "http://nexus:8081")
NEW_PASS = os.environ.get("NEXUS_ADMIN_NEW_PASS", "nexus")
DOCKER_PORT = int(os.environ.get("DOCKER_REPO_PORT", "5000"))
# Nexus writes a one-time admin password here on first boot and deletes it
# once the password is changed (volume is mounted read-only in this container).
PASS_FILE = "/nexus-data/admin.password"
API = f"{NEXUS_URL}/service/rest/v1"
def log(msg: str) -> None:
    """Emit a namespaced status line; flush so container logs stream live."""
    print("[nexus-init] {}".format(msg), flush=True)
def create_repo(repo_type: str, label: str, body: dict, auth: tuple) -> bool:
    """Create a Nexus repository via the REST API, idempotently.

    Args:
        repo_type: API path segment, e.g. "docker/hosted" or "npm/proxy".
        label: Human-readable name used only in log output.
        body: JSON payload describing the repository.
        auth: (username, password) tuple for HTTP basic auth.

    Returns:
        True if the repo was created or already exists, False otherwise.
    """
    # timeout added for consistency with every other HTTP call in these init
    # scripts — without it a wedged Nexus blocks the init container forever.
    try:
        r = requests.post(f"{API}/repositories/{repo_type}", json=body,
                          auth=auth, timeout=30)
    except requests.RequestException as exc:
        log(f"ERROR: {label} repo creation failed: {exc}")
        return False
    if r.status_code == 201:
        log(f"{label} repo created")
        return True
    if r.status_code == 400:
        # Nexus answers 400 when a repository with this name already exists.
        log(f"{label} repo already exists, skipping")
        return True
    log(f"ERROR: {label} repo creation returned HTTP {r.status_code}")
    return False
def main() -> int:
    """Initialise Nexus: resolve the admin password, enable realms, and create
    the Docker + npm repositories. Conan2 lives on Artifactory CE, not here.

    Returns:
        0 on success, 1 on any failure (suitable for sys.exit).
    Idempotent: safe to re-run against an already-initialised instance.
    """
    auth = ("admin", NEW_PASS)
    # Resolve admin password (idempotent across multiple runs): try the target
    # password first; on first boot fall back to the one-time password file.
    # timeout=30 on every call so a wedged Nexus cannot hang this container.
    r = requests.get(f"{API}/status", auth=auth, timeout=30)
    if r.status_code == 200:
        log(f"Already initialised with password '{NEW_PASS}'")
    elif os.path.exists(PASS_FILE):
        with open(PASS_FILE) as f:
            init_pass = f.read().strip()
        log("First run: changing admin password...")
        r = requests.put(f"{API}/security/users/admin/change-password",
                         auth=("admin", init_pass),
                         headers={"Content-Type": "text/plain"}, data=NEW_PASS,
                         timeout=30)
        if r.status_code == 204:
            log(f"Admin password set to '{NEW_PASS}'")
        else:
            log(f"ERROR: password change returned HTTP {r.status_code}")
            return 1
    else:
        log("ERROR: cannot authenticate — is NEXUS_ADMIN_NEW_PASS correct?")
        return 1
    # Enable anonymous pull access (best-effort: status deliberately unchecked).
    requests.put(f"{API}/security/anonymous", json={
        "enabled": True, "userId": "anonymous", "realmName": "NexusAuthorizingRealm",
    }, auth=auth, timeout=30)
    log("Anonymous access enabled")
    # Enable Docker + npm Bearer Token realms. This PUT replaces the whole
    # active-realm list, so every realm must be listed in one call.
    requests.put(f"{API}/security/realms/active", auth=auth,
                 json=["NexusAuthenticatingRealm", "DockerToken", "NpmToken"],
                 timeout=30)
    log("Docker + npm Bearer Token realms enabled")
    # Docker hosted repository (served on its own HTTP port).
    if not create_repo("docker/hosted", "Docker 'local'", {
        "name": "local", "online": True,
        "storage": {"blobStoreName": "default", "strictContentTypeValidation": True,
                    "writePolicy": "allow"},
        "docker": {"v1Enabled": False, "forceBasicAuth": False, "httpPort": DOCKER_PORT},
    }, auth):
        return 1
    # npm hosted (allow_once: re-publishing an existing version is rejected).
    if not create_repo("npm/hosted", "npm-hosted", {
        "name": "npm-hosted", "online": True,
        "storage": {"blobStoreName": "default", "strictContentTypeValidation": True,
                    "writePolicy": "allow_once"},
    }, auth):
        return 1
    # npm proxy (caches npmjs.org; cache ages are in minutes).
    if not create_repo("npm/proxy", "npm-proxy", {
        "name": "npm-proxy", "online": True,
        "storage": {"blobStoreName": "default", "strictContentTypeValidation": True},
        "proxy": {"remoteUrl": "https://registry.npmjs.org",
                  "contentMaxAge": 1440, "metadataMaxAge": 1440},
        "httpClient": {"blocked": False, "autoBlock": True},
        "negativeCache": {"enabled": True, "timeToLive": 1440},
    }, auth):
        return 1
    # npm group (hosted wins, proxy fallback — member order is precedence).
    if not create_repo("npm/group", "npm-group", {
        "name": "npm-group", "online": True,
        "storage": {"blobStoreName": "default", "strictContentTypeValidation": True},
        "group": {"memberNames": ["npm-hosted", "npm-proxy"]},
    }, auth):
        return 1
    # Final banner with follow-up commands.
    log("")
    log("=" * 46)
    log(" Nexus ready!")
    log(f" Registry : localhost:{DOCKER_PORT}")
    log(f" Web UI : http://localhost:8091")
    log(f" Login : admin / {NEW_PASS}")
    log("")
    log(f" npm group URL: {NEXUS_URL}/repository/npm-group/")
    log(f" npm hosted URL: {NEXUS_URL}/repository/npm-hosted/")
    log("")
    log(" Next steps:")
    log(" python3 deployment.py nexus push (Docker images)")
    log(" Conan2 is on Artifactory CE (port 8092)")
    log(" python3 deployment.py npm publish-patches (Patched npm packages)")
    log("=" * 46)
    return 0


if __name__ == "__main__":
    sys.exit(main())

88
deployment/nexus-ci-init.sh Executable file
View File

@@ -0,0 +1,88 @@
#!/bin/sh
# Lightweight Nexus initialisation for CI — npm repos only (no Docker, no Artifactory).
# Full local dev setup uses nexus-init.sh via docker compose.
set -e

NEXUS_URL="${NEXUS_URL:-http://localhost:8091}"
NEW_PASS="${NEXUS_ADMIN_NEW_PASS:-nexus}"
# CI runs Nexus with its data dir under /tmp (unlike the compose volume path).
PASS_FILE="/tmp/nexus-data/admin.password"

log() { echo "[nexus-ci-init] $*"; }

# ── Resolve admin password ──────────────────────────────────────────────────
# Try the target password first (idempotent re-runs); on first boot fall back
# to the one-time password Nexus writes to $PASS_FILE.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" \
  "$NEXUS_URL/service/rest/v1/status" -u "admin:$NEW_PASS")
if [ "$HTTP" = "200" ]; then
  log "Already initialised with password '$NEW_PASS'"
elif [ -f "$PASS_FILE" ]; then
  INIT_PASS=$(cat "$PASS_FILE")
  log "First run: setting admin password..."
  # NOTE(review): unlike nexus-init.sh this does not inspect the HTTP status —
  # -sf makes curl exit non-zero on HTTP errors and set -e aborts the script,
  # but with no diagnostic message; confirm that is acceptable for CI.
  curl -sf -X PUT \
    "$NEXUS_URL/service/rest/v1/security/users/admin/change-password" \
    -u "admin:$INIT_PASS" -H "Content-Type: text/plain" -d "$NEW_PASS"
  log "Admin password set"
else
  log "ERROR: cannot authenticate and no password file found"
  exit 1
fi

AUTH="admin:$NEW_PASS"

# ── Enable anonymous access ────────────────────────────────────────────────
# Best-effort (|| true): not fatal if already enabled or the call fails.
curl -sf -X PUT "$NEXUS_URL/service/rest/v1/security/anonymous" \
  -u "$AUTH" -H "Content-Type: application/json" \
  -d '{"enabled":true,"userId":"anonymous","realmName":"NexusAuthorizingRealm"}' || true
log "Anonymous access enabled"

# Enable npm token realm (this PUT replaces the whole active-realm list).
curl -sf -X PUT "$NEXUS_URL/service/rest/v1/security/realms/active" \
  -u "$AUTH" -H "Content-Type: application/json" \
  -d '["NexusAuthenticatingRealm","NpmToken"]' || true

# ── npm-hosted (patched packages) ─────────────────────────────────────────
# HTTP 201 = created, 400 = already exists (idempotent), anything else fatal.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/hosted" \
  -u "$AUTH" -H "Content-Type: application/json" -d '{
    "name": "npm-hosted",
    "online": true,
    "storage": {"blobStoreName": "default", "strictContentTypeValidation": true, "writePolicy": "allow"}
  }')
case "$HTTP" in
  201) log "npm-hosted repo created" ;;
  400) log "npm-hosted repo already exists" ;;
  *) log "ERROR creating npm-hosted: HTTP $HTTP"; exit 1 ;;
esac

# ── npm-proxy (npmjs.org cache) ───────────────────────────────────────────
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/proxy" \
  -u "$AUTH" -H "Content-Type: application/json" -d '{
    "name": "npm-proxy",
    "online": true,
    "storage": {"blobStoreName": "default", "strictContentTypeValidation": true},
    "proxy": {"remoteUrl": "https://registry.npmjs.org", "contentMaxAge": 1440, "metadataMaxAge": 1440},
    "httpClient": {"blocked": false, "autoBlock": true},
    "negativeCache": {"enabled": true, "timeToLive": 1440}
  }')
case "$HTTP" in
  201) log "npm-proxy repo created" ;;
  400) log "npm-proxy repo already exists" ;;
  *) log "ERROR creating npm-proxy: HTTP $HTTP"; exit 1 ;;
esac

# ── npm-group (combines hosted + proxy) ──────────────────────────────────
# Member order is lookup precedence: hosted wins, proxy is the fallback.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/group" \
  -u "$AUTH" -H "Content-Type: application/json" -d '{
    "name": "npm-group",
    "online": true,
    "storage": {"blobStoreName": "default", "strictContentTypeValidation": true},
    "group": {"memberNames": ["npm-hosted", "npm-proxy"]}
  }')
case "$HTTP" in
  201) log "npm-group repo created" ;;
  400) log "npm-group repo already exists" ;;
  *) log "ERROR creating npm-group: HTTP $HTTP"; exit 1 ;;
esac

log "Nexus CI init complete"

177
deployment/nexus-init.sh Executable file
View File

@@ -0,0 +1,177 @@
#!/bin/sh
# One-shot Nexus initialisation — runs inside the nexus-init container.
# Creates Docker + npm repositories. Conan2 is handled by Artifactory CE.
set -e

NEXUS_URL="${NEXUS_URL:-http://nexus:8081}"
NEW_PASS="${NEXUS_ADMIN_NEW_PASS:-nexus}"
DOCKER_PORT="${DOCKER_REPO_PORT:-5000}"
PASS_FILE="/nexus-data/admin.password"

log() { echo "[nexus-init] $*"; }

# ── Resolve admin password (idempotent across multiple runs) ─────────────────
# On first boot Nexus writes a random password to admin.password, then deletes
# it once changed. On re-runs the file is gone — try NEW_PASS directly.
# Try the desired password first (idempotent re-runs)
HTTP=$(curl -s -o /dev/null -w "%{http_code}" \
  "$NEXUS_URL/service/rest/v1/status" -u "admin:$NEW_PASS")
if [ "$HTTP" = "200" ]; then
  log "Already initialised, continuing with password '$NEW_PASS'"
elif [ -f "$PASS_FILE" ]; then
  INIT_PASS=$(cat "$PASS_FILE")
  log "First run: changing admin password..."
  HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X PUT \
    "$NEXUS_URL/service/rest/v1/security/users/admin/change-password" \
    -u "admin:$INIT_PASS" -H "Content-Type: text/plain" -d "$NEW_PASS")
  case "$HTTP" in
    204) log "Admin password set to '$NEW_PASS'" ;;
    *) log "ERROR: password change returned HTTP $HTTP"; exit 1 ;;
  esac
else
  log "ERROR: cannot authenticate — is NEXUS_ADMIN_NEW_PASS correct?"
  exit 1
fi

AUTH="admin:$NEW_PASS"

# ── Enable anonymous pull access ─────────────────────────────────────────────
curl -sf -X PUT "$NEXUS_URL/service/rest/v1/security/anonymous" \
  -u "$AUTH" -H "Content-Type: application/json" \
  -d '{"enabled":true,"userId":"anonymous","realmName":"NexusAuthorizingRealm"}'
log "Anonymous access enabled"

# ── Enable Docker Bearer Token realm ─────────────────────────────────────────
# Valid IDs: NexusAuthenticatingRealm, DockerToken, ConanToken, NpmToken, etc.
# NexusAuthorizingRealm is always-on and NOT configurable via this API.
curl -sf -X PUT "$NEXUS_URL/service/rest/v1/security/realms/active" \
  -u "$AUTH" -H "Content-Type: application/json" \
  -d '["NexusAuthenticatingRealm","DockerToken"]'
log "Docker Bearer Token realm enabled"

# ── Create Docker hosted repository ─────────────────────────────────────────
# HTTP 201 = created, 400 = already exists (idempotent), anything else fatal.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/docker/hosted" \
  -u "$AUTH" -H "Content-Type: application/json" -d "$(cat <<JSON
{
  "name": "local",
  "online": true,
  "storage": {
    "blobStoreName": "default",
    "strictContentTypeValidation": true,
    "writePolicy": "allow"
  },
  "docker": {
    "v1Enabled": false,
    "forceBasicAuth": false,
    "httpPort": $DOCKER_PORT
  }
}
JSON
)")
case "$HTTP" in
  201) log "Docker hosted repo 'local' created on port $DOCKER_PORT" ;;
  400) log "Repo 'local' already exists, skipping" ;;
  *) log "ERROR: repo creation returned HTTP $HTTP"; exit 1 ;;
esac

# ── Enable npm Bearer Token realm ──────────────────────────────────────────
# Note: Conan2 is handled by Artifactory CE — not Nexus
# This PUT replaces the whole active-realm list, so DockerToken must be
# repeated here alongside NpmToken.
curl -sf -X PUT "$NEXUS_URL/service/rest/v1/security/realms/active" \
  -u "$AUTH" -H "Content-Type: application/json" \
  -d '["NexusAuthenticatingRealm","DockerToken","NpmToken"]'
log "npm Bearer Token realm enabled"

# ── Create npm hosted repository ──────────────────────────────────────────
# allow_once: re-publishing an existing version is rejected.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/hosted" \
  -u "$AUTH" -H "Content-Type: application/json" -d "$(cat <<JSON
{
  "name": "npm-hosted",
  "online": true,
  "storage": {
    "blobStoreName": "default",
    "strictContentTypeValidation": true,
    "writePolicy": "allow_once"
  }
}
JSON
)")
case "$HTTP" in
  201) log "npm hosted repo 'npm-hosted' created" ;;
  400) log "npm repo 'npm-hosted' already exists, skipping" ;;
  *) log "ERROR: npm hosted repo creation returned HTTP $HTTP"; exit 1 ;;
esac

# ── Create npm proxy repository (caches npmjs.org) ────────────────────────
# Cache ages are in minutes (1440 = 24 h).
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/proxy" \
  -u "$AUTH" -H "Content-Type: application/json" -d "$(cat <<JSON
{
  "name": "npm-proxy",
  "online": true,
  "storage": {
    "blobStoreName": "default",
    "strictContentTypeValidation": true
  },
  "proxy": {
    "remoteUrl": "https://registry.npmjs.org",
    "contentMaxAge": 1440,
    "metadataMaxAge": 1440
  },
  "negativeCache": {
    "enabled": true,
    "timeToLive": 1440
  },
  "httpClient": {
    "blocked": false,
    "autoBlock": true
  }
}
JSON
)")
case "$HTTP" in
  201) log "npm proxy repo 'npm-proxy' created" ;;
  400) log "npm repo 'npm-proxy' already exists, skipping" ;;
  *) log "ERROR: npm proxy repo creation returned HTTP $HTTP"; exit 1 ;;
esac

# ── Create npm group repository (hosted wins, proxy fallback) ─────────────
# Member order is lookup precedence.
HTTP=$(curl -s -o /dev/null -w "%{http_code}" -X POST \
  "$NEXUS_URL/service/rest/v1/repositories/npm/group" \
  -u "$AUTH" -H "Content-Type: application/json" -d "$(cat <<JSON
{
  "name": "npm-group",
  "online": true,
  "storage": {
    "blobStoreName": "default",
    "strictContentTypeValidation": true
  },
  "group": {
    "memberNames": ["npm-hosted", "npm-proxy"]
  }
}
JSON
)")
case "$HTTP" in
  201) log "npm group repo 'npm-group' created" ;;
  400) log "npm repo 'npm-group' already exists, skipping" ;;
  *) log "ERROR: npm group repo creation returned HTTP $HTTP"; exit 1 ;;
esac

log ""
log "══════════════════════════════════════════"
log " Nexus ready!"
log " Registry : localhost:$DOCKER_PORT"
log " Web UI : http://localhost:8091"
log " Login : admin / $NEW_PASS"
log ""
log " npm group URL: $NEXUS_URL/repository/npm-group/"
log " npm hosted URL: $NEXUS_URL/repository/npm-hosted/"
log ""
log " Next steps:"
log " cd deployment && ./push-to-nexus.sh (Docker images)"
log " Conan2 is on Artifactory CE (port 8092)"
log " cd deployment && ./publish-npm-patches.sh (Patched npm packages)"
log "══════════════════════════════════════════"

127
deployment/populate-nexus.sh Executable file
View File

@@ -0,0 +1,127 @@
#!/usr/bin/env bash
# Push all locally-built MetaBuilder images to local Nexus registry.
# Tags each image as both :main and :latest at localhost:5050/<owner>/<repo>/<name>.
#
# Usage:
#   ./populate-nexus.sh [--skip-heavy]
#
#   --skip-heavy  skip base-conan-deps (32 GB), devcontainer (41 GB), media-daemon (3.5 GB)
set -euo pipefail

# NOTE(review): registry credentials are hard-coded for the local dev Nexus
# only (matches nexus-init defaults) — confirm this script is never pointed at
# a shared registry.
NEXUS="localhost:5050"
SLUG="johndoe6345789/metabuilder-small"
NEXUS_USER="admin"
NEXUS_PASS="nexus"

RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; BLUE='\033[0;34m'; NC='\033[0m'

SKIP_HEAVY=false
[[ "${1:-}" == "--skip-heavy" ]] && SKIP_HEAVY=true

log() { echo -e "${BLUE}[nexus]${NC} $*"; }
ok() { echo -e "${GREEN}[nexus]${NC} $*"; }
warn() { echo -e "${YELLOW}[nexus]${NC} $*"; }
err() { echo -e "${RED}[nexus]${NC} $*"; }

# ── Login ────────────────────────────────────────────────────────────────────
log "Logging in to $NEXUS..."
echo "$NEXUS_PASS" | docker login "$NEXUS" -u "$NEXUS_USER" --password-stdin

# ── Image map: local_tag → nexus_name ────────────────────────────────────────
# Format: "local_image|nexus_name|size_hint"
#
# Base images (metabuilder/* prefix, built by build-base-images.sh)
declare -a BASE_IMAGES=(
  "metabuilder/base-apt:latest|base-apt|2.8GB"
  "metabuilder/base-node-deps:latest|base-node-deps|5.5GB"
  "metabuilder/base-pip-deps:latest|base-pip-deps|1.4GB"
  "metabuilder/base-android-sdk:latest|base-android-sdk|6.1GB"
)

# Heavy base images — pushed last (or skipped with --skip-heavy)
declare -a HEAVY_IMAGES=(
  "metabuilder/base-conan-deps:latest|base-conan-deps|32GB"
  "metabuilder/devcontainer:latest|devcontainer|41GB"
)

# App images (deployment-* prefix, built by docker-compose)
declare -a APP_IMAGES=(
  "deployment-dbal-init:latest|dbal-init|12MB"
  "deployment-storybook:latest|storybook|112MB"
  "deployment-nginx:latest|nginx|92MB"
  "deployment-nginx-stream:latest|nginx-stream|92MB"
  "deployment-pastebin-backend:latest|pastebin-backend|236MB"
  "deployment-emailclient-app:latest|emailclient|350MB"
  "deployment-email-service:latest|email-service|388MB"
  "deployment-exploded-diagrams:latest|exploded-diagrams|315MB"
  "deployment-pastebin:latest|pastebin|382MB"
  "deployment-frontend-app:latest|frontend-app|361MB"
  "deployment-workflowui:latest|workflowui|542MB"
  "deployment-postgres-dashboard:latest|postgres-dashboard|508MB"
  "deployment-smtp-relay:latest|smtp-relay|302MB"
  "deployment-dbal:latest|dbal|3.0GB"
  "deployment-codegen:latest|codegen|5.6GB"
)

declare -a HEAVY_APP_IMAGES=(
  "deployment-media-daemon:latest|media-daemon|3.5GB"
)

# ── Push function ─────────────────────────────────────────────────────────────
pushed=0; skipped=0; failed=0

# push_image <local_tag> <nexus_name> <size_hint>
# Tags the local image as :main and :latest under $NEXUS/$SLUG and pushes both.
# Missing local images are skipped, push failures are counted but not fatal.
push_image() {
  local src="$1" name="$2" size="$3"
  # Check source exists
  if ! docker image inspect "$src" &>/dev/null; then
    warn "SKIP $name$src not found locally"
    # ((x++)) evaluates to the pre-increment value, so it returns non-zero
    # status when the counter was 0 — '|| true' keeps set -e from aborting.
    ((skipped++)) || true
    return
  fi
  local dst_main="$NEXUS/$SLUG/$name:main"
  local dst_latest="$NEXUS/$SLUG/$name:latest"
  log "Pushing $name ($size)..."
  docker tag "$src" "$dst_main"
  docker tag "$src" "$dst_latest"
  if docker push "$dst_main" && docker push "$dst_latest"; then
    ok "$name → :main + :latest"
    ((pushed++)) || true
  else
    err "$name FAILED"
    ((failed++)) || true
  fi
}

# ── Execute ──────────────────────────────────────────────────────────────────
echo ""
log "Registry  : $NEXUS"
log "Slug      : $SLUG"
log "Skip heavy: $SKIP_HEAVY"
echo ""
# Light images first so a failure on the multi-GB pushes loses less work.
for entry in "${BASE_IMAGES[@]}"; do IFS='|' read -r src name size <<< "$entry"; push_image "$src" "$name" "$size"; done
for entry in "${APP_IMAGES[@]}"; do IFS='|' read -r src name size <<< "$entry"; push_image "$src" "$name" "$size"; done

if $SKIP_HEAVY; then
  warn "Skipping heavy images (--skip-heavy set):"
  for entry in "${HEAVY_IMAGES[@]}" "${HEAVY_APP_IMAGES[@]}"; do
    IFS='|' read -r src name size <<< "$entry"; warn "  $name ($size)"
  done
else
  log "--- Heavy images (this will take a while) ---"
  for entry in "${HEAVY_APP_IMAGES[@]}"; do IFS='|' read -r src name size <<< "$entry"; push_image "$src" "$name" "$size"; done
  for entry in "${HEAVY_IMAGES[@]}"; do IFS='|' read -r src name size <<< "$entry"; push_image "$src" "$name" "$size"; done
fi

echo ""
echo -e "${GREEN}══════════════════════════════════════════${NC}"
echo -e "${GREEN} Done. pushed=$pushed skipped=$skipped failed=$failed${NC}"
echo -e "${GREEN}══════════════════════════════════════════${NC}"
echo ""
echo -e "Browse: http://localhost:8091 (admin/nexus → Browse → docker/local)"
echo -e "Use: act push -j <job> --artifact-server-path /tmp/act-artifacts --env REGISTRY=localhost:5050"

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -1,10 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32">
<defs>
<linearGradient id="bg" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#60a5fa"/>
<stop offset="100%" stop-color="#a78bfa"/>
</linearGradient>
</defs>
<rect width="32" height="32" rx="6" fill="url(#bg)"/>
<text x="16" y="22" text-anchor="middle" font-family="system-ui,sans-serif" font-weight="700" font-size="20" fill="#fff">M</text>
</svg>

Before

Width:  |  Height:  |  Size: 450 B

View File

@@ -3,7 +3,6 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="icon" type="image/svg+xml" href="/portal/favicon.svg">
<title>MetaBuilder</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
@@ -185,51 +184,6 @@
<p>Main Next.js application with Drizzle ORM, i18n, Clerk auth, and DBAL integration.</p>
<div class="path">/app</div>
</a>
<a href="/dbal" class="card">
<div class="card-header">
<div class="card-icon bg-blue">DB</div>
<h2>DBAL Frontend</h2>
</div>
<p>Daemon overview, architecture docs, server status, and interactive REST query console.</p>
<div class="path">/dbal</div>
</a>
<a href="/terminal" class="card">
<div class="card-header">
<div class="card-icon bg-green">DT</div>
<h2>Docker Terminal</h2>
</div>
<p>Interactive web terminal for Docker containers with real-time status monitoring.</p>
<div class="path">/terminal</div>
</a>
<a href="/packagerepo" class="card">
<div class="card-header">
<div class="card-icon bg-purple">PR</div>
<h2>Package Repo</h2>
</div>
<p>Schema-driven package repository with content-addressed storage and admin interface.</p>
<div class="path">/packagerepo</div>
</a>
<a href="/repoforge" class="card">
<div class="card-header">
<div class="card-icon bg-cyan">RF</div>
<h2>RepoForge</h2>
</div>
<p>Mobile Git repository manager for GitHub and GitLab. Download the Android APK.</p>
<div class="path">/repoforge</div>
</a>
<a href="/caproverforge" class="card">
<div class="card-header">
<div class="card-icon bg-orange">CF</div>
<h2>CaproverForge</h2>
</div>
<p>Mobile admin panel for CapRover. Deploy apps and manage domains from your phone.</p>
<div class="path">/caproverforge</div>
</a>
</div>
<div class="section-label">Backend Services</div>
@@ -309,26 +263,5 @@
<div class="path">/kibana/</div>
</a>
</div>
<div class="section-label">Monitoring (--profile monitoring)</div>
<div class="grid">
<a href="/grafana/" class="card">
<div class="card-header">
<div class="card-icon bg-orange">GF</div>
<h2>Grafana</h2>
</div>
<p>Metrics visualization with DBAL, infrastructure, and Starlink dashboards.</p>
<div class="path">/grafana/</div>
</a>
<a href="/prometheus/" class="card">
<div class="card-header">
<div class="card-icon bg-red">PM</div>
<h2>Prometheus</h2>
</div>
<p>Metrics collection, alerting rules, and target status for all MetaBuilder services.</p>
<div class="path">/prometheus/</div>
</a>
</div>
</body>
</html>

166
deployment/publish-npm-patches.sh Executable file
View File

@@ -0,0 +1,166 @@
#!/usr/bin/env bash
# Publish patched npm packages to a local registry (Nexus or Verdaccio).
#
# These packages fix vulnerabilities in bundled transitive dependencies
# that npm overrides cannot reach (e.g. minimatch/tar inside the npm package).
#
# Prerequisites (choose one):
# Nexus: docker compose -f docker-compose.nexus.yml up -d
# Verdaccio: npx verdaccio --config deployment/verdaccio.yaml &
#
# Usage:
# ./publish-npm-patches.sh # auto-detect (Nexus first, Verdaccio fallback)
# ./publish-npm-patches.sh --verdaccio # force Verdaccio on :4873
# ./publish-npm-patches.sh --nexus # force Nexus on :8091
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; NC='\033[0m'

# ── Flag parsing ─────────────────────────────────────────────────────────
# --verdaccio / --nexus force a target registry; with neither, auto-detect.
USE_VERDACCIO=false
USE_NEXUS=false
for arg in "$@"; do
  case "$arg" in
    --verdaccio) USE_VERDACCIO=true ;;
    --nexus) USE_NEXUS=true ;;
  esac
done

# Auto-detect: prefer Nexus if its status endpoint answers, else Verdaccio.
if ! $USE_VERDACCIO && ! $USE_NEXUS; then
  if curl -sf http://localhost:8091/service/rest/v1/status -u admin:nexus >/dev/null 2>&1; then
    USE_NEXUS=true
  else
    USE_VERDACCIO=true
  fi
fi

# Registry endpoints and credentials (all overridable via environment).
NEXUS_URL="${NEXUS_URL:-http://localhost:8091}"
NEXUS_NPM_HOSTED="${NEXUS_URL}/repository/npm-hosted/"
NEXUS_USER="${NEXUS_USER:-admin}"
NEXUS_PASS="${NEXUS_PASS:-nexus}"
VERDACCIO_URL="${VERDACCIO_URL:-http://localhost:4873}"

# Packages to patch — version must be the exact fixed version
PATCHES=(
  "minimatch@10.2.4"
  "tar@7.5.11"
)

# Pre-patched local packages (tarball already in deployment/npm-patches/)
# Format: "name@version:filename"
LOCAL_PATCHES=(
  "@esbuild-kit/core-utils@3.3.3-metabuilder.0:esbuild-kit-core-utils-3.3.3-metabuilder.0.tgz"
)

WORK_DIR=$(mktemp -d)
trap 'rm -rf "$WORK_DIR"' EXIT

log()  { echo -e "${GREEN}[npm-patch]${NC} $*"; }
warn() { echo -e "${YELLOW}[npm-patch]${NC} $*"; }
fail() { echo -e "${RED}[npm-patch]${NC} $*"; exit 1; }

# Build a throwaway .npmrc for the chosen registry so the user's real npm
# configuration is never touched.
NPM_RC="$WORK_DIR/.npmrc"
if $USE_NEXUS; then
  log "Using Nexus at $NEXUS_URL..."
  HTTP=$(curl -s -o /dev/null -w "%{http_code}" "$NEXUS_URL/service/rest/v1/status" -u "$NEXUS_USER:$NEXUS_PASS")
  if [ "$HTTP" != "200" ]; then
    fail "Cannot reach Nexus (HTTP $HTTP). Is it running?"
  fi
  log "Nexus is up"
  # npm's legacy _auth field wants base64("user:pass"); the .npmrc registry
  # key is the registry URL with its scheme stripped.
  NEXUS_AUTH=$(echo -n "$NEXUS_USER:$NEXUS_PASS" | base64)
  cat > "$NPM_RC" <<EOF
//$(echo "$NEXUS_NPM_HOSTED" | sed 's|https\?://||'):_auth=$NEXUS_AUTH
EOF
  PUBLISH_REGISTRY="$NEXUS_NPM_HOSTED"
  PUBLISH_ARGS="--userconfig $NPM_RC"
else
  log "Using Verdaccio at $VERDACCIO_URL..."
  HTTP=$(curl -s -o /dev/null -w "%{http_code}" "$VERDACCIO_URL/-/ping")
  if [ "$HTTP" != "200" ]; then
    fail "Cannot reach Verdaccio (HTTP $HTTP). Start with: npx verdaccio --config deployment/verdaccio.yaml"
  fi
  log "Verdaccio is up"
  cat > "$NPM_RC" <<EOF
registry=$VERDACCIO_URL/
//${VERDACCIO_URL#*://}/:_authToken=
EOF
  PUBLISH_REGISTRY="$VERDACCIO_URL"
  PUBLISH_ARGS="--registry $VERDACCIO_URL --userconfig $NPM_RC"
fi

published=0
skipped=0
PATCHES_DIR="$SCRIPT_DIR/npm-patches"

# Publish one tarball and update the published/skipped counters.
#
# Fix over the previous `npm publish … | grep -v "^npm notice"` pipeline:
# under `set -o pipefail` the if-condition took its status from the pipeline,
# so a successful publish whose output was ONLY "npm notice" lines (grep
# exits 1) was miscounted as skipped, and a failed publish whose output
# survived grep was miscounted as published. Capturing the output and testing
# npm's own exit status makes the result authoritative.
publish_tarball() {
  local tarball="$1" label="$2" output
  # $PUBLISH_ARGS is intentionally unquoted: it carries multiple npm flags.
  if output=$(npm publish "$tarball" $PUBLISH_ARGS --tag patched 2>&1); then
    echo "$output" | grep -v "^npm notice" || true
    log "  ${GREEN}Published${NC} $label"
    ((published++)) || true
  else
    echo "$output" | grep -v "^npm notice" || true
    warn "  $label already exists or publish failed, skipping"
    ((skipped++)) || true
  fi
}

# ── Publish pre-patched local tarballs first ─────────────────────────────
for entry in "${LOCAL_PATCHES[@]}"; do
  pkg_spec="${entry%%:*}"
  tarball_name="${entry##*:}"
  pkg_name="${pkg_spec%%@*}"
  # Scoped packages (@scope/name@version) need special handling: the leading
  # '@' makes the %%@* expansion above yield an empty name.
  if [[ "$pkg_spec" == @* ]]; then
    pkg_name="$(echo "$pkg_spec" | cut -d@ -f1-2 | tr -d '@')"
    pkg_name="@${pkg_name}"
    pkg_version="$(echo "$pkg_spec" | cut -d@ -f3)"
  else
    pkg_version="${pkg_spec##*@}"
  fi
  log "Processing local patch $pkg_name@$pkg_version..."
  TARBALL="$PATCHES_DIR/$tarball_name"
  if [ ! -f "$TARBALL" ]; then
    fail "  Patched tarball not found: $TARBALL"
  fi
  log "  Publishing $tarball_name..."
  publish_tarball "$TARBALL" "$pkg_name@$pkg_version"
done

# ── Mirror upstream patch versions from npmjs.org ────────────────────────
for pkg_spec in "${PATCHES[@]}"; do
  pkg_name="${pkg_spec%%@*}"
  pkg_version="${pkg_spec##*@}"
  log "Processing $pkg_name@$pkg_version..."
  # Download from npmjs.org and publish to the local registry
  cd "$WORK_DIR"
  TARBALL=$(npm pack "$pkg_spec" 2>/dev/null)
  if [ ! -f "$TARBALL" ]; then
    fail "  Failed to download $pkg_spec"
  fi
  log "  Publishing $TARBALL..."
  publish_tarball "$TARBALL" "$pkg_name@$pkg_version"
  rm -f "$TARBALL"
done

echo ""
log "Done. published=$published skipped=$skipped"
echo ""
if $USE_NEXUS; then
  log "Nexus npm-group: ${NEXUS_URL}/repository/npm-group/"
else
  log "Verdaccio registry: $VERDACCIO_URL"
fi

127
deployment/push-to-nexus.sh Executable file
View File

@@ -0,0 +1,127 @@
#!/usr/bin/env bash
# Push locally-built MetaBuilder images to the local Nexus registry.
#
# Re-tags ghcr.io/<owner>/<repo>/<image>:<tag> → localhost:5050/<owner>/<repo>/<image>:<tag>
# so act can use REGISTRY=localhost:5050 and pull from Nexus instead of GHCR.
# (Port fixed: earlier docs said 5000, but the script targets 5050.)
#
# Usage:
#   ./push-to-nexus.sh                     # push all images at current git ref
#   ./push-to-nexus.sh --tag main          # push with specific tag
#   ./push-to-nexus.sh --src ghcr.io/... \ # pull from remote first, then push
#                      --pull
#
# Prerequisites:
#   - Nexus running: docker compose -f docker-compose.nexus.yml up -d
#   - localhost:5050 in Docker Desktop insecure-registries
#   - Images already built locally (or use --pull to fetch from GHCR first)
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; NC='\033[0m'

# Target registry and credentials (overridable via environment).
LOCAL_REGISTRY="${LOCAL_REGISTRY:-localhost:5050}"
NEXUS_USER="${NEXUS_USER:-admin}"
NEXUS_PASS="${NEXUS_PASS:-nexus}"

# Derive owner/repo from git remote (matches github.repository format).
# `|| true` keeps `set -eo pipefail` from aborting the script when this is
# run outside a git checkout — the default slug below is the fallback.
REPO_SLUG=$(git -C "$SCRIPT_DIR/.." remote get-url origin 2>/dev/null \
  | sed -E 's|.*github\.com[:/]([^/]+/[^/]+)(\.git)?$|\1|' \
  | tr '[:upper:]' '[:lower:]' || true)
REPO_SLUG="${REPO_SLUG:-johndoe6345789/metabuilder-small}"

SOURCE_REGISTRY="ghcr.io"
TAG=$(git -C "$SCRIPT_DIR/.." rev-parse --abbrev-ref HEAD 2>/dev/null || echo "main")
DO_PULL=false

# Parse args
while [[ $# -gt 0 ]]; do
  case "$1" in
    --tag) TAG="$2"; shift 2 ;;
    --src) SOURCE_REGISTRY="$2"; shift 2 ;;
    --pull) DO_PULL=true; shift ;;
    -h|--help)
      # Help text is the comment header at the top of this file.
      grep '^#' "$0" | sed 's/^# //' | sed 's/^#//'
      exit 0 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

# Base images built by container-base-tier1/2/3
BASE_IMAGES=(
  base-apt
  base-node-deps
  base-pip-deps
  base-conan-deps
  base-android-sdk
  devcontainer
)

# App images built by container-build-apps
APP_IMAGES=(
  pastebin
  workflowui
  codegen
  postgres-dashboard
  emailclient
  exploded-diagrams
  storybook
)

ALL_IMAGES=("${BASE_IMAGES[@]}" "${APP_IMAGES[@]}")

echo -e "${YELLOW}Registry:${NC} $LOCAL_REGISTRY"
echo -e "${YELLOW}Slug:${NC} $REPO_SLUG"
echo -e "${YELLOW}Tag:${NC} $TAG"
echo ""

# Log in to local Nexus
echo -e "${YELLOW}Logging in to $LOCAL_REGISTRY...${NC}"
echo "$NEXUS_PASS" | docker login "$LOCAL_REGISTRY" -u "$NEXUS_USER" --password-stdin

pushed=0
skipped=0
failed=0
for image in "${ALL_IMAGES[@]}"; do
  src="$SOURCE_REGISTRY/$REPO_SLUG/$image:$TAG"
  dst="$LOCAL_REGISTRY/$REPO_SLUG/$image:$TAG"
  if $DO_PULL; then
    echo -e "  ${YELLOW}pulling${NC} $src..."
    if ! docker pull "$src" 2>/dev/null; then
      echo -e "  ${YELLOW}skip${NC} $image (not found in $SOURCE_REGISTRY)"
      ((skipped++)) || true
      continue
    fi
  fi
  # Three cases: src present → re-tag and push; src absent but dst already
  # tagged → push as-is; neither present → skip.
  if ! docker image inspect "$src" >/dev/null 2>&1; then
    if ! docker image inspect "$dst" >/dev/null 2>&1; then
      echo -e "  ${YELLOW}skip${NC} $image (not found locally — build first or use --pull)"
      ((skipped++)) || true
      continue
    fi
    # Already has local tag — just push it
  else
    docker tag "$src" "$dst"
  fi
  echo -e "  ${GREEN}push${NC} $dst"
  if docker push "$dst"; then
    ((pushed++)) || true
  else
    echo -e "  ${RED}FAILED${NC} $image"
    ((failed++)) || true
  fi
done

echo ""
echo -e "${GREEN}Done.${NC} pushed=$pushed skipped=$skipped failed=$failed"
echo ""
echo -e "Run act with:"
echo -e "  act push -j <job> --artifact-server-path /tmp/act-artifacts \\"
echo -e "    --env REGISTRY=$LOCAL_REGISTRY"

87
deployment/release.sh Executable file
View File

@@ -0,0 +1,87 @@
#!/usr/bin/env bash
# Bump patch version, commit, push, and redeploy a MetaBuilder frontend.
#
# Usage:
#   ./release.sh pastebin          Bump patch (0.8.1 → 0.8.2)
#   ./release.sh pastebin minor    Bump minor (0.8.1 → 0.9.0)
#   ./release.sh pastebin major    Bump major (0.8.1 → 1.0.0)
#   ./release.sh pastebin 1.2.3    Set exact version
set -euo pipefail

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.stack.yml"

APP="${1:-}"
BUMP="${2:-patch}"   # patch|minor|major or an explicit x.y.z

if [[ -z "$APP" ]]; then
  echo -e "${RED}Usage: $0 <app> [patch|minor|major|x.y.z]${NC}"
  echo "  Apps: pastebin, workflowui, codegen, emailclient, ..."
  exit 1
fi

# Resolve package.json path — frontends/<app>/ first, then <app>/ at the root.
PKG_PATHS=(
  "$REPO_ROOT/frontends/$APP/package.json"
  "$REPO_ROOT/$APP/package.json"
)
PKG=""
for p in "${PKG_PATHS[@]}"; do
  [[ -f "$p" ]] && PKG="$p" && break
done
if [[ -z "$PKG" ]]; then
  echo -e "${RED}Cannot find package.json for '$APP'${NC}"
  exit 1
fi

# Read current version
CURRENT=$(node -p "require('$PKG').version")

# Compute next version: an explicit x.y.z wins, otherwise bump the part asked for.
if [[ "$BUMP" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  NEXT="$BUMP"
else
  IFS='.' read -r MAJOR MINOR PATCH <<< "$CURRENT"
  case "$BUMP" in
    major) NEXT="$((MAJOR + 1)).0.0" ;;
    minor) NEXT="${MAJOR}.$((MINOR + 1)).0" ;;
    patch) NEXT="${MAJOR}.${MINOR}.$((PATCH + 1))" ;;
    *)
      echo -e "${RED}Unknown bump type '$BUMP'. Use patch, minor, major, or x.y.z${NC}"
      exit 1
      ;;
  esac
fi

echo -e "${CYAN}Releasing $APP: ${YELLOW}$CURRENT${CYAN} → ${GREEN}$NEXT${NC}"

# Update package.json in place, preserving 2-space indent + trailing newline.
node -e "
const fs = require('fs');
const pkg = JSON.parse(fs.readFileSync('$PKG', 'utf8'));
pkg.version = '$NEXT';
fs.writeFileSync('$PKG', JSON.stringify(pkg, null, 2) + '\n');
"

# Commit and push.
# The blank line before Co-Authored-By is required: git only recognizes
# trailers in a block separated from the message body.
cd "$REPO_ROOT"
git add "$PKG"
git commit -m "chore: bump $APP to v$NEXT

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>"
git push origin main

echo -e "${CYAN}Building and deploying $APP...${NC}"
cd "$SCRIPT_DIR"
docker compose -f "$COMPOSE_FILE" up -d --build "$APP"
echo -e "${GREEN}$APP v$NEXT deployed${NC}"

314
deployment/start-stack.sh Executable file
View File

@@ -0,0 +1,314 @@
#!/bin/bash
# MetaBuilder Full Stack Startup Script
#
# Core: nginx gateway, PostgreSQL, MySQL, MongoDB, Redis, Elasticsearch,
# DBAL C++, WorkflowUI, CodeForge, Pastebin, Postgres Dashboard,
# Email Client, Exploded Diagrams, Storybook, Frontend App,
# Postfix, Dovecot, SMTP Relay, Email Service,
# phpMyAdmin, Mongo Express, RedisInsight, Kibana
# Monitoring: Prometheus, Grafana, Loki, Promtail, exporters, Alertmanager
# Media: Media daemon (FFmpeg/radio/retro), Icecast, HLS streaming
#
# Portal: http://localhost (nginx welcome page with links to all apps)
#
# Usage:
# ./start-stack.sh [COMMAND] [--monitoring] [--media] [--all]
set -e

# ANSI color codes used by all status output in this script.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Pull one image, retrying with exponential backoff on failure.
# Returns 0 as soon as a pull succeeds; returns 1 once all attempts are spent.
# (docker pull is cheap when the image is already present and up-to-date.)
pull_with_retry() {
    local image="$1"
    local max_attempts=5
    local delay=5
    local attempt=1
    while ! docker pull "$image" 2>&1; do
        if [ "$attempt" -ge "$max_attempts" ]; then
            echo -e "${RED} Failed to pull $image after $max_attempts attempts${NC}"
            return 1
        fi
        echo -e "${YELLOW} Pull failed (attempt $attempt/$max_attempts), retrying in ${delay}s...${NC}"
        sleep "$delay"
        delay=$((delay * 2))
        attempt=$((attempt + 1))
    done
    return 0
}
# Pre-pull every external (registry-hosted) image required by the requested
# profiles. Locally-built images (dbal, workflowui, etc.) are never pulled.
# Arguments: zero or more profile names ("monitoring", "media").
pull_external_images() {
    local profiles=("$@")
    local core_images=(
        "postgres:15-alpine"
        "redis:7-alpine"
        "docker.elastic.co/elasticsearch/elasticsearch:8.11.0"
        "mysql:8.0"
        "mongo:7.0"
        "phpmyadmin:latest"
        "mongo-express:latest"
        "redis/redisinsight:latest"
        "docker.elastic.co/kibana/kibana:8.11.0"
        "boky/postfix:latest"
        "nginx:alpine"
    )
    local monitoring_images=(
        "prom/prometheus:latest"
        "grafana/grafana:latest"
        "grafana/loki:latest"
        "grafana/promtail:latest"
        "prom/node-exporter:latest"
        "prometheuscommunity/postgres-exporter:latest"
        "oliver006/redis_exporter:latest"
        "gcr.io/cadvisor/cadvisor:latest"
        "prom/alertmanager:latest"
    )
    local media_images=(
        "libretime/icecast:2.4.4"
    )

    # Work out which optional image sets were requested.
    local want_monitoring=false
    local want_media=false
    local p
    for p in "${profiles[@]}"; do
        case "$p" in
            monitoring) want_monitoring=true ;;
            media) want_media=true ;;
        esac
    done

    # Assemble the final pull list: core always, extras per profile.
    local images=("${core_images[@]}")
    $want_monitoring && images+=("${monitoring_images[@]}")
    $want_media && images+=("${media_images[@]}")

    local total=${#images[@]}
    echo -e "${YELLOW}Pre-pulling $total external images (with retry on flaky connections)...${NC}"
    local failed=0
    local i
    for i in "${!images[@]}"; do
        local img="${images[$i]}"
        echo -e " [$(( i + 1 ))/$total] $img"
        pull_with_retry "$img" || failed=$((failed + 1))
    done
    if [ "$failed" -gt 0 ]; then
        echo -e "${RED}Warning: $failed image(s) failed to pull. Stack may be incomplete.${NC}"
    else
        echo -e "${GREEN}All images ready.${NC}"
    fi
    echo ""
}
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.stack.yml"

# Parse arguments: the first bare word becomes COMMAND; --monitoring/--media/
# --all become `--profile <name>` pairs appended to every compose invocation.
COMMAND=""
PROFILES=()
for arg in "$@"; do
    case "$arg" in
        --monitoring) PROFILES+=("--profile" "monitoring") ;;
        --media) PROFILES+=("--profile" "media") ;;
        --all) PROFILES+=("--profile" "monitoring" "--profile" "media") ;;
        *)
            # Only the first non-flag argument is taken as the command;
            # any later bare words are silently ignored.
            if [ -z "$COMMAND" ]; then
                COMMAND="$arg"
            fi
            ;;
    esac
done
COMMAND=${COMMAND:-up}

# Check docker compose (the v2 plugin) is available before doing anything.
if ! docker compose version &> /dev/null; then
    echo -e "${RED}docker compose not found${NC}"
    exit 1
fi

# Dispatch. Every branch except up|start exits here; up|start deliberately
# falls through to the `docker compose up -d` at the bottom of the file.
case "$COMMAND" in
    up|start)
        echo -e "${BLUE}Starting MetaBuilder stack...${NC}"
        # Collect profile names for the image pre-pull
        # (strip the "--profile" markers, keep only the names).
        PROFILE_NAMES=()
        for p in "${PROFILES[@]}"; do
            [[ "$p" == "--profile" ]] && continue
            PROFILE_NAMES+=("$p")
        done
        pull_external_images "${PROFILE_NAMES[@]}"
        ;;
    down|stop)
        echo -e "${YELLOW}Stopping MetaBuilder stack...${NC}"
        docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" down
        echo -e "${GREEN}Stack stopped${NC}"
        exit 0
        ;;
    build)
        echo -e "${YELLOW}Building MetaBuilder stack...${NC}"
        PROFILE_NAMES=()
        for p in "${PROFILES[@]}"; do
            [[ "$p" == "--profile" ]] && continue
            PROFILE_NAMES+=("$p")
        done
        pull_external_images "${PROFILE_NAMES[@]}"
        docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" up -d --build
        echo -e "${GREEN}Stack built and started${NC}"
        exit 0
        ;;
    logs)
        # NOTE(review): $2 here is the raw second CLI argument, so a profile
        # flag in that position (e.g. `logs --monitoring`) would be passed to
        # compose as a service name — confirm intended usage is `logs [svc]`.
        docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" logs -f ${2:-}
        exit 0
        ;;
    restart)
        docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" restart
        echo -e "${GREEN}Stack restarted${NC}"
        exit 0
        ;;
    ps|status)
        docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" ps
        exit 0
        ;;
    clean)
        # Destructive: requires the literal answer "yes" to proceed.
        echo -e "${RED}This will remove all containers and volumes!${NC}"
        read -p "Are you sure? (yes/no): " -r
        if [[ $REPLY == "yes" ]]; then
            docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" down -v
            echo -e "${GREEN}Stack cleaned${NC}"
        fi
        exit 0
        ;;
    help|--help|-h)
        echo "Usage: ./start-stack.sh [COMMAND] [--monitoring] [--media] [--all]"
        echo ""
        echo "Commands:"
        echo " up, start Start the stack (default)"
        echo " build Build and start the stack"
        echo " down, stop Stop the stack"
        echo " restart Restart all services"
        echo " logs [svc] Show logs (optionally for specific service)"
        echo " ps, status Show service status"
        echo " clean Stop and remove all containers and volumes"
        echo " help Show this help message"
        echo ""
        echo "Profiles:"
        echo " --monitoring Add Prometheus, Grafana, Loki, exporters, Alertmanager"
        echo " --media Add media daemon, Icecast, HLS streaming"
        echo " --all Enable all profiles"
        echo ""
        echo "Core services (always started):"
        echo " nginx 80 Gateway + welcome portal (http://localhost)"
        echo " postgres 5432 PostgreSQL database"
        echo " mysql 3306 MySQL database"
        echo " mongodb 27017 MongoDB database"
        echo " redis 6379 Cache layer"
        echo " elasticsearch 9200 Search layer"
        echo " dbal 8080 DBAL C++ backend"
        echo " workflowui 3001 Visual workflow editor (/workflowui)"
        echo " codegen 3002 CodeForge IDE (/codegen)"
        echo " pastebin 3003 Code snippet sharing (/pastebin)"
        echo " postgres-dashboard 3004 PostgreSQL admin (/postgres)"
        echo " emailclient-app 3005 Email client (/emailclient)"
        echo " exploded-diagrams 3006 3D diagram viewer (/diagrams)"
        echo " storybook 3007 Component library (/storybook)"
        echo " frontend-app 3008 Main application (/app)"
        echo " phpmyadmin 8081 MySQL admin (/phpmyadmin/)"
        echo " mongo-express 8082 MongoDB admin (/mongo-express/)"
        echo " redisinsight 8083 Redis admin (/redis-insight/)"
        echo " kibana 5601 Elasticsearch admin (/kibana/)"
        echo " postfix 1025 SMTP relay"
        echo " dovecot 1143 IMAP/POP3"
        echo " smtp-relay 2525 SMTP relay (dashboard: 8025)"
        echo " email-service 8500 Flask email API"
        echo ""
        echo "Monitoring services (--monitoring):"
        echo " prometheus 9090 Metrics"
        echo " grafana 3009 Dashboards"
        echo " loki 3100 Log aggregation"
        echo " promtail - Log shipper"
        echo " node-exporter 9100 Host metrics"
        echo " postgres-exporter 9187 DB metrics"
        echo " redis-exporter 9121 Cache metrics"
        echo " cadvisor 8084 Container metrics"
        echo " alertmanager 9093 Alert routing"
        echo ""
        echo "Media services (--media):"
        echo " media-daemon 8090 FFmpeg, radio, retro gaming"
        echo " icecast 8000 Radio streaming"
        echo " nginx-stream 8088 HLS/DASH streaming"
        exit 0
        ;;
    *)
        echo -e "${RED}Unknown command: $COMMAND${NC}"
        echo "Run './start-stack.sh help' for usage"
        exit 1
        ;;
esac

# Start (reached only by up|start; all other branches exited above)
docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" up -d
echo ""
echo -e "${GREEN}Stack started!${NC}"
echo ""
# Count expected healthy services
# Core: postgres, redis, elasticsearch, mysql, mongodb (5)
# Admin tools: phpmyadmin, mongo-express, redisinsight, kibana (4)
# Backend: dbal, email-service (2)
# Mail: postfix, dovecot, smtp-relay (3)
# Gateway: nginx (1)
# Apps: workflowui, codegen, pastebin, postgres-dashboard, emailclient-app,
#       exploded-diagrams, storybook, frontend-app (8)
# Total: 23
CORE_COUNT=23
PROFILE_INFO="core"
# NOTE(review): passing both --monitoring and --media adds 9 + 3, but
# PROFILE_INFO keeps only the last flag's label; combining either flag with
# --all double-counts — confirm flags are meant to be mutually exclusive.
for arg in "$@"; do
    case "$arg" in
        --monitoring) CORE_COUNT=$((CORE_COUNT + 9)); PROFILE_INFO="core + monitoring" ;;
        --media) CORE_COUNT=$((CORE_COUNT + 3)); PROFILE_INFO="core + media" ;;
        --all) CORE_COUNT=$((CORE_COUNT + 12)); PROFILE_INFO="core + monitoring + media" ;;
    esac
done
echo -e "${YELLOW}Waiting for services ($PROFILE_INFO)...${NC}"

# Poll compose health status every 2s until CORE_COUNT services report
# "healthy" or the timeout elapses.
MAX_WAIT=120
ELAPSED=0
while [ $ELAPSED -lt $MAX_WAIT ]; do
    # grep -c exits 1 on zero matches; `|| true` keeps set -e from aborting.
    HEALTHY=$(docker compose -f "$COMPOSE_FILE" "${PROFILES[@]}" ps --format json 2>/dev/null | grep -c '"healthy"' || true)
    if [ "$HEALTHY" -ge "$CORE_COUNT" ]; then
        echo -e "\n${GREEN}All $CORE_COUNT services healthy!${NC}"
        echo ""
        echo -e "Portal: ${BLUE}http://localhost${NC}"
        echo ""
        echo "Quick commands:"
        echo " ./start-stack.sh logs View all logs"
        echo " ./start-stack.sh logs dbal View DBAL logs"
        echo " ./start-stack.sh stop Stop the stack"
        echo " ./start-stack.sh restart Restart services"
        exit 0
    fi
    echo -ne "\r Services healthy: $HEALTHY/$CORE_COUNT (${ELAPSED}s)"
    sleep 2
    ELAPSED=$((ELAPSED + 2))
done

echo ""
echo -e "${YELLOW}Timeout waiting for all services. Check with:${NC}"
echo " ./start-stack.sh status"
echo " ./start-stack.sh logs"

View File

@@ -4,12 +4,12 @@
#
# Usage:
# Local dev: npx verdaccio --config deployment/verdaccio.yaml &
# Compose: docker compose -f compose.yml up verdaccio
# Compose: docker compose -f docker-compose.stack.yml up verdaccio
# CI: uses inline config with /tmp/verdaccio-storage
# Then: bash deployment/publish-npm-patches.sh --verdaccio
# .npmrc already points @esbuild-kit:registry to localhost:4873
# Docker container path (volume-mounted in compose.yml).
# Docker container path (volume-mounted in docker-compose.stack.yml).
# For local dev, use the CI composite action or npx verdaccio (default config).
storage: /verdaccio/storage
uplinks:

View File

@@ -8,7 +8,7 @@ import { test, expect } from '@playwright/test'
* Playwright dev servers bind to 0.0.0.0 so nginx can proxy via host.docker.internal.
*
* Local:
* cd deployment && docker compose -f compose.yml up -d
* cd deployment && docker compose -f docker-compose.stack.yml up -d
* PLAYWRIGHT_BASE_URL=http://localhost/workflowui/ npx playwright test deployment-smoke
*/

View File

@@ -13,20 +13,6 @@ const __dirname = dirname(fileURLToPath(import.meta.url))
let environment: Awaited<ReturnType<DockerComposeEnvironment['up']>> | undefined
async function waitForServer(url: string, timeoutMs = 60000): Promise<void> {
const deadline = Date.now() + timeoutMs
while (Date.now() < deadline) {
try {
const res = await fetch(url, { method: 'GET' })
if (res.ok || res.status === 401 || res.status === 405) return // server is up
} catch {
// not ready yet
}
await new Promise(r => setTimeout(r, 1000))
}
throw new Error(`Server at ${url} did not become ready within ${timeoutMs}ms`)
}
async function globalSetup() {
// ── 1. Start smoke stack via Testcontainers ──────────────────────────────
console.log('[setup] Starting smoke stack via Testcontainers...')
@@ -46,14 +32,14 @@ async function globalSetup() {
;(globalThis as Record<string, unknown>).__TESTCONTAINERS_ENV__ = environment
// ── 2. Wait for dev servers (started by Playwright webServer config) ─────
await new Promise(resolve => setTimeout(resolve, 2000))
// ── 3. Seed database ────────────────────────────────────────────────────
// workflowui uses basePath: '/workflowui', so the setup route is at /workflowui/api/setup
const setupUrl = process.env.PLAYWRIGHT_BASE_URL
? new URL('/workflowui/api/setup', process.env.PLAYWRIGHT_BASE_URL.replace(/\/workflowui\/?$/, '')).href
: 'http://localhost:3000/workflowui/api/setup'
await waitForServer(setupUrl)
try {
const response = await fetch(setupUrl, { method: 'POST' })
if (!response.ok) {

View File

@@ -1,27 +0,0 @@
FROM node:24-alpine AS base
FROM base AS deps
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY package.json ./
RUN npm install
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN mkdir -p public && npm run build
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
COPY --from=builder /app/public ./public
RUN mkdir .next && chown nextjs:nodejs .next
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
USER nextjs
EXPOSE 3000
ENV PORT=3000 HOSTNAME="0.0.0.0"
CMD ["node", "server.js"]

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.7 KiB

View File

@@ -1,35 +0,0 @@
*,
*::before,
*::after {
box-sizing: border-box;
margin: 0;
padding: 0;
}
:root {
--bg: #0a0a0a;
--bg-card: #141414;
--text: #ededed;
--text-muted: #a0a0a0;
--accent: #10b981;
--accent-hover: #059669;
--border: #262626;
}
html,
body {
background: var(--bg);
color: var(--text);
font-family: var(--font-inter), system-ui, -apple-system, sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
a {
color: var(--accent);
text-decoration: none;
}
a:hover {
text-decoration: underline;
}

View File

@@ -1,4 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32">
<rect width="32" height="32" rx="6" fill="#10b981"/>
<text x="16" y="22" text-anchor="middle" font-family="system-ui,sans-serif" font-weight="700" font-size="14" fill="#fff">CF</text>
</svg>

Before

Width:  |  Height:  |  Size: 256 B

View File

@@ -1,26 +0,0 @@
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";
const inter = Inter({
variable: "--font-inter",
subsets: ["latin"],
});
export const metadata: Metadata = {
title: "CaproverForge - Mobile Admin for CapRover",
description:
"Mobile admin panel for CapRover. Deploy apps, manage domains, monitor containers, and configure your PaaS from your phone.",
};
export default function RootLayout({
children,
}: Readonly<{
children: React.ReactNode;
}>) {
return (
<html lang="en">
<body className={inter.variable}>{children}</body>
</html>
);
}

View File

@@ -1,151 +0,0 @@
const features = [
"One-click app deployment",
"Domain management",
"Container monitoring",
"Environment variables",
"SSL certificate management",
"Real-time logs",
];
export default function Home() {
return (
<main style={{ maxWidth: 640, margin: "0 auto", padding: "64px 24px" }}>
{/* App Icon */}
<div
style={{
width: 96,
height: 96,
borderRadius: 24,
background: "linear-gradient(135deg, #10b981 0%, #059669 100%)",
margin: "0 auto 24px",
}}
/>
{/* Title */}
<h1
style={{
fontSize: 36,
fontWeight: 700,
textAlign: "center",
marginBottom: 16,
}}
>
CaproverForge
</h1>
{/* Description */}
<p
style={{
fontSize: 18,
lineHeight: 1.6,
color: "var(--text-muted)",
textAlign: "center",
marginBottom: 48,
}}
>
Mobile admin panel for CapRover. Deploy apps, manage domains, monitor
containers, and configure your PaaS &mdash; all from your phone.
</p>
{/* Download Button */}
<div style={{ textAlign: "center", marginBottom: 56 }}>
<a
href="https://github.com/nicholasgriffintn/caproverforge/releases/latest"
target="_blank"
rel="noopener noreferrer"
style={{
display: "inline-block",
background: "var(--accent)",
color: "#fff",
fontSize: 18,
fontWeight: 600,
padding: "14px 36px",
borderRadius: 12,
textDecoration: "none",
}}
>
Download APK
</a>
</div>
{/* Features */}
<section style={{ marginBottom: 56 }}>
<h2 style={{ fontSize: 22, fontWeight: 600, marginBottom: 20 }}>
Features
</h2>
<ul
style={{
listStyle: "none",
display: "grid",
gap: 12,
}}
>
{features.map((feature) => (
<li
key={feature}
style={{
padding: "14px 18px",
background: "var(--bg-card)",
border: "1px solid var(--border)",
borderRadius: 10,
fontSize: 15,
}}
>
<span style={{ color: "var(--accent)", marginRight: 10 }}>
&#10003;
</span>
{feature}
</li>
))}
</ul>
</section>
{/* Build from Source */}
<section>
<h2 style={{ fontSize: 22, fontWeight: 600, marginBottom: 16 }}>
Build from Source
</h2>
<div
style={{
background: "var(--bg-card)",
border: "1px solid var(--border)",
borderRadius: 10,
padding: 20,
}}
>
<pre
style={{
fontSize: 14,
lineHeight: 1.7,
overflowX: "auto",
color: "var(--text-muted)",
}}
>
{`git clone https://github.com/nicholasgriffintn/caproverforge.git
cd caproverforge
./gradlew assembleDebug`}
</pre>
<p
style={{
fontSize: 13,
color: "var(--text-muted)",
marginTop: 14,
}}
>
Requires Android SDK and JDK 17+. The built APK will be at{" "}
<code
style={{
background: "var(--bg)",
padding: "2px 6px",
borderRadius: 4,
fontSize: 12,
}}
>
app/build/outputs/apk/debug/
</code>
</p>
</div>
</section>
</main>
);
}

View File

@@ -1,8 +0,0 @@
import type { NextConfig } from "next";
const nextConfig: NextConfig = {
output: "standalone",
basePath: "/caproverforge",
};
export default nextConfig;

View File

@@ -1,21 +0,0 @@
{
"name": "caproverforge-portal",
"version": "1.0.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start"
},
"dependencies": {
"next": "^16.0.0",
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"devDependencies": {
"@types/node": "^22.0.0",
"@types/react": "^19.0.0",
"@types/react-dom": "^19.0.0",
"typescript": "^5.7.0"
}
}

View File

@@ -1,27 +0,0 @@
{
"compilerOptions": {
"target": "ES2017",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"plugins": [
{
"name": "next"
}
],
"paths": {
"@/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}

View File

@@ -12,7 +12,6 @@ add_executable(metabuilder-cli
src/commands/command_dispatch.cpp
src/commands/dbal_commands.cpp
src/commands/package_commands.cpp
src/commands/workflow_commands.cpp
src/lua/lua_runner.cpp
src/utils/http_client.cpp
)

View File

@@ -1,31 +0,0 @@
# Build the metabuilder-cli C++ binary
# Context: monorepo root (..)
# Requires: metabuilder/base-conan-deps:latest base image
ARG BASE_REGISTRY=metabuilder
FROM ${BASE_REGISTRY}/base-conan-deps:latest AS builder
# Copy CLI source
COPY frontends/cli/ /app/cli/
# Install Conan deps and build
WORKDIR /app/cli
RUN conan install . \
--output-folder=build \
--build=missing \
-s build_type=Release \
-c tools.system.package_manager:mode=install \
&& cmake -S . -B build -G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_TOOLCHAIN_FILE=build/conan_toolchain.cmake \
&& cmake --build build --config Release
# Minimal runtime image
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
libssl3 ca-certificates \
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/cli/build/metabuilder-cli /usr/local/bin/metabuilder-cli
ENTRYPOINT ["metabuilder-cli"]

View File

@@ -1,7 +1,6 @@
#include "command_dispatch.h"
#include "dbal_commands.h"
#include "package_commands.h"
#include "workflow_commands.h"
#include <cpr/cpr.h>
#include <iostream>
@@ -19,7 +18,6 @@ Available commands:
tenant get <tenantId> Get a tenant by ID
dbal <subcommand> DBAL operations (use 'dbal help' for details)
package <subcommand> Package operations (use 'package help' for details)
workflow <subcommand> Workflow operations (use 'workflow help' for details)
)";
}
@@ -138,11 +136,7 @@ int dispatch(const HttpClient &client, const std::vector<std::string> &args) {
}
if (args[0] == "package") {
return handle_package(client, args);
}
if (args[0] == "workflow") {
return handle_workflow(client, args);
return handle_package(args);
}
print_help();

View File

@@ -1,30 +1,17 @@
#include "package_commands.h"
#include "../lua/lua_runner.h"
#include <cpr/cpr.h>
#include <nlohmann/json.hpp>
#include <algorithm>
#include <cstdlib>
#include <filesystem>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>
namespace fs = std::filesystem;
using json = nlohmann::json;
namespace {
void print_response(const cpr::Response &response) {
std::cout << "status: " << response.status_code << '\n';
if (response.error) {
std::cout << "error: " << response.error.message << '\n';
}
std::cout << response.text << '\n';
}
void print_package_help() {
std::cout << R"(Usage: metabuilder-cli package <command> [options]
@@ -32,10 +19,6 @@ Commands:
list List available packages with scripts
run <package> <script> [args] Run a Lua script from a package
generate <package_id> [opts] Generate a new package
install <package_id> Install a package via DBAL
uninstall <package_id> Uninstall a package via DBAL
info <package_id> Show package details from DBAL
search <query> Search available packages
Generate options:
--name <name> Display name (default: derived from package_id)
@@ -56,10 +39,6 @@ Examples:
metabuilder-cli package list
metabuilder-cli package run codegen_studio package_template
metabuilder-cli package generate my_forum --category social --with-schema --entities Thread,Post
metabuilder-cli package install forum
metabuilder-cli package uninstall forum
metabuilder-cli package info forum
metabuilder-cli package search social
)";
}
@@ -77,7 +56,7 @@ fs::path find_packages_dir() {
// Try relative to executable
// (would need to pass argv[0] for this)
return {};
}
@@ -98,16 +77,16 @@ std::vector<std::string> split_csv(const std::string& str) {
int handle_list(const fs::path& packages_dir) {
std::cout << "Available packages with scripts:\n\n";
int count = 0;
for (const auto& entry : fs::directory_iterator(packages_dir)) {
if (!entry.is_directory()) continue;
auto scripts_path = entry.path() / "seed" / "scripts";
if (!fs::exists(scripts_path)) continue;
std::cout << " " << entry.path().filename().string() << "\n";
// List available scripts/modules
for (const auto& script : fs::directory_iterator(scripts_path)) {
if (script.is_directory()) {
@@ -124,11 +103,11 @@ int handle_list(const fs::path& packages_dir) {
}
++count;
}
if (count == 0) {
std::cout << " (no packages with scripts found)\n";
}
return 0;
}
@@ -143,7 +122,7 @@ int handle_run(const fs::path& packages_dir, const std::vector<std::string>& arg
std::string func_name = args.size() > 4 ? args[4] : "main";
lua::LuaRunner runner(packages_dir);
if (!runner.load_module(package_id, script_name)) {
std::cerr << "Error: " << runner.last_error() << "\n";
return 1;
@@ -161,7 +140,7 @@ int handle_run(const fs::path& packages_dir, const std::vector<std::string>& arg
}
auto result = runner.call(func_name, config);
if (!result.success) {
std::cerr << "Error: " << result.error << "\n";
return 1;
@@ -181,7 +160,7 @@ int handle_generate(const fs::path& packages_dir, const std::vector<std::string>
}
const auto& package_id = args[2];
// Validate package_id format
if (package_id.empty() || !std::isalpha(package_id[0])) {
std::cerr << "Error: package_id must start with a letter\n";
@@ -207,13 +186,13 @@ int handle_generate(const fs::path& packages_dir, const std::vector<std::string>
config["withSchema"] = false;
config["withTests"] = true;
config["withComponents"] = false;
bool dry_run = false;
std::string output_dir = packages_dir.string();
for (size_t i = 3; i < args.size(); ++i) {
const auto& arg = args[i];
if (arg == "--name" && i + 1 < args.size()) {
config["name"] = args[++i];
} else if (arg == "--description" && i + 1 < args.size()) {
@@ -245,7 +224,7 @@ int handle_generate(const fs::path& packages_dir, const std::vector<std::string>
// Load package_template module from codegen_studio
lua::LuaRunner runner(packages_dir);
if (!runner.load_module("codegen_studio", "package_template")) {
std::cerr << "Error: Could not load package_template module\n";
std::cerr << " " << runner.last_error() << "\n";
@@ -265,7 +244,7 @@ int handle_generate(const fs::path& packages_dir, const std::vector<std::string>
// Generate files
auto result = runner.call("generate", config);
if (!result.success) {
std::cerr << "Error generating package: " << result.error << "\n";
return 1;
@@ -299,179 +278,31 @@ int handle_generate(const fs::path& packages_dir, const std::vector<std::string>
for (const auto& file : result.files) {
fs::path full_path = package_path / file.path;
fs::path dir = full_path.parent_path();
if (!dir.empty() && !fs::exists(dir)) {
fs::create_directories(dir);
}
std::ofstream out(full_path, std::ios::binary);
if (!out) {
std::cerr << " Error writing: " << file.path << "\n";
continue;
}
out << file.content;
out.close();
std::cout << " Created: " << file.path << "\n";
++written;
}
std::cout << "\nPackage '" << package_id << "' created successfully!\n";
std::cout << "\nPackage '" << package_id << "' created successfully!\n";
std::cout << " Files: " << written << "\n";
std::cout << "\nNext steps:\n";
std::cout << " 1. Review generated files in " << package_path << "\n";
std::cout << " 2. Add package-specific logic to seed/scripts/\n";
std::cout << " 3. Run: npm run packages:index\n";
return 0;
}
/**
 * @brief Print a package list from DBAL as a formatted table.
 *
 * Accepts either a bare JSON array or an envelope of the form
 * {"data": [...]}. On a non-200 status, an unrecognized payload shape,
 * or a JSON parse failure it falls back to print_response() so the
 * caller still sees the raw server output.
 *
 * @param response HTTP response from the DBAL package endpoint.
 */
void print_package_table(const cpr::Response &response) {
    if (response.status_code != 200) {
        print_response(response);
        return;
    }
    try {
        auto data = json::parse(response.text);
        json items;
        if (data.is_array()) {
            items = data;
        } else if (data.contains("data") && data["data"].is_array()) {
            items = data["data"];
        } else {
            // Unrecognized payload shape; show the raw body instead.
            print_response(response);
            return;
        }
        if (items.empty()) {
            std::cout << "No packages found.\n";
            return;
        }
        // Column widths for the fixed-width table layout.
        constexpr std::size_t kIdW = 24, kNameW = 28, kCatW = 14, kStatW = 10;
        // std::setw pads but never truncates, so clip oversized values
        // to keep the columns aligned.
        auto fit = [](std::string value, std::size_t width) {
            if (width > 4 && value.size() >= width) {
                value = value.substr(0, width - 4) + "...";
            }
            return value;
        };
        // Header
        std::cout << '\n';
        std::cout << " " << std::left
                  << std::setw(kIdW) << "ID"
                  << std::setw(kNameW) << "NAME"
                  << std::setw(kCatW) << "CATEGORY"
                  << std::setw(kStatW) << "STATUS"
                  << "VERSION" << '\n';
        // Separator spans all fixed columns plus the VERSION heading,
        // instead of a hard-coded length that can drift out of sync.
        std::cout << " " << std::string(kIdW + kNameW + kCatW + kStatW + 7, '-') << '\n';
        for (const auto &pkg : items) {
            // Tolerate both canonical and legacy field names.
            std::string id = pkg.value("id", pkg.value("packageId", "-"));
            std::string name = pkg.value("name", pkg.value("title", "-"));
            std::string category = pkg.value("category", "-");
            std::string status = pkg.value("status", pkg.value("state", "-"));
            std::string version = pkg.value("version", "-");
            std::cout << " " << std::left
                      << std::setw(kIdW) << fit(id, kIdW)
                      << std::setw(kNameW) << fit(name, kNameW)
                      << std::setw(kCatW) << fit(category, kCatW)
                      << std::setw(kStatW) << fit(status, kStatW)
                      << version << '\n';
        }
        std::cout << '\n' << items.size() << " package(s) found.\n";
    } catch (const json::exception &) {
        print_response(response);
    }
}
int handle_install(const HttpClient &client, const std::vector<std::string> &args) {
if (args.size() < 3) {
std::cerr << "Usage: metabuilder-cli package install <package_id>\n";
return 1;
}
std::string package_id = args[2];
std::string body = "{\"packageId\":\"" + package_id + "\",\"action\":\"install\"}";
std::cout << "Installing package: " << package_id << "\n";
auto response = client.post("/api/dbal/package/" + package_id + "/install", body);
if (response.status_code >= 200 && response.status_code < 300) {
std::cout << "[OK] Package '" << package_id << "' installed successfully.\n";
try {
auto data = json::parse(response.text);
if (data.contains("version")) {
std::cout << " version: " << data["version"].get<std::string>() << '\n';
}
if (data.contains("dependencies")) {
std::cout << " dependencies resolved: " << data["dependencies"].size() << '\n';
}
} catch (const json::exception &) {
// Ignore parse errors for status output
}
} else {
std::cout << "[!!] Failed to install package '" << package_id << "'.\n";
print_response(response);
}
return response.status_code >= 200 && response.status_code < 300 ? 0 : 1;
}
int handle_uninstall(const HttpClient &client, const std::vector<std::string> &args) {
if (args.size() < 3) {
std::cerr << "Usage: metabuilder-cli package uninstall <package_id>\n";
return 1;
}
std::string package_id = args[2];
std::cout << "Uninstalling package: " << package_id << "\n";
auto response = client.post("/api/dbal/package/" + package_id + "/uninstall",
"{\"packageId\":\"" + package_id + "\",\"action\":\"uninstall\"}");
if (response.status_code >= 200 && response.status_code < 300) {
std::cout << "[OK] Package '" << package_id << "' uninstalled successfully.\n";
} else {
std::cout << "[!!] Failed to uninstall package '" << package_id << "'.\n";
print_response(response);
}
return response.status_code >= 200 && response.status_code < 300 ? 0 : 1;
}
/**
 * @brief Show detailed information for a single package.
 *
 * Fetches /api/dbal/package/<id> and pretty-prints the JSON body with
 * two-space indentation; falls back to raw output if parsing fails.
 *
 * @param client HTTP client bound to the DBAL server.
 * @param args   CLI args; args[2] must be the package id.
 * @return 0 on success (HTTP 200), 1 otherwise.
 */
int handle_info(const HttpClient &client, const std::vector<std::string> &args) {
    if (args.size() < 3) {
        std::cerr << "Usage: metabuilder-cli package info <package_id>\n";
        return 1;
    }
    const std::string &pkg_id = args[2];
    auto resp = client.get("/api/dbal/package/" + pkg_id);
    if (resp.status_code != 200) {
        print_response(resp);
        return 1;
    }
    try {
        // Pretty-print the payload; dump(2) uses two-space indentation.
        std::cout << json::parse(resp.text).dump(2) << '\n';
    } catch (const json::exception &) {
        // Non-JSON body: show it verbatim.
        print_response(resp);
    }
    return 0;
}
/**
 * @brief Search packages by free-text query.
 *
 * The query is percent-encoded before being placed in the URL so that
 * spaces and reserved characters ('&', '#', '=', ...) cannot corrupt
 * the request or be interpreted as extra query parameters.
 *
 * @param client HTTP client bound to the DBAL server.
 * @param args   CLI args; args[2] must be the search query.
 * @return 0 (search results, even empty, are not an error), 1 on usage error.
 */
int handle_search(const HttpClient &client, const std::vector<std::string> &args) {
    if (args.size() < 3) {
        std::cerr << "Usage: metabuilder-cli package search <query>\n";
        return 1;
    }
    const std::string &query = args[2];
    // Minimal RFC 3986 percent-encoding: keep unreserved characters,
    // escape everything else as %XX. Explicit range checks avoid a
    // dependency on <cctype> and any locale sensitivity.
    static const char *const kHex = "0123456789ABCDEF";
    std::string encoded;
    encoded.reserve(query.size());
    for (unsigned char c : query) {
        const bool unreserved =
            (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') ||
            (c >= '0' && c <= '9') ||
            c == '-' || c == '_' || c == '.' || c == '~';
        if (unreserved) {
            encoded += static_cast<char>(c);
        } else {
            encoded += '%';
            encoded += kHex[c >> 4];
            encoded += kHex[c & 0x0F];
        }
    }
    print_package_table(client.get("/api/dbal/package?search=" + encoded));
    return 0;
}
@@ -479,59 +310,30 @@ int handle_search(const HttpClient &client, const std::vector<std::string> &args
namespace commands {
int handle_package(const HttpClient &client, const std::vector<std::string>& args) {
int handle_package(const std::vector<std::string>& args) {
if (args.size() < 2 || args[1] == "help" || args[1] == "--help") {
print_package_help();
return 0;
}
auto packages_dir = find_packages_dir();
if (packages_dir.empty()) {
std::cerr << "Error: Could not find packages directory\n";
std::cerr << "Run from the MetaBuilder project root or set METABUILDER_PACKAGES\n";
return 1;
}
const auto& subcommand = args[1];
// DBAL-backed commands (require HTTP client)
if (subcommand == "install") {
return handle_install(client, args);
}
if (subcommand == "uninstall") {
return handle_uninstall(client, args);
}
if (subcommand == "info") {
return handle_info(client, args);
}
if (subcommand == "search") {
return handle_search(client, args);
}
// Local filesystem commands
if (subcommand == "list") {
auto packages_dir = find_packages_dir();
if (packages_dir.empty()) {
std::cerr << "Error: Could not find packages directory\n";
std::cerr << "Run from the MetaBuilder project root or set METABUILDER_PACKAGES\n";
return 1;
}
return handle_list(packages_dir);
}
if (subcommand == "run") {
auto packages_dir = find_packages_dir();
if (packages_dir.empty()) {
std::cerr << "Error: Could not find packages directory\n";
std::cerr << "Run from the MetaBuilder project root or set METABUILDER_PACKAGES\n";
return 1;
}
return handle_run(packages_dir, args);
}
if (subcommand == "generate") {
auto packages_dir = find_packages_dir();
if (packages_dir.empty()) {
std::cerr << "Error: Could not find packages directory\n";
std::cerr << "Run from the MetaBuilder project root or set METABUILDER_PACKAGES\n";
return 1;
}
return handle_generate(packages_dir, args);
}

Some files were not shown because too many files have changed in this diff Show More