feat(packagerepo): integrate with root MetaBuilder workflow system

Add workflow integration that imports from the root project instead of copying it.
This maintains single source of truth for workflow system.

Changes:
- workflow_loader.py imports from /workflow/executor/python/
- Uses plugins from /workflow/plugins/python/
- Created /workflow/plugins/python/packagerepo/ for app-specific plugins
- Created publish_artifact.json workflow definition

Architecture:
- Packagerepo imports workflow system, doesn't copy it
- Shared plugins (math, string, logic) available to all apps
- App-specific plugins go in root workflow/plugins/python/{app}/

Benefits:
- Single workflow engine for entire metabuilder project
- Updates to workflow system benefit all apps automatically
- Consistent workflow format across frontend and backend

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-22 15:06:26 +00:00
parent 7ce8b4ae8a
commit 233005f45b
4 changed files with 291 additions and 0 deletions

View File

@@ -0,0 +1,57 @@
# Package Repository Workflow Refactoring
## Overview
Converting the packagerepo backend to use the MetaBuilder workflow system from the root project.
## Architecture
**Packagerepo does NOT copy workflow code.** Instead, it:
1. Imports workflow executor from `/workflow/executor/python/`
2. Uses plugins from `/workflow/plugins/python/`
3. Adds packagerepo-specific plugins to `/workflow/plugins/python/packagerepo/`
4. Stores workflow definitions in `/packagerepo/backend/workflows/`
## Benefits
- **Single source of truth** - One workflow system for entire metabuilder project
- **Shared plugins** - Packagerepo uses same math, string, logic plugins as other apps
- **Easy updates** - Improvements to workflow system benefit all apps
- **Consistent patterns** - Same workflow format across frontend and backend
## File Structure
```
metabuilder/
├── workflow/
│ ├── executor/python/ # Workflow engine (used by packagerepo)
│ └── plugins/python/
│ ├── math/ # Shared plugins
│ ├── string/
│ ├── logic/
│ └── packagerepo/ # Packagerepo-specific plugins
│ ├── auth_verify_jwt/
│ ├── kv_get/
│ ├── blob_put/
│ └── ...
└── packagerepo/
└── backend/
├── workflow_loader.py # Imports from root workflow system
└── workflows/ # Workflow definitions
├── publish_artifact.json
└── download_artifact.json
```
## Next Steps
1. Create packagerepo plugins in `/workflow/plugins/python/packagerepo/`
2. Update Flask app.py to use workflow_loader
3. Test publish endpoint
4. Convert remaining endpoints
## Dependencies
Packagerepo only needs to:
- Import from parent metabuilder project
- Define workflow JSON files
- Create packagerepo-specific plugins in root workflow system

View File

@@ -0,0 +1,109 @@
"""
Workflow Loader for Package Repository
Integrates with MetaBuilder workflow system from root project.
"""
import json
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Tuple

from flask import Request, Response, jsonify
# Make the root MetaBuilder workflow executor importable without copying it.
# This file lives at <root>/packagerepo/backend/workflow_loader.py, so three
# .parent hops reach the repository root — TODO confirm if the backend layout
# ever changes depth.
METABUILDER_ROOT = Path(__file__).parent.parent.parent
sys.path.insert(0, str(METABUILDER_ROOT / "workflow" / "executor" / "python"))
# Resolved via the path injected above, not from a local copy of the engine.
from executor import WorkflowExecutor
class WorkflowLoader:
    """Loads and executes MetaBuilder workflow definitions for Flask endpoints.

    Workflow JSON files are read from ``workflows_dir``, cached in memory by
    name, and executed via the shared root-project ``WorkflowExecutor`` using
    the shared plugin directory.
    """

    def __init__(self, workflows_dir: Path, config: Dict[str, Any]):
        """
        Args:
            workflows_dir: Directory containing ``<name>.json`` workflow files.
            config: Application configuration, exposed to workflows under the
                ``config`` key of the execution context.
        """
        self.workflows_dir = workflows_dir
        self.config = config
        # Parsed workflow definitions, keyed by workflow name.
        self.workflows_cache: Dict[str, Dict] = {}
        # Plugins come from the root metabuilder project, not a local copy.
        plugins_dir = METABUILDER_ROOT / "workflow" / "plugins" / "python"
        self.executor = WorkflowExecutor(str(plugins_dir))

    def load_workflow(self, workflow_name: str) -> Dict[str, Any]:
        """Load a workflow definition from cache or filesystem.

        Args:
            workflow_name: Base name of the workflow (without ``.json``).

        Returns:
            The parsed workflow definition dict.

        Raises:
            FileNotFoundError: If no ``<workflow_name>.json`` file exists.
            json.JSONDecodeError: If the definition file is not valid JSON.
        """
        try:
            return self.workflows_cache[workflow_name]
        except KeyError:
            pass
        workflow_path = self.workflows_dir / f"{workflow_name}.json"
        if not workflow_path.exists():
            raise FileNotFoundError(f"Workflow {workflow_name} not found")
        # Explicit encoding so JSON reads don't depend on the platform locale.
        with open(workflow_path, encoding="utf-8") as f:
            workflow = json.load(f)
        self.workflows_cache[workflow_name] = workflow
        return workflow

    def execute_workflow_for_request(
        self,
        workflow_name: str,
        request: Request,
        additional_context: Optional[Dict[str, Any]] = None
    ) -> Tuple[Response, int]:
        """
        Execute a workflow for a Flask request.

        Args:
            workflow_name: Name of the workflow to execute
            request: Flask request object
            additional_context: Additional context (kv_store, config, etc.)

        Returns:
            A ``(response, status_code)`` tuple, as accepted by Flask views.
        """
        workflow = self.load_workflow(workflow_name)
        # Snapshot the request into a plain dict so plugins never touch the
        # Flask request proxy directly.
        context = {
            "request": {
                "path": request.path,
                "method": request.method,
                "headers": dict(request.headers),
                # get_data() caches the raw body, so the get_json() call
                # below still sees the same stream.
                "body": request.get_data(),
                "content_length": request.content_length,
                "args": dict(request.args),
                "json": request.get_json(silent=True),
            },
            "config": self.config,
            **(additional_context or {})
        }
        try:
            # Execute workflow using the shared MetaBuilder executor.
            result = self.executor.execute(workflow, context)
        except Exception as e:
            # Top-level boundary: surface any workflow failure as a JSON 500
            # instead of leaking a traceback to the client.
            return jsonify({
                "ok": False,
                "error": {
                    "message": str(e),
                    "code": "WORKFLOW_ERROR"
                }
            }), 500
        # A workflow may produce an explicit HTTP response envelope.
        # NOTE(review): this reads "status_code", while publish_artifact.json's
        # respond_json node emits "status" — confirm the executor/plugin
        # normalizes that key, otherwise non-200 statuses are silently lost.
        if "response" in result:
            response_data = result["response"]
            return jsonify(response_data.get("body", {})), response_data.get("status_code", 200)
        # Default success envelope when the workflow returns raw data.
        return jsonify({"ok": True, "result": result}), 200
def create_workflow_loader(config: Dict[str, Any]) -> WorkflowLoader:
    """Build a WorkflowLoader rooted at this backend's ``workflows/`` folder."""
    workflows_dir = Path(__file__).parent / "workflows"
    # Ensure the directory exists before the loader tries to read from it.
    workflows_dir.mkdir(exist_ok=True)
    return WorkflowLoader(workflows_dir, config)

View File

@@ -0,0 +1,124 @@
{
"name": "Publish Artifact",
"description": "Upload and store a package artifact",
"version": "1.0.0",
"nodes": [
{
"id": "verify_auth",
"type": "packagerepo.auth_verify_jwt",
"parameters": {
"token": "$request.headers.Authorization",
"out": "principal"
}
},
{
"id": "check_write_scope",
"type": "packagerepo.auth_check_scopes",
"parameters": {
"principal": "$principal",
"required_scopes": ["write"]
}
},
{
"id": "parse_path",
"type": "packagerepo.parse_path",
"parameters": {
"path": "$request.path",
"pattern": "/v1/:namespace/:name/:version/:variant/blob",
"out": "entity"
}
},
{
"id": "normalize",
"type": "packagerepo.normalize_entity",
"parameters": {
"entity": "$entity",
"out": "normalized"
}
},
{
"id": "validate",
"type": "packagerepo.validate_entity",
"parameters": {
"entity": "$normalized"
}
},
{
"id": "compute_digest",
"type": "string.sha256",
"parameters": {
"input": "$request.body",
"out": "digest"
}
},
{
"id": "check_exists",
"type": "packagerepo.kv_get",
"parameters": {
"key": "artifact/$entity.namespace/$entity.name/$entity.version/$entity.variant",
"out": "existing"
}
},
{
"id": "if_exists",
"type": "logic.if",
"parameters": {
"condition": "$existing != null",
"then": "error_exists",
"else": "write_blob"
}
},
{
"id": "write_blob",
"type": "packagerepo.blob_put",
"parameters": {
"digest": "$digest",
"data": "$request.body"
}
},
{
"id": "write_meta",
"type": "packagerepo.kv_put",
"parameters": {
"key": "artifact/$entity.namespace/$entity.name/$entity.version/$entity.variant",
"value": {
"digest": "$digest",
"size": "$request.content_length",
"uploaded_at": "$timestamp",
"uploaded_by": "$principal.sub"
}
}
},
{
"id": "update_index",
"type": "packagerepo.index_upsert",
"parameters": {
"key": "$entity.namespace/$entity.name",
"entry": {
"version": "$entity.version",
"variant": "$entity.variant",
"digest": "$digest"
}
}
},
{
"id": "success",
"type": "packagerepo.respond_json",
"parameters": {
"body": {
"ok": true,
"digest": "$digest"
},
"status": 201
}
},
{
"id": "error_exists",
"type": "packagerepo.respond_error",
"parameters": {
"message": "Artifact already exists",
"status": 409
}
}
]
}

View File

@@ -0,0 +1 @@
"""Packagerepo-specific workflow plugins."""