ROADMAP.md

This commit is contained in:
2026-01-10 12:16:49 +00:00
parent 7e0454771b
commit 5bdf916dbf
12 changed files with 937 additions and 597 deletions

View File

@@ -1,41 +1,45 @@
{
"template": "package.assets",
"name": "Assets Catalog",
"nodes": [
{
"id": "asset_roots",
"plugin": "list.literal",
"name": "Asset Roots",
"type": "list.literal",
"typeVersion": 1,
"position": [0, 0],
"outputs": {
"list": "assets.roots"
},
"parameters": {
"items": [
"assets/audio",
"assets/fonts",
"assets/images"
],
"type": "string"
"type": "string",
"outputs": {
"list": "assets.roots"
}
}
},
{
"id": "assert_asset_roots",
"plugin": "value.assert.type",
"name": "Assert Asset Roots",
"type": "value.assert.type",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"value": "assets.roots"
},
"parameters": {
"inputs": {
"value": "assets.roots"
},
"type": "string_list"
}
}
],
"connections": {
"asset_roots": {
"main": [
[
{ "node": "assert_asset_roots", "type": "main", "index": 0 }
"Asset Roots": {
"main": {
"0": [
{ "node": "Assert Asset Roots", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,93 +1,113 @@
{
"template": "boot.default",
"name": "Boot Default",
"nodes": [
{
"id": "load_config",
"plugin": "config.load",
"name": "Load Config",
"type": "config.load",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
"parameters": {
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
}
}
},
{
"id": "validate_version",
"plugin": "config.version.validate",
"name": "Validate Version",
"type": "config.version.validate",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"version": "config.version"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"version": "config.version"
}
}
},
{
"id": "migrate_version",
"plugin": "config.migrate",
"name": "Migrate Version",
"type": "config.migrate",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"document": "config.document",
"path": "config.path",
"version": "config.version"
},
"outputs": {
"document": "config.document",
"version": "config.version"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path",
"version": "config.version"
},
"outputs": {
"document": "config.document",
"version": "config.version"
}
}
},
{
"id": "validate_schema",
"plugin": "config.schema.validate",
"name": "Validate Schema",
"type": "config.schema.validate",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
}
}
},
{
"id": "build_runtime_config",
"plugin": "runtime.config.build",
"name": "Build Runtime Config",
"type": "runtime.config.build",
"typeVersion": 1,
"position": [1040, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"runtime": "config.runtime"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"runtime": "config.runtime"
}
}
}
],
"connections": {
"load_config": {
"main": [
[
{ "node": "validate_version", "type": "main", "index": 0 }
"Load Config": {
"main": {
"0": [
{ "node": "Validate Version", "type": "main", "index": 0 }
]
]
}
},
"validate_version": {
"main": [
[
{ "node": "migrate_version", "type": "main", "index": 0 }
"Validate Version": {
"main": {
"0": [
{ "node": "Migrate Version", "type": "main", "index": 0 }
]
]
}
},
"migrate_version": {
"main": [
[
{ "node": "validate_schema", "type": "main", "index": 0 }
"Migrate Version": {
"main": {
"0": [
{ "node": "Validate Schema", "type": "main", "index": 0 }
]
]
}
},
"validate_schema": {
"main": [
[
{ "node": "build_runtime_config", "type": "main", "index": 0 }
"Validate Schema": {
"main": {
"0": [
{ "node": "Build Runtime Config", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,79 +1,99 @@
{
"template": "frame.default",
"name": "Frame Default",
"nodes": [
{
"id": "begin_frame",
"plugin": "frame.begin",
"name": "Begin Frame",
"type": "frame.begin",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
}
}
},
{
"id": "step_physics",
"plugin": "frame.physics",
"name": "Step Physics",
"type": "frame.physics",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "update_scene",
"plugin": "frame.scene",
"name": "Update Scene",
"type": "frame.scene",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "render_frame",
"plugin": "frame.render",
"name": "Render Frame",
"type": "frame.render",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed"
}
}
},
{
"id": "update_audio",
"plugin": "frame.audio",
"name": "Update Audio",
"type": "frame.audio",
"typeVersion": 1,
"position": [1040, -120]
},
{
"id": "dispatch_gui",
"plugin": "frame.gui",
"name": "Dispatch GUI",
"type": "frame.gui",
"typeVersion": 1,
"position": [1040, 120]
}
],
"connections": {
"begin_frame": {
"main": [
[
{ "node": "step_physics", "type": "main", "index": 0 }
"Begin Frame": {
"main": {
"0": [
{ "node": "Step Physics", "type": "main", "index": 0 }
]
]
}
},
"step_physics": {
"main": [
[
{ "node": "update_scene", "type": "main", "index": 0 }
"Step Physics": {
"main": {
"0": [
{ "node": "Update Scene", "type": "main", "index": 0 }
]
]
}
},
"update_scene": {
"main": [
[
{ "node": "render_frame", "type": "main", "index": 0 }
"Update Scene": {
"main": {
"0": [
{ "node": "Render Frame", "type": "main", "index": 0 }
]
]
}
},
"render_frame": {
"main": [
[
{ "node": "update_audio", "type": "main", "index": 0 },
{ "node": "dispatch_gui", "type": "main", "index": 0 }
"Render Frame": {
"main": {
"0": [
{ "node": "Update Audio", "type": "main", "index": 0 },
{ "node": "Dispatch GUI", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,34 +1,42 @@
{
"template": "n8n.skeleton",
"name": "N8N Skeleton",
"nodes": [
{
"id": "load_config",
"plugin": "config.load",
"name": "Load Config",
"type": "config.load",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
"parameters": {
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
}
}
},
{
"id": "validate_schema",
"plugin": "config.schema.validate",
"name": "Validate Schema",
"type": "config.schema.validate",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
}
}
}
],
"connections": {
"load_config": {
"main": [
[
{ "node": "validate_schema", "type": "main", "index": 0 }
"Load Config": {
"main": {
"0": [
{ "node": "Validate Schema", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,68 +1,84 @@
{
"template": "boot.default",
"name": "Engine Tester Validation Tour",
"nodes": [
{
"id": "load_config",
"plugin": "config.load",
"name": "Load Config",
"type": "config.load",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
"parameters": {
"inputs": {
"path": "config.path"
},
"outputs": {
"document": "config.document"
}
}
},
{
"id": "validate_schema",
"plugin": "config.schema.validate",
"name": "Validate Schema",
"type": "config.schema.validate",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
}
}
},
{
"id": "build_runtime",
"plugin": "runtime.config.build",
"name": "Build Runtime Config",
"type": "runtime.config.build",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"runtime": "config.runtime"
"parameters": {
"inputs": {
"document": "config.document",
"path": "config.path"
},
"outputs": {
"runtime": "config.runtime"
}
}
},
{
"id": "validation_probe",
"plugin": "validation.tour.checkpoint",
"name": "Validation Probe",
"type": "validation.tour.checkpoint",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"checkpoint": "packages.engine_tester"
"parameters": {
"inputs": {
"checkpoint": "packages.engine_tester"
}
}
}
],
"connections": {
"load_config": {
"main": [
[
{ "node": "validate_schema", "type": "main", "index": 0 }
"Load Config": {
"main": {
"0": [
{ "node": "Validate Schema", "type": "main", "index": 0 }
]
]
}
},
"validate_schema": {
"main": [
[
{ "node": "build_runtime", "type": "main", "index": 0 }
"Validate Schema": {
"main": {
"0": [
{ "node": "Build Runtime Config", "type": "main", "index": 0 }
]
]
}
},
"build_runtime": {
"main": [
[
{ "node": "validation_probe", "type": "main", "index": 0 }
"Build Runtime Config": {
"main": {
"0": [
{ "node": "Validation Probe", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,61 +1,77 @@
{
"template": "frame.default",
"name": "GUI Frame",
"nodes": [
{
"id": "gui_begin",
"plugin": "frame.begin",
"name": "GUI Begin",
"type": "frame.begin",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
}
}
},
{
"id": "gui_layout",
"plugin": "frame.gui",
"name": "GUI Layout",
"type": "frame.gui",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed"
}
}
},
{
"id": "render_ui",
"plugin": "frame.render",
"name": "Render UI",
"type": "frame.render",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed"
}
}
},
{
"id": "capture_ui",
"plugin": "validation.tour.checkpoint",
"name": "Capture UI",
"type": "validation.tour.checkpoint",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"checkpoint": "packages.gui_demo"
"parameters": {
"inputs": {
"checkpoint": "packages.gui_demo"
}
}
}
],
"connections": {
"gui_begin": {
"main": [
[
{ "node": "gui_layout", "type": "main", "index": 0 }
"GUI Begin": {
"main": {
"0": [
{ "node": "GUI Layout", "type": "main", "index": 0 }
]
]
}
},
"gui_layout": {
"main": [
[
{ "node": "render_ui", "type": "main", "index": 0 }
"GUI Layout": {
"main": {
"0": [
{ "node": "Render UI", "type": "main", "index": 0 }
]
]
}
},
"render_ui": {
"main": [
[
{ "node": "capture_ui", "type": "main", "index": 0 }
"Render UI": {
"main": {
"0": [
{ "node": "Capture UI", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,41 +1,45 @@
{
"template": "package.materialx",
"name": "MaterialX Catalog",
"nodes": [
{
"id": "materialx_paths",
"plugin": "list.literal",
"name": "MaterialX Paths",
"type": "list.literal",
"typeVersion": 1,
"position": [0, 0],
"outputs": {
"list": "materialx.paths"
},
"parameters": {
"items": [
"libraries",
"resources",
"documents"
],
"type": "string"
"type": "string",
"outputs": {
"list": "materialx.paths"
}
}
},
{
"id": "assert_materialx_paths",
"plugin": "value.assert.type",
"name": "Assert MaterialX Paths",
"type": "value.assert.type",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"value": "materialx.paths"
},
"parameters": {
"inputs": {
"value": "materialx.paths"
},
"type": "string_list"
}
}
],
"connections": {
"materialx_paths": {
"main": [
[
{ "node": "assert_materialx_paths", "type": "main", "index": 0 }
"MaterialX Paths": {
"main": {
"0": [
{ "node": "Assert MaterialX Paths", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,75 +1,95 @@
{
"template": "frame.default",
"name": "Quake3 Frame",
"nodes": [
{
"id": "quake_begin",
"plugin": "frame.begin",
"name": "Quake Begin",
"type": "frame.begin",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "quake_physics",
"plugin": "frame.bullet_physics",
"name": "Quake Physics",
"type": "frame.bullet_physics",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "quake_scene",
"plugin": "frame.scene",
"name": "Quake Scene",
"type": "frame.scene",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "quake_render",
"plugin": "frame.render",
"name": "Quake Render",
"type": "frame.render",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed"
}
}
},
{
"id": "quake_validation",
"plugin": "validation.tour.checkpoint",
"name": "Quake Validation",
"type": "validation.tour.checkpoint",
"typeVersion": 1,
"position": [1040, 0],
"inputs": {
"checkpoint": "packages.quake3_map"
"parameters": {
"inputs": {
"checkpoint": "packages.quake3_map"
}
}
}
],
"connections": {
"quake_begin": {
"main": [
[
{ "node": "quake_physics", "type": "main", "index": 0 }
"Quake Begin": {
"main": {
"0": [
{ "node": "Quake Physics", "type": "main", "index": 0 }
]
]
}
},
"quake_physics": {
"main": [
[
{ "node": "quake_scene", "type": "main", "index": 0 }
"Quake Physics": {
"main": {
"0": [
{ "node": "Quake Scene", "type": "main", "index": 0 }
]
]
}
},
"quake_scene": {
"main": [
[
{ "node": "quake_render", "type": "main", "index": 0 }
"Quake Scene": {
"main": {
"0": [
{ "node": "Quake Render", "type": "main", "index": 0 }
]
]
}
},
"quake_render": {
"main": [
[
{ "node": "quake_validation", "type": "main", "index": 0 }
"Quake Render": {
"main": {
"0": [
{ "node": "Quake Validation", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,95 +1,119 @@
{
"template": "frame.default",
"name": "Seed Demo Gameplay",
"nodes": [
{
"id": "begin_frame",
"plugin": "frame.begin",
"name": "Begin Frame",
"type": "frame.begin",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
}
}
},
{
"id": "camera_control",
"plugin": "frame.camera",
"name": "Camera Control",
"type": "frame.camera",
"typeVersion": 1,
"position": [260, 0],
"inputs": {
"delta": "frame.delta"
},
"outputs": {
"view_state": "frame.view_state"
"parameters": {
"inputs": {
"delta": "frame.delta"
},
"outputs": {
"view_state": "frame.view_state"
}
}
},
{
"id": "bullet_physics",
"plugin": "frame.bullet_physics",
"name": "Bullet Physics",
"type": "frame.bullet_physics",
"typeVersion": 1,
"position": [520, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "scene",
"plugin": "frame.scene",
"name": "Scene Update",
"type": "frame.scene",
"typeVersion": 1,
"position": [780, 0],
"inputs": {
"delta": "frame.delta"
"parameters": {
"inputs": {
"delta": "frame.delta"
}
}
},
{
"id": "render",
"plugin": "frame.render",
"name": "Render Frame",
"type": "frame.render",
"typeVersion": 1,
"position": [1040, 0],
"inputs": {
"elapsed": "frame.elapsed",
"view_state": "frame.view_state"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed",
"view_state": "frame.view_state"
}
}
},
{
"id": "validate_capture",
"plugin": "validation.tour.checkpoint",
"name": "Validate Capture",
"type": "validation.tour.checkpoint",
"typeVersion": 1,
"position": [1300, 0],
"inputs": {
"checkpoint": "gameplay.startup_camera"
"parameters": {
"inputs": {
"checkpoint": "gameplay.startup_camera"
}
}
}
],
"connections": {
"begin_frame": {
"main": [
[
{ "node": "camera_control", "type": "main", "index": 0 }
"Begin Frame": {
"main": {
"0": [
{ "node": "Camera Control", "type": "main", "index": 0 }
]
]
}
},
"camera_control": {
"main": [
[
{ "node": "bullet_physics", "type": "main", "index": 0 }
"Camera Control": {
"main": {
"0": [
{ "node": "Bullet Physics", "type": "main", "index": 0 }
]
]
}
},
"bullet_physics": {
"main": [
[
{ "node": "scene", "type": "main", "index": 0 }
"Bullet Physics": {
"main": {
"0": [
{ "node": "Scene Update", "type": "main", "index": 0 }
]
]
}
},
"scene": {
"main": [
[
{ "node": "render", "type": "main", "index": 0 }
"Scene Update": {
"main": {
"0": [
{ "node": "Render Frame", "type": "main", "index": 0 }
]
]
}
},
"render": {
"main": [
[
{ "node": "validate_capture", "type": "main", "index": 0 }
"Render Frame": {
"main": {
"0": [
{ "node": "Validate Capture", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -1,100 +1,124 @@
{
"template": "frame.default",
"name": "Soundboard Flow",
"nodes": [
{
"id": "begin_frame",
"plugin": "frame.begin",
"name": "Begin Frame",
"type": "frame.begin",
"typeVersion": 1,
"position": [0, 0],
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
"parameters": {
"inputs": {
"delta": "frame.delta",
"elapsed": "frame.elapsed"
}
}
},
{
"id": "catalog_scan",
"plugin": "soundboard.catalog.scan",
"name": "Catalog Scan",
"type": "soundboard.catalog.scan",
"typeVersion": 1,
"position": [260, -120],
"outputs": {
"catalog": "soundboard.catalog"
"parameters": {
"outputs": {
"catalog": "soundboard.catalog"
}
}
},
{
"id": "gui_render",
"plugin": "soundboard.gui",
"name": "GUI Render",
"type": "soundboard.gui",
"typeVersion": 1,
"position": [520, -120],
"inputs": {
"catalog": "soundboard.catalog"
},
"outputs": {
"selection": "soundboard.selection",
"gui_commands": "soundboard.gui.commands"
"parameters": {
"inputs": {
"catalog": "soundboard.catalog"
},
"outputs": {
"selection": "soundboard.selection",
"gui_commands": "soundboard.gui.commands"
}
}
},
{
"id": "audio_dispatch",
"plugin": "soundboard.audio",
"name": "Audio Dispatch",
"type": "soundboard.audio",
"typeVersion": 1,
"position": [780, -120],
"inputs": {
"selection": "soundboard.selection"
},
"outputs": {
"status": "soundboard.status"
"parameters": {
"inputs": {
"selection": "soundboard.selection"
},
"outputs": {
"status": "soundboard.status"
}
}
},
{
"id": "render_frame",
"plugin": "frame.render",
"name": "Render Frame",
"type": "frame.render",
"typeVersion": 1,
"position": [520, 120],
"inputs": {
"elapsed": "frame.elapsed",
"gui_commands": "soundboard.gui.commands"
"parameters": {
"inputs": {
"elapsed": "frame.elapsed",
"gui_commands": "soundboard.gui.commands"
}
}
},
{
"id": "validation_capture",
"plugin": "validation.tour.checkpoint",
"name": "Validation Capture",
"type": "validation.tour.checkpoint",
"typeVersion": 1,
"position": [780, 120],
"inputs": {
"checkpoint": "packages.soundboard"
"parameters": {
"inputs": {
"checkpoint": "packages.soundboard"
}
}
}
],
"connections": {
"begin_frame": {
"main": [
[
{ "node": "catalog_scan", "type": "main", "index": 0 }
"Begin Frame": {
"main": {
"0": [
{ "node": "Catalog Scan", "type": "main", "index": 0 }
]
]
}
},
"catalog_scan": {
"main": [
[
{ "node": "gui_render", "type": "main", "index": 0 }
"Catalog Scan": {
"main": {
"0": [
{ "node": "GUI Render", "type": "main", "index": 0 }
]
]
}
},
"gui_render": {
"main": [
[
{ "node": "audio_dispatch", "type": "main", "index": 0 },
{ "node": "render_frame", "type": "main", "index": 0 }
"GUI Render": {
"main": {
"0": [
{ "node": "Audio Dispatch", "type": "main", "index": 0 },
{ "node": "Render Frame", "type": "main", "index": 0 }
]
]
}
},
"audio_dispatch": {
"main": [
[
{ "node": "validation_capture", "type": "main", "index": 0 }
"Audio Dispatch": {
"main": {
"0": [
{ "node": "Validation Capture", "type": "main", "index": 0 }
]
]
}
},
"render_frame": {
"main": [
[
{ "node": "validation_capture", "type": "main", "index": 0 }
"Render Frame": {
"main": {
"0": [
{ "node": "Validation Capture", "type": "main", "index": 0 }
]
]
}
}
}
}

View File

@@ -11,6 +11,7 @@ import argparse
import json
import logging
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Callable, Iterable, Optional, Sequence
@@ -22,8 +23,6 @@ FIELD_TO_FOLDER = {
"shaders": "shaders",
"workflows": "workflows",
}
WORKFLOW_TOP_LEVEL_KEYS = {"template", "nodes", "steps", "connections"}
WORKFLOW_NODE_KEYS = {"id", "name", "plugin", "type", "position", "inputs", "outputs", "parameters"}
PACKAGE_ALLOWED_KEYS = {
"name",
"version",
@@ -38,60 +37,29 @@ PACKAGE_ALLOWED_KEYS = {
"notes",
}
class WorkflowReferenceProfile:
def __init__(self,
required_top_keys: set[str],
allowed_top_keys: set[str],
require_nodes: bool,
require_template: bool,
require_connections: bool,
require_id: bool,
require_plugin: bool,
require_position: bool):
self.required_top_keys = required_top_keys
self.allowed_top_keys = allowed_top_keys
self.require_nodes = require_nodes
self.require_template = require_template
self.require_connections = require_connections
self.require_id = require_id
self.require_plugin = require_plugin
self.require_position = require_position
def build_workflow_profile(reference: dict) -> WorkflowReferenceProfile:
required_top_keys = set(reference.keys())
allowed_top_keys = set(reference.keys())
require_nodes = "nodes" in reference
require_template = "template" in reference
require_connections = "connections" in reference
require_id = True
require_plugin = True
require_position = False
if require_nodes:
nodes = reference.get("nodes")
if isinstance(nodes, list) and nodes:
require_position = all(
isinstance(node, dict) and "position" in node
for node in nodes
)
return WorkflowReferenceProfile(
required_top_keys,
allowed_top_keys,
require_nodes,
require_template,
require_connections,
require_id,
require_plugin,
require_position,
)
logger = logging.getLogger("package_lint")
try:
from jsonschema import Draft7Validator
from jsonschema import Draft202012Validator
except ImportError:
Draft7Validator = None
Draft202012Validator = None
@dataclass(frozen=True)
class WorkflowSchemaDefinition:
    """Pre-extracted key sets from the n8n-style workflow JSON schema.

    Immutable snapshot built once (see ``build_schema_definition``) so the
    per-file validators can do cheap set arithmetic instead of re-walking
    the raw schema document for every check.
    """

    # The full schema document as loaded from ROADMAP.md.
    raw_schema: dict
    # Allowed / required keys at the top level of a workflow file.
    top_level_keys: set[str]
    required_top_keys: set[str]
    # Allowed / required keys for each entry of "nodes".
    node_keys: set[str]
    node_required: set[str]
    # Allowed / required keys for each entry of "tags".
    tag_keys: set[str]
    tag_required: set[str]
    # Allowed keys inside the "settings" object.
    settings_keys: set[str]
    # Allowed / required keys of a credential reference object.
    credential_ref_keys: set[str]
    credential_ref_required: set[str]
    # Allowed / required keys of a node-to-credential binding entry.
    credential_binding_keys: set[str]
    credential_binding_required: set[str]
    # Connection kinds accepted in the "connections" map (e.g. "main").
    connection_types: set[str]
def load_json(path: Path) -> dict:
@@ -100,6 +68,58 @@ def load_json(path: Path) -> dict:
return json.load(handle)
def load_schema_from_roadmap(roadmap_path: Path) -> dict:
    """Extract the n8n-style workflow JSON schema embedded in ROADMAP.md.

    The schema is expected to appear after a line reading exactly
    "n8n style schema:" (case-insensitive, surrounding whitespace ignored),
    inside the first ```json fenced code block that follows it.

    Raises:
        FileNotFoundError: if ``roadmap_path`` does not exist.
        ValueError: if no schema block can be located.
        json.JSONDecodeError: if the fenced block is not valid JSON.
    """
    if not roadmap_path.exists():
        raise FileNotFoundError(f"ROADMAP not found at {roadmap_path}")
    lines = roadmap_path.read_text(encoding="utf-8").splitlines()
    in_schema_section = False
    in_block = False
    schema_lines: list[str] = []
    for line in lines:
        if not in_schema_section:
            # Skip everything until the schema section header.
            if line.strip().lower() == "n8n style schema:":
                in_schema_section = True
            continue
        if not in_block:
            # Inside the section: wait for the opening ```json fence.
            if line.strip().startswith("```json"):
                in_block = True
            continue
        # Inside the fenced block: stop at the closing fence.
        if line.strip().startswith("```"):
            break
        schema_lines.append(line)
    if not schema_lines:
        raise ValueError("Failed to locate n8n schema block in ROADMAP.md")
    return json.loads("\n".join(schema_lines))
def build_schema_definition(schema: dict) -> WorkflowSchemaDefinition:
    """Flatten a raw JSON-schema document into a WorkflowSchemaDefinition.

    Pulls the ``properties``/``required`` key sets for the top level and for
    each relevant ``$defs`` entry (node, tag, workflowSettings,
    credentialRef, credentialBinding, nodeConnectionsByType). Missing
    sections are treated as empty, so a partial schema still yields a
    usable definition.
    """
    properties = schema.get("properties") or {}
    required = schema.get("required") or []
    defs = schema.get("$defs") or {}
    node_def = defs.get("node") or {}
    tag_def = defs.get("tag") or {}
    settings_def = defs.get("workflowSettings") or {}
    credential_ref_def = defs.get("credentialRef") or {}
    credential_binding_def = defs.get("credentialBinding") or {}
    connections_def = defs.get("nodeConnectionsByType") or {}
    connection_types = set((connections_def.get("properties") or {}).keys())
    return WorkflowSchemaDefinition(
        raw_schema=schema,
        top_level_keys=set(properties.keys()),
        required_top_keys=set(required),
        node_keys=set((node_def.get("properties") or {}).keys()),
        node_required=set(node_def.get("required") or []),
        tag_keys=set((tag_def.get("properties") or {}).keys()),
        tag_required=set(tag_def.get("required") or []),
        settings_keys=set((settings_def.get("properties") or {}).keys()),
        credential_ref_keys=set((credential_ref_def.get("properties") or {}).keys()),
        credential_ref_required=set(credential_ref_def.get("required") or []),
        credential_binding_keys=set((credential_binding_def.get("properties") or {}).keys()),
        credential_binding_required=set(credential_binding_def.get("required") or []),
        # Fall back to the conventional connection kinds when the schema
        # does not enumerate any.
        connection_types=connection_types or {"main", "error"},
    )
def check_paths(
root: Path,
entries: Iterable[str],
@@ -122,27 +142,18 @@ def check_paths(
return missing
def validate_workflow_schema(workflow_path: Path, validator) -> list[str]:
    """Validate a workflow JSON file against the provided schema validator.

    Args:
        workflow_path: path to the workflow JSON file.
        validator: a jsonschema validator instance exposing ``iter_errors``.

    Returns:
        A list of human-readable issue strings; empty when the file passes.
    """
    try:
        content = load_json(workflow_path)
    except json.JSONDecodeError as exc:
        return [f"invalid JSON: {exc}"]
    issues: list[str] = []
    # NOTE(review): absolute_path can mix str and int parts; sorting mixed
    # tuples may raise TypeError on exotic error sets — confirm acceptable.
    for err in sorted(
        validator.iter_errors(content),
        key=lambda x: tuple(x.absolute_path),
    ):
        pointer = "/".join(str(part) for part in err.absolute_path) or "<root>"
        issues.append(f"schema violation at {pointer}: {err.message}")
    return issues
def _is_non_empty_string(value: object) -> bool:
return isinstance(value, str) and value.strip() != ""
def _is_number(value: object) -> bool:
return isinstance(value, (int, float)) and not isinstance(value, bool)
def _is_int(value: object) -> bool:
return isinstance(value, int) and not isinstance(value, bool)
def _validate_string_map(value: object, context: str) -> list[str]:
if not isinstance(value, dict):
return [f"{context} must be an object"]
@@ -185,44 +196,195 @@ def _validate_parameters(value: object) -> list[str]:
if not _is_non_empty_string(key):
issues.append("parameters keys must be non-empty strings")
continue
if key in {"inputs", "outputs"}:
issues.extend(_validate_string_map(item, f"parameters.{key}"))
continue
issues.extend(_validate_parameter_value(item, f"parameters.{key}"))
return issues
def _validate_tags(tags: object, schema_def: WorkflowSchemaDefinition) -> list[str]:
    """Check the workflow-level ``tags`` array against the schema definition.

    Each tag must be an object using only ``schema_def.tag_keys``, carrying
    all ``schema_def.tag_required`` keys, with a non-empty string ``name``
    and (when present) a string-or-integer ``id``.
    """
    if not isinstance(tags, list):
        return ["tags must be an array"]
    issues: list[str] = []
    for index, tag in enumerate(tags):
        if not isinstance(tag, dict):
            issues.append(f"tags[{index}] must be an object")
            continue
        extra_keys = set(tag.keys()) - schema_def.tag_keys
        if extra_keys:
            issues.append(f"tags[{index}] has unsupported keys: {sorted(extra_keys)}")
        missing_keys = schema_def.tag_required - set(tag.keys())
        if missing_keys:
            issues.append(f"tags[{index}] missing required keys: {sorted(missing_keys)}")
        name = tag.get("name")
        if not _is_non_empty_string(name):
            issues.append(f"tags[{index}].name must be a non-empty string")
        if "id" in tag and not isinstance(tag["id"], (str, int)):
            issues.append(f"tags[{index}].id must be a string or integer")
    return issues
def _validate_settings(settings: object, schema_def: WorkflowSchemaDefinition) -> list[str]:
    """Check the workflow ``settings`` object against the schema definition.

    Validates the allowed key set plus the type/value constraints of the
    known settings fields (timezone, executionTimeout, save* flags,
    errorWorkflowId, callerPolicy).
    """
    if not isinstance(settings, dict):
        return ["settings must be an object"]
    issues: list[str] = []
    extra_keys = set(settings.keys()) - schema_def.settings_keys
    if extra_keys:
        issues.append(f"settings has unsupported keys: {sorted(extra_keys)}")
    if "timezone" in settings and not _is_non_empty_string(settings["timezone"]):
        issues.append("settings.timezone must be a non-empty string")
    if "executionTimeout" in settings:
        value = settings["executionTimeout"]
        if not _is_int(value) or value < 0:
            issues.append("settings.executionTimeout must be an integer >= 0")
    for key in ("saveExecutionProgress", "saveManualExecutions"):
        if key in settings and not isinstance(settings[key], bool):
            issues.append(f"settings.{key} must be a boolean")
    for key in ("saveDataErrorExecution", "saveDataSuccessExecution", "saveDataManualExecution"):
        if key in settings:
            value = settings[key]
            if not _is_non_empty_string(value):
                issues.append(f"settings.{key} must be a non-empty string")
            elif value not in {"all", "none"}:
                issues.append(f"settings.{key} must be 'all' or 'none'")
    if "errorWorkflowId" in settings and not isinstance(settings["errorWorkflowId"], (str, int)):
        issues.append("settings.errorWorkflowId must be a string or integer")
    if "callerPolicy" in settings and not _is_non_empty_string(settings["callerPolicy"]):
        issues.append("settings.callerPolicy must be a non-empty string")
    return issues
def _validate_credential_ref(value: object, context: str, schema_def: WorkflowSchemaDefinition) -> list[str]:
    """Check a credential reference object (``{id, name}``-style).

    ``context`` is the JSON-path-like label used to prefix issue messages.
    """
    if not isinstance(value, dict):
        return [f"{context} must be an object"]
    issues: list[str] = []
    extra_keys = set(value.keys()) - schema_def.credential_ref_keys
    if extra_keys:
        issues.append(f"{context} has unsupported keys: {sorted(extra_keys)}")
    missing = schema_def.credential_ref_required - set(value.keys())
    if missing:
        issues.append(f"{context} missing required keys: {sorted(missing)}")
    if "id" in value and not isinstance(value["id"], (str, int)):
        issues.append(f"{context}.id must be a string or integer")
    if "name" in value and not _is_non_empty_string(value["name"]):
        issues.append(f"{context}.name must be a non-empty string")
    return issues
def _validate_credential_binding(value: object, index: int, schema_def: WorkflowSchemaDefinition) -> list[str]:
    """Check one entry of the workflow ``credentials`` array.

    A binding ties a node (``nodeId``) to a credential (``credentialType``,
    ``credentialId``); ``index`` is only used to label issue messages.
    """
    context = f"credentials[{index}]"
    if not isinstance(value, dict):
        return [f"{context} must be an object"]
    issues: list[str] = []
    extra_keys = set(value.keys()) - schema_def.credential_binding_keys
    if extra_keys:
        issues.append(f"{context} has unsupported keys: {sorted(extra_keys)}")
    missing = schema_def.credential_binding_required - set(value.keys())
    if missing:
        issues.append(f"{context} missing required keys: {sorted(missing)}")
    if "nodeId" in value and not _is_non_empty_string(value["nodeId"]):
        issues.append(f"{context}.nodeId must be a non-empty string")
    if "credentialType" in value and not _is_non_empty_string(value["credentialType"]):
        issues.append(f"{context}.credentialType must be a non-empty string")
    if "credentialId" in value and not isinstance(value["credentialId"], (str, int)):
        issues.append(f"{context}.credentialId must be a string or integer")
    return issues
def _validate_nodes(nodes: object, schema_def: WorkflowSchemaDefinition) -> tuple[list[str], list[str], list[str]]:
    """Validate the workflow ``nodes`` array.

    Args:
        nodes: Raw value of the workflow's ``nodes`` key.
        schema_def: Schema definition supplying the allowed/required node keys
            and the credential-reference key sets.

    Returns:
        A ``(issues, node_names, node_ids)`` tuple. Names and ids are
        collected even when other fields of the same node are invalid, so
        later passes (connections, credential bindings) can still
        cross-reference them.
    """
    if not isinstance(nodes, list):
        return ["nodes must be an array"], [], []
    if not nodes:
        return ["nodes must contain at least one node"], [], []
    issues: list[str] = []
    node_names: list[str] = []
    node_ids: list[str] = []
    seen_names: set[str] = set()
    seen_ids: set[str] = set()
    for index, node in enumerate(nodes):
        if not isinstance(node, dict):
            issues.append(f"nodes[{index}] must be an object")
            continue
        # Key-set checks: report unknown keys and missing required keys, but
        # keep validating the rest of the node either way.
        extra_keys = set(node.keys()) - schema_def.node_keys
        if extra_keys:
            issues.append(f"nodes[{index}] has unsupported keys: {sorted(extra_keys)}")
        missing_keys = schema_def.node_required - set(node.keys())
        if missing_keys:
            issues.append(f"nodes[{index}] missing required keys: {sorted(missing_keys)}")
        node_id = node.get("id")
        if not _is_non_empty_string(node_id):
            issues.append(f"nodes[{index}].id must be a non-empty string")
        else:
            if node_id in seen_ids:
                issues.append(f"duplicate node id '{node_id}'")
            # NOTE(review): the id is appended even when it duplicates an
            # earlier one, so node_ids may contain duplicates; the caller
            # converts to a set before cross-referencing.
            seen_ids.add(node_id)
            node_ids.append(node_id)
        node_name = node.get("name")
        if not _is_non_empty_string(node_name):
            issues.append(f"nodes[{index}].name must be a non-empty string")
        else:
            if node_name in seen_names:
                issues.append(f"duplicate node name '{node_name}'")
            # Same duplicate-tolerant collection as for ids above.
            seen_names.add(node_name)
            node_names.append(node_name)
        node_type = node.get("type")
        if not _is_non_empty_string(node_type):
            issues.append(f"nodes[{index}].type must be a non-empty string")
        # typeVersion is optional; when present it must be numeric and >= 1.
        version = node.get("typeVersion")
        if version is not None:
            if not _is_number(version) or version < 1:
                issues.append(f"nodes[{index}].typeVersion must be a number >= 1")
        # position is optional; when present it must be a 2-element numeric
        # [x, y] pair (canvas coordinates).
        position = node.get("position")
        if position is not None:
            if (not isinstance(position, list) or len(position) != 2 or
                    not all(_is_number(item) for item in position)):
                issues.append(f"nodes[{index}].position must be [x, y] numbers")
        # Optional boolean behaviour flags.
        for key in ("disabled", "notesInFlow", "retryOnFail", "continueOnFail",
                    "alwaysOutputData", "executeOnce"):
            if key in node and not isinstance(node[key], bool):
                issues.append(f"nodes[{index}].{key} must be a boolean")
        if "notes" in node and not isinstance(node["notes"], str):
            issues.append(f"nodes[{index}].notes must be a string")
        # Retry tuning: maxTries must be a positive integer, waitBetweenTries
        # a non-negative integer.
        if "maxTries" in node:
            value = node["maxTries"]
            if not _is_int(value) or value < 1:
                issues.append(f"nodes[{index}].maxTries must be an integer >= 1")
        if "waitBetweenTries" in node:
            value = node["waitBetweenTries"]
            if not _is_int(value) or value < 0:
                issues.append(f"nodes[{index}].waitBetweenTries must be an integer >= 0")
        # Delegate nested structures to their dedicated validators.
        if "parameters" in node:
            issues.extend(_validate_parameters(node["parameters"]))
        if "credentials" in node:
            credentials = node["credentials"]
            if not isinstance(credentials, dict):
                issues.append(f"nodes[{index}].credentials must be an object")
            else:
                for cred_key, cred_value in credentials.items():
                    if not _is_non_empty_string(cred_key):
                        issues.append(f"nodes[{index}].credentials keys must be non-empty strings")
                        continue
                    issues.extend(
                        _validate_credential_ref(
                            cred_value,
                            f"nodes[{index}].credentials.{cred_key}",
                            schema_def,
                        )
                    )
        if "webhookId" in node and not _is_non_empty_string(node["webhookId"]):
            issues.append(f"nodes[{index}].webhookId must be a non-empty string")
        # onError, when present, must be one of the recognised error policies.
        if "onError" in node:
            value = node["onError"]
            allowed = {"stopWorkflow", "continueRegularOutput", "continueErrorOutput"}
            if not _is_non_empty_string(value) or value not in allowed:
                issues.append(f"nodes[{index}].onError must be one of {sorted(allowed)}")
    return issues, node_names, node_ids
def _validate_connections(connections: object,
node_names: set[str],
schema_def: WorkflowSchemaDefinition) -> list[str]:
if not isinstance(connections, dict):
return ["connections must be an object"]
issues: list[str] = []
@@ -230,124 +392,129 @@ def _validate_connections(connections: object, node_ids: set[str]) -> list[str]:
if not _is_non_empty_string(from_node):
issues.append("connections keys must be non-empty strings")
continue
if from_node not in node_ids:
if from_node not in node_names:
issues.append(f"connections references unknown node '{from_node}'")
if not isinstance(link, dict):
issues.append(f"connections.{from_node} must be an object")
continue
extra_keys = set(link.keys()) - {"main"}
extra_keys = set(link.keys()) - schema_def.connection_types
if extra_keys:
issues.append(f"connections.{from_node} has unsupported keys: {sorted(extra_keys)}")
if "main" not in link:
continue
main_value = link["main"]
if not isinstance(main_value, list):
issues.append(f"connections.{from_node}.main must be an array")
continue
for branch_index, branch in enumerate(main_value):
if not isinstance(branch, list):
issues.append(f"connections.{from_node}.main[{branch_index}] must be an array")
if not any(key in link for key in schema_def.connection_types):
issues.append(f"connections.{from_node} must define at least one connection type")
for conn_type in schema_def.connection_types:
if conn_type not in link:
continue
for entry_index, entry in enumerate(branch):
if not isinstance(entry, dict):
index_map = link[conn_type]
if not isinstance(index_map, dict):
issues.append(f"connections.{from_node}.{conn_type} must be an object")
continue
for index_key, targets in index_map.items():
if not _is_non_empty_string(index_key) or not index_key.isdigit():
issues.append(
f"connections.{from_node}.main[{branch_index}][{entry_index}] must be an object"
f"connections.{from_node}.{conn_type} index keys must be numeric strings"
)
continue
node_name = entry.get("node")
if not _is_non_empty_string(node_name):
if not isinstance(targets, list):
issues.append(
f"connections.{from_node}.main[{branch_index}][{entry_index}] missing node"
f"connections.{from_node}.{conn_type}.{index_key} must be an array"
)
continue
if node_name not in node_ids:
issues.append(
f"connections.{from_node}.main[{branch_index}][{entry_index}] "
f"references unknown node '{node_name}'"
)
if "type" in entry and not _is_non_empty_string(entry["type"]):
issues.append(
f"connections.{from_node}.main[{branch_index}][{entry_index}].type "
"must be a non-empty string"
)
if "index" in entry and not isinstance(entry["index"], int):
issues.append(
f"connections.{from_node}.main[{branch_index}][{entry_index}].index "
"must be an integer"
)
for target_index, target in enumerate(targets):
context = f"connections.{from_node}.{conn_type}.{index_key}[{target_index}]"
if not isinstance(target, dict):
issues.append(f"{context} must be an object")
continue
extra_keys = set(target.keys()) - {"node", "type", "index"}
if extra_keys:
issues.append(f"{context} has unsupported keys: {sorted(extra_keys)}")
node_name = target.get("node")
if not _is_non_empty_string(node_name):
issues.append(f"{context}.node must be a non-empty string")
elif node_name not in node_names:
issues.append(f"{context} references unknown node '{node_name}'")
if "type" in target and not _is_non_empty_string(target["type"]):
issues.append(f"{context}.type must be a non-empty string")
if "index" in target:
index_value = target["index"]
if not _is_int(index_value) or index_value < 0:
issues.append(f"{context}.index must be an integer >= 0")
return issues
def validate_workflow_structure(workflow_path: Path,
content: dict,
reference_profile: Optional[WorkflowReferenceProfile]) -> list[str]:
schema_def: WorkflowSchemaDefinition) -> list[str]:
issues: list[str] = []
logger.debug("Validating workflow structure: %s", workflow_path)
allowed_top_keys = WORKFLOW_TOP_LEVEL_KEYS
required_top_keys = set()
if reference_profile:
allowed_top_keys = reference_profile.allowed_top_keys
required_top_keys = reference_profile.required_top_keys
extra_keys = set(content.keys()) - allowed_top_keys
extra_keys = set(content.keys()) - schema_def.top_level_keys
if extra_keys:
issues.append(f"unsupported workflow keys: {sorted(extra_keys)}")
missing_keys = required_top_keys - set(content.keys())
missing_keys = schema_def.required_top_keys - set(content.keys())
if missing_keys:
issues.append(f"workflow missing required keys: {sorted(missing_keys)}")
has_nodes = "nodes" in content
has_steps = "steps" in content
if has_nodes and has_steps:
issues.append("workflow cannot define both 'nodes' and 'steps'")
if reference_profile and reference_profile.require_nodes and has_steps:
issues.append("workflow must not define 'steps' when using reference schema")
if not has_nodes and not has_steps:
issues.append("workflow must define 'nodes' or 'steps'")
return issues
if reference_profile and reference_profile.require_template and "template" not in content:
issues.append("workflow missing required template")
if "template" in content and not _is_non_empty_string(content["template"]):
issues.append("workflow template must be a non-empty string")
if reference_profile and reference_profile.require_connections and "connections" not in content:
issues.append("workflow missing required connections")
if "name" in content and not _is_non_empty_string(content["name"]):
issues.append("workflow name must be a non-empty string")
if "id" in content and not isinstance(content["id"], (str, int)):
issues.append("workflow id must be a string or integer")
if "active" in content and not isinstance(content["active"], bool):
issues.append("workflow active must be a boolean")
for key in ("versionId", "createdAt", "updatedAt"):
if key in content and not isinstance(content[key], str):
issues.append(f"workflow {key} must be a string")
if "tags" in content:
issues.extend(_validate_tags(content["tags"], schema_def))
if "meta" in content and not isinstance(content["meta"], dict):
issues.append("workflow meta must be an object")
if "settings" in content:
issues.extend(_validate_settings(content["settings"], schema_def))
if "pinData" in content:
pin_data = content["pinData"]
if not isinstance(pin_data, dict):
issues.append("workflow pinData must be an object")
else:
for pin_key, pin_value in pin_data.items():
if not _is_non_empty_string(pin_key):
issues.append("workflow pinData keys must be non-empty strings")
continue
if not isinstance(pin_value, list):
issues.append(f"workflow pinData.{pin_key} must be an array")
continue
for entry_index, entry in enumerate(pin_value):
if not isinstance(entry, dict):
issues.append(f"workflow pinData.{pin_key}[{entry_index}] must be an object")
if "staticData" in content and not isinstance(content["staticData"], dict):
issues.append("workflow staticData must be an object")
node_issues: list[str] = []
node_names: list[str] = []
node_ids: list[str] = []
if has_nodes:
nodes = content.get("nodes")
if not isinstance(nodes, list) or not nodes:
issues.append("workflow nodes must be a non-empty array")
else:
seen = set()
for index, node in enumerate(nodes):
node_id, node_issues = _validate_node_entry(node, index, reference_profile)
issues.extend(node_issues)
if node_id:
if node_id in seen:
issues.append(f"duplicate node id '{node_id}'")
else:
seen.add(node_id)
node_ids.append(node_id)
if has_steps:
steps = content.get("steps")
if not isinstance(steps, list) or not steps:
issues.append("workflow steps must be a non-empty array")
else:
seen = set()
for index, step in enumerate(steps):
node_id, node_issues = _validate_node_entry(step, index, reference_profile)
issues.extend(node_issues)
if node_id:
if node_id in seen:
issues.append(f"duplicate step id '{node_id}'")
else:
seen.add(node_id)
node_ids.append(node_id)
if "nodes" in content:
node_issues, node_names, node_ids = _validate_nodes(content["nodes"], schema_def)
issues.extend(node_issues)
if "connections" in content:
issues.extend(_validate_connections(content["connections"], set(node_ids)))
issues.extend(_validate_connections(content["connections"], set(node_names), schema_def))
if "credentials" in content:
credentials = content["credentials"]
if not isinstance(credentials, list):
issues.append("workflow credentials must be an array")
else:
for index, entry in enumerate(credentials):
issues.extend(_validate_credential_binding(entry, index, schema_def))
if node_ids and "credentials" in content and isinstance(content.get("credentials"), list):
known_ids = set(node_ids)
for index, entry in enumerate(content.get("credentials", [])):
if isinstance(entry, dict) and "nodeId" in entry:
node_id = entry["nodeId"]
if isinstance(node_id, str) and node_id not in known_ids:
issues.append(
f"credentials[{index}].nodeId references unknown node id '{node_id}'"
)
return issues
def validate_workflow(workflow_path: Path,
validator: Optional["Draft7Validator"],
reference_profile: Optional[WorkflowReferenceProfile]) -> list[str]:
validator: Optional["Draft202012Validator"],
schema_def: WorkflowSchemaDefinition) -> list[str]:
try:
content = load_json(workflow_path)
except json.JSONDecodeError as exc:
@@ -360,7 +527,7 @@ def validate_workflow(workflow_path: Path,
):
pointer = "/".join(str(part) for part in err.absolute_path) or "<root>"
issues.append(f"schema violation at {pointer}: {err.message}")
issues.extend(validate_workflow_structure(workflow_path, content, reference_profile))
issues.extend(validate_workflow_structure(workflow_path, content, schema_def))
return issues
@@ -369,8 +536,8 @@ def validate_package(
pkg_data: dict,
registry_names: Sequence[str],
available_dirs: Sequence[str],
workflow_schema_validator: Optional["Draft7Validator"] = None,
workflow_reference_profile: Optional[WorkflowReferenceProfile] = None,
workflow_schema_validator: Optional["Draft202012Validator"],
workflow_schema_def: WorkflowSchemaDefinition,
) -> tuple[list[str], list[str]]:
errors: list[str] = []
warnings: list[str] = []
@@ -426,7 +593,7 @@ def validate_package(
def on_exist(candidate: Path, rel: str) -> None:
schema_issues = validate_workflow(candidate,
workflow_schema_validator,
workflow_reference_profile)
workflow_schema_def)
for issue in schema_issues:
errors.append(f"workflow `{rel}`: {issue}")
def validate_entry(entry: str) -> None:
@@ -488,15 +655,15 @@ def main() -> int:
help="Root folder containing package directories",
)
parser.add_argument(
"--workflow-schema",
"--roadmap",
type=Path,
help="Optional workflow JSON schema (default: config/schema/workflow_v1.schema.json when available)",
default=Path("ROADMAP.md"),
help="Path to ROADMAP containing the n8n workflow schema",
)
parser.add_argument(
"--workflow-reference",
"--workflow-schema",
type=Path,
help="Reference n8n-style workflow JSON used to validate workflow structure "
"(default: packages/seed/workflows/demo_gameplay.json when available)",
help="Optional workflow JSON schema override",
)
parser.add_argument(
"--verbose",
@@ -515,46 +682,39 @@ def main() -> int:
return 2
schema_candidate = args.workflow_schema
default_schema = Path("config/schema/workflow_v1.schema.json")
if schema_candidate is None and default_schema.exists():
schema_candidate = default_schema
if schema_candidate is None:
schema_candidate = args.roadmap
workflow_validator: Optional["Draft7Validator"] = None
workflow_schema: Optional[dict] = None
if schema_candidate:
if not schema_candidate.exists():
logger.error("specified workflow schema %s not found", schema_candidate)
logger.error("specified workflow schema source %s not found", schema_candidate)
return 5
try:
workflow_schema = load_json(schema_candidate)
except json.JSONDecodeError as exc:
logger.error("invalid JSON schema %s: %s", schema_candidate, exc)
workflow_schema = (
load_json(schema_candidate)
if schema_candidate.suffix == ".json"
else load_schema_from_roadmap(schema_candidate)
)
except (json.JSONDecodeError, ValueError, FileNotFoundError) as exc:
logger.error("invalid workflow schema source %s: %s", schema_candidate, exc)
return 6
if Draft7Validator is None:
logger.warning("jsonschema dependency not installed; skipping workflow schema validation")
else:
try:
workflow_validator = Draft7Validator(workflow_schema)
except Exception as exc:
logger.error("failed to compile workflow schema %s: %s", schema_candidate, exc)
return 7
reference_path = args.workflow_reference
default_reference = Path("packages/seed/workflows/demo_gameplay.json")
if reference_path is None and default_reference.exists():
reference_path = default_reference
if not workflow_schema:
logger.error("workflow schema could not be loaded")
return 7
workflow_reference_profile: Optional[WorkflowReferenceProfile] = None
if reference_path:
if not reference_path.exists():
logger.error("specified workflow reference %s not found", reference_path)
return 8
workflow_schema_def = build_schema_definition(workflow_schema)
workflow_validator: Optional["Draft202012Validator"] = None
if Draft202012Validator is None:
logger.warning("jsonschema dependency not installed; skipping JSON Schema validation")
else:
try:
reference_workflow = load_json(reference_path)
except json.JSONDecodeError as exc:
logger.error("invalid workflow reference %s: %s", reference_path, exc)
return 9
workflow_reference_profile = build_workflow_profile(reference_workflow)
logger.info("workflow reference loaded: %s", reference_path)
workflow_validator = Draft202012Validator(workflow_schema)
except Exception as exc:
logger.error("failed to compile workflow schema: %s", exc)
return 8
logger.info("workflow schema loaded from %s", schema_candidate)
package_dirs = [
child
@@ -594,7 +754,7 @@ def main() -> int:
registry_names,
available_dirs,
workflow_validator,
workflow_reference_profile,
workflow_schema_def,
)
for err in errors:
logger.error("%s: %s", pkg_json_file, err)

View File

@@ -1,12 +1,18 @@
#include <gtest/gtest.h>
#include "services/impl/config/config_compiler_service.hpp"
#include "services/impl/config/json_config_service.hpp"
#include "services/impl/diagnostics/logger_service.hpp"
#include "services/impl/render/render_coordinator_service.hpp"
#include "services/interfaces/i_config_compiler_service.hpp"
#include "services/impl/shader/shader_system_registry.hpp"
#include "services/interfaces/i_graphics_service.hpp"
#include "services/interfaces/i_shader_system_registry.hpp"
#include <algorithm>
#include <chrono>
#include <filesystem>
#include <fstream>
#include <memory>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>
@@ -60,40 +66,58 @@ public:
void* GetGraphicsQueue() const override { return nullptr; }
};
class StubShaderSystemRegistry : public sdl3cpp::services::IShaderSystemRegistry {
class ScopedTempDir {
public:
std::unordered_map<std::string, sdl3cpp::services::ShaderPaths> BuildShaderMap() override {
return {};
}
sdl3cpp::services::ShaderReflection GetReflection(const std::string&) const override {
return {};
}
std::vector<sdl3cpp::services::ShaderPaths::TextureBinding> GetDefaultTextures(
const std::string&) const override {
return {};
}
std::string GetActiveSystemId() const override {
return "materialx";
}
};
class StubConfigCompilerService final : public sdl3cpp::services::IConfigCompilerService {
public:
explicit StubConfigCompilerService(const sdl3cpp::services::ConfigCompilerResult& result)
: result_(result) {}
sdl3cpp::services::ConfigCompilerResult Compile(const std::string&) override {
return result_;
ScopedTempDir() {
auto base = std::filesystem::temp_directory_path();
const auto suffix = std::to_string(
std::chrono::steady_clock::now().time_since_epoch().count());
path_ = base / ("sdl3cpp_render_coordinator_test_" + suffix);
std::filesystem::create_directories(path_);
}
const sdl3cpp::services::ConfigCompilerResult& GetLastResult() const override {
return result_;
~ScopedTempDir() {
std::error_code ec;
std::filesystem::remove_all(path_, ec);
}
const std::filesystem::path& Path() const {
return path_;
}
private:
sdl3cpp::services::ConfigCompilerResult result_;
std::filesystem::path path_;
};
std::filesystem::path GetRepoRoot() {
return std::filesystem::path(__FILE__).parent_path().parent_path();
}
void WriteFile(const std::filesystem::path& path, const std::string& contents) {
std::filesystem::create_directories(path.parent_path());
std::ofstream output(path);
if (!output.is_open()) {
throw std::runtime_error("Failed to open file for write: " + path.string());
}
output << contents;
}
void CopyBootTemplate(const std::filesystem::path& targetDir) {
const auto repoRoot = GetRepoRoot();
const auto source = repoRoot / "packages" / "bootstrap" / "workflows" / "boot_default.json";
const auto destination = targetDir / "workflows" / "templates" / "boot_default.json";
std::filesystem::create_directories(destination.parent_path());
std::ifstream input(source);
if (!input.is_open()) {
throw std::runtime_error("Missing boot workflow template: " + source.string());
}
std::ofstream output(destination);
if (!output.is_open()) {
throw std::runtime_error("Failed to write boot workflow template: " + destination.string());
}
output << input.rdbuf();
}
std::string JoinCalls(const std::vector<std::string>& calls) {
std::string joined;
for (size_t index = 0; index < calls.size(); ++index) {