Add workflow JSON validation tool with tests and CI integration

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
This commit is contained in:
copilot-swe-agent[bot]
2026-01-10 23:28:52 +00:00
parent a33a300c5e
commit 759c9efcbf
6 changed files with 472 additions and 1 deletions

View File

@@ -1,6 +1,18 @@
{
"name": "Blank Canvas",
"active": false,
"nodes": [],
"nodes": [
{
"id": "start",
"name": "Start",
"type": "core.start",
"typeVersion": 1,
"position": [
0,
0
],
"parameters": {}
}
],
"connections": {}
}

View File

@@ -0,0 +1,113 @@
#!/usr/bin/env python3
"""Tool to validate all workflow JSON files against the N8N schema."""
import json
import sys
from pathlib import Path
from typing import List, Tuple
# Add the backend directory to the path to import the schema module
backend_dir = Path(__file__).parent.parent.parent
sys.path.insert(0, str(backend_dir))
from autometabuilder.workflow.n8n_schema import N8NWorkflow
def find_workflow_files(base_path: Path) -> List[Path]:
    """Locate every ``workflow.json`` beneath ``base_path / "packages"``.

    Returns:
        Sorted list of matching paths; empty when the packages
        directory does not exist.
    """
    packages_dir = base_path / "packages"
    if not packages_dir.exists():
        return []
    # rglob walks the tree recursively; sorting keeps output deterministic.
    return sorted(packages_dir.rglob("workflow.json"))
def validate_workflow_file(workflow_path: Path) -> Tuple[bool, str]:
    """
    Validate a single workflow JSON file.

    Returns:
        Tuple of (is_valid, error_message)
    """
    # Load the payload, distinguishing parse failures from I/O failures.
    try:
        with open(workflow_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except json.JSONDecodeError as e:
        return False, f"JSON parsing error: {e}"
    except Exception as e:
        return False, f"Error reading file: {e}"

    # The document root must be a JSON object.
    if not isinstance(data, dict):
        return False, "Workflow data must be an object"

    # All three top-level keys the schema relies on must be present.
    missing = [key for key in ("name", "nodes", "connections") if key not in data]
    if missing:
        return False, f"Missing required fields: {', '.join(missing)}"

    name = data["name"]
    if not (isinstance(name, str) and name):
        return False, "Field 'name' must be a non-empty string"

    nodes = data["nodes"]
    if not isinstance(nodes, list):
        return False, "Field 'nodes' must be an array"
    if not nodes:
        return False, "Field 'nodes' must contain at least 1 node (use a start node for blank workflows)"

    if not isinstance(data["connections"], dict):
        return False, "Field 'connections' must be an object"

    # Delegate detailed per-node checks to the shared schema validator.
    if not N8NWorkflow.validate(data):
        return False, "Schema validation failed (check node structure, position, types, etc.)"
    return True, ""
def main():
    """Validate every packaged workflow file and print a summary.

    Returns:
        0 when all files validate; 1 when any fail or none are found.
    """
    # The tool lives under tools/; the workflows live under autometabuilder/.
    script_dir = Path(__file__).parent.parent.parent / "autometabuilder"

    workflow_files = find_workflow_files(script_dir)
    if not workflow_files:
        print("No workflow.json files found in packages directory.")
        return 1

    print(f"Found {len(workflow_files)} workflow file(s) to validate\n")

    errors = []
    for path in workflow_files:
        rel = path.relative_to(script_dir)
        ok, message = validate_workflow_file(path)
        if ok:
            print(f"{rel}")
        else:
            print(f"{rel}: {message}")
            errors.append((rel, message))

    print()
    if not errors:
        print(f"All {len(workflow_files)} workflow file(s) are valid!")
        return 0
    print(f"Validation failed for {len(errors)} file(s):")
    for rel, message in errors:
        print(f" - {rel}: {message}")
    return 1
# Script entry point: process exit status mirrors main()'s return code.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,165 @@
"""Tests for workflow JSON validation tool."""
import json
from pathlib import Path
import pytest
from autometabuilder.tools.validate_workflows import (
find_workflow_files,
validate_workflow_file,
)
def test_find_workflow_files():
    """Every discovered path must be an existing workflow.json file."""
    backend_dir = Path(__file__).parent.parent / "autometabuilder"
    found = find_workflow_files(backend_dir)
    # The repository ships at least one packaged workflow.
    assert len(found) > 0
    for path in found:
        assert path.name == "workflow.json"
        assert path.exists()
def test_validate_all_workflow_files():
    """Every packaged workflow.json must pass full validation."""
    backend_dir = Path(__file__).parent.parent / "autometabuilder"
    failures = []
    for path in find_workflow_files(backend_dir):
        ok, message = validate_workflow_file(path)
        if not ok:
            failures.append((path.relative_to(backend_dir), message))
    # Aggregate every failure into a single readable report.
    if failures:
        error_report = "\n".join(f" - {p}: {m}" for p, m in failures)
        pytest.fail(f"Workflow validation failed for {len(failures)} file(s):\n{error_report}")
def test_validate_minimal_valid_workflow(tmp_path):
    """A workflow with one well-formed node should validate cleanly."""
    node = {
        "id": "node-1",
        "name": "Test Node",
        "type": "core.test",
        "typeVersion": 1,
        "position": [0, 0],
    }
    payload = {"name": "Test Workflow", "nodes": [node], "connections": {}}
    target = tmp_path / "workflow.json"
    target.write_text(json.dumps(payload))
    ok, message = validate_workflow_file(target)
    assert ok, f"Validation failed: {message}"
def test_validate_workflow_with_missing_name(tmp_path):
    """Omitting the required 'name' field must be rejected."""
    payload = {
        "nodes": [{
            "id": "node-1",
            "name": "Test Node",
            "type": "core.test",
            "typeVersion": 1,
            "position": [0, 0],
        }],
        "connections": {},
    }
    target = tmp_path / "workflow.json"
    target.write_text(json.dumps(payload))
    ok, message = validate_workflow_file(target)
    assert not ok
    # The error should point at the missing field.
    assert "name" in message.lower()
def test_validate_workflow_with_empty_nodes(tmp_path):
    """An empty nodes array is invalid and the error must say so."""
    target = tmp_path / "workflow.json"
    target.write_text(json.dumps({"name": "Empty Workflow", "nodes": [], "connections": {}}))
    ok, message = validate_workflow_file(target)
    assert not ok
    lowered = message.lower()
    assert "nodes" in lowered
    assert "at least 1" in lowered
def test_validate_workflow_with_invalid_json(tmp_path):
    """Malformed JSON must fail with a JSON-related error message."""
    target = tmp_path / "workflow.json"
    target.write_text("{ invalid json }")
    ok, message = validate_workflow_file(target)
    assert not ok
    assert "json" in message.lower()
def test_validate_workflow_with_invalid_node(tmp_path):
    """A node lacking name/type/typeVersion/position must be rejected."""
    payload = {
        "name": "Test Workflow",
        # Only 'id' supplied; the schema requires several more fields.
        "nodes": [{"id": "node-1"}],
        "connections": {},
    }
    target = tmp_path / "workflow.json"
    target.write_text(json.dumps(payload))
    ok, _ = validate_workflow_file(target)
    assert not ok
def test_validate_workflow_with_triggers(tmp_path):
    """The optional 'triggers' array must not break validation."""
    payload = {
        "name": "Test Workflow with Triggers",
        "nodes": [{
            "id": "webhook-1",
            "name": "Webhook",
            "type": "n8n-nodes-base.webhook",
            "typeVersion": 1,
            "position": [0, 0],
        }],
        "connections": {},
        "triggers": [{
            "nodeId": "webhook-1",
            "kind": "webhook",
            "enabled": True,
            "meta": {"path": "/api/test"},
        }],
    }
    target = tmp_path / "workflow.json"
    target.write_text(json.dumps(payload))
    ok, message = validate_workflow_file(target)
    assert ok, f"Validation failed: {message}"