Delete backend/autometabuilder/plugins and convert all backend functions to workflow plugins

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
This commit is contained in:
copilot-swe-agent[bot]
2026-01-10 13:23:00 +00:00
parent 9e43fdd57c
commit eff020adca
16 changed files with 389 additions and 167 deletions

View File

@@ -1,19 +1,10 @@
"""Application runner."""
import logging
import os
from . import load_messages
from .cli_args import parse_args
from .env_loader import load_env
from .github_service import create_github_integration
from .logging_config import configure_logging
from .metadata_loader import load_metadata
from .openai_factory import create_openai_client
from .plugin_loader import load_plugins
from .prompt_loader import load_prompt_yaml
from .tool_map_builder import build_tool_map
from .tool_policy_loader import load_tool_policies
from .tool_registry_loader import load_tool_registry
from .tools_loader import load_tools
from .web.server import start_web_ui
from .workflow_config_loader import load_workflow_config
from .workflow_context_builder import build_workflow_context
@@ -32,33 +23,21 @@ def run_app() -> None:
start_web_ui()
return
msgs = load_messages()
token = os.environ.get("GITHUB_TOKEN")
if not token:
logger.error(msgs["error_github_token_missing"])
logger.error("GITHUB_TOKEN environment variable is required")
return
gh = create_github_integration(token, msgs)
client = create_openai_client(token)
prompt = load_prompt_yaml()
metadata = load_metadata()
tools = load_tools(metadata)
tool_map = build_tool_map(gh, load_tool_registry())
load_plugins(tool_map, tools)
# Build minimal workflow context - workflow plugins handle initialization
context_parts = {
"args": args,
"gh": gh,
"msgs": msgs,
"client": client,
"tools": tools,
"tool_map": tool_map,
"prompt": prompt,
"tool_policies": load_tool_policies()
"github_token": token
}
workflow_context = build_workflow_context(context_parts)
logger.debug("Workflow context ready with %s tools", len(tool_map))
engine = build_workflow_engine(load_workflow_config(metadata), workflow_context, logger)
metadata = load_metadata()
workflow_config = load_workflow_config(metadata)
logger.info("Starting workflow: %s", workflow_config.get("name", "Unnamed"))
engine = build_workflow_engine(workflow_config, workflow_context, logger)
engine.execute()

View File

@@ -1,6 +1,6 @@
{
"tools_path": "tools",
"workflow_path": "workflow.json",
"workflow_path": "packages/default_app_workflow/workflow.json",
"workflow_packages_path": "packages",
"messages": {
"en": "messages/en",

View File

@@ -0,0 +1,7 @@
{
"name": "default_app_workflow",
"version": "1.0.0",
"description": "Default application workflow with backend bootstrap and iterative AI loop",
"keywords": ["backend", "bootstrap", "ai", "iterative", "default"],
"license": "MIT"
}

View File

@@ -0,0 +1,335 @@
{
"name": "Default Application Workflow",
"active": false,
"nodes": [
{
"id": "load_messages",
"name": "Load Messages",
"type": "backend.load_messages",
"typeVersion": 1,
"position": [0, 0],
"parameters": {}
},
{
"id": "load_metadata",
"name": "Load Metadata",
"type": "backend.load_metadata",
"typeVersion": 1,
"position": [300, 0],
"parameters": {}
},
{
"id": "load_prompt",
"name": "Load Prompt",
"type": "backend.load_prompt",
"typeVersion": 1,
"position": [600, 0],
"parameters": {}
},
{
"id": "create_github",
"name": "Create GitHub Client",
"type": "backend.create_github",
"typeVersion": 1,
"position": [900, 0],
"parameters": {}
},
{
"id": "create_openai",
"name": "Create OpenAI Client",
"type": "backend.create_openai",
"typeVersion": 1,
"position": [1200, 0],
"parameters": {}
},
{
"id": "load_tools",
"name": "Load Tools",
"type": "backend.load_tools",
"typeVersion": 1,
"position": [1500, 0],
"parameters": {}
},
{
"id": "build_tool_map",
"name": "Build Tool Map",
"type": "backend.build_tool_map",
"typeVersion": 1,
"position": [1800, 0],
"parameters": {}
},
{
"id": "load_plugins",
"name": "Load Plugins",
"type": "backend.load_plugins",
"typeVersion": 1,
"position": [2100, 0],
"parameters": {}
},
{
"id": "load_tool_policies",
"name": "Load Tool Policies",
"type": "backend.load_tool_policies",
"typeVersion": 1,
"position": [2400, 0],
"parameters": {}
},
{
"id": "load_context",
"name": "Load Context",
"type": "core.load_context",
"typeVersion": 1,
"position": [0, 300],
"parameters": {}
},
{
"id": "seed_messages",
"name": "Seed Messages",
"type": "core.seed_messages",
"typeVersion": 1,
"position": [300, 300],
"parameters": {}
},
{
"id": "append_context",
"name": "Append Context",
"type": "core.append_context_message",
"typeVersion": 1,
"position": [600, 300],
"parameters": {}
},
{
"id": "append_user_instruction",
"name": "Append User Instruction",
"type": "core.append_user_instruction",
"typeVersion": 1,
"position": [900, 300],
"parameters": {}
},
{
"id": "main_loop",
"name": "Main Loop",
"type": "control.loop",
"typeVersion": 1,
"position": [1200, 300],
"parameters": {
"max_iterations": 10,
"stop_when": "$no_tool_calls",
"stop_on": "true"
}
},
{
"id": "ai_request",
"name": "AI Request",
"type": "core.ai_request",
"typeVersion": 1,
"position": [1500, 300],
"parameters": {}
},
{
"id": "run_tool_calls",
"name": "Run Tool Calls",
"type": "core.run_tool_calls",
"typeVersion": 1,
"position": [1800, 300],
"parameters": {}
},
{
"id": "append_tool_results",
"name": "Append Tool Results",
"type": "core.append_tool_results",
"typeVersion": 1,
"position": [2100, 300],
"parameters": {}
}
],
"connections": {
"Load Messages": {
"main": {
"0": [
{
"node": "Load Metadata",
"type": "main",
"index": 0
}
]
}
},
"Load Metadata": {
"main": {
"0": [
{
"node": "Load Prompt",
"type": "main",
"index": 0
}
]
}
},
"Load Prompt": {
"main": {
"0": [
{
"node": "Create GitHub Client",
"type": "main",
"index": 0
}
]
}
},
"Create GitHub Client": {
"main": {
"0": [
{
"node": "Create OpenAI Client",
"type": "main",
"index": 0
}
]
}
},
"Create OpenAI Client": {
"main": {
"0": [
{
"node": "Load Tools",
"type": "main",
"index": 0
}
]
}
},
"Load Tools": {
"main": {
"0": [
{
"node": "Build Tool Map",
"type": "main",
"index": 0
}
]
}
},
"Build Tool Map": {
"main": {
"0": [
{
"node": "Load Plugins",
"type": "main",
"index": 0
}
]
}
},
"Load Plugins": {
"main": {
"0": [
{
"node": "Load Tool Policies",
"type": "main",
"index": 0
}
]
}
},
"Load Tool Policies": {
"main": {
"0": [
{
"node": "Load Context",
"type": "main",
"index": 0
}
]
}
},
"Load Context": {
"main": {
"0": [
{
"node": "Seed Messages",
"type": "main",
"index": 0
}
]
}
},
"Seed Messages": {
"main": {
"0": [
{
"node": "Append Context",
"type": "main",
"index": 0
}
]
}
},
"Append Context": {
"main": {
"0": [
{
"node": "Append User Instruction",
"type": "main",
"index": 0
}
]
}
},
"Append User Instruction": {
"main": {
"0": [
{
"node": "Main Loop",
"type": "main",
"index": 0
}
]
}
},
"Main Loop": {
"main": {
"0": [
{
"node": "AI Request",
"type": "main",
"index": 0
}
]
}
},
"AI Request": {
"main": {
"0": [
{
"node": "Run Tool Calls",
"type": "main",
"index": 0
}
]
}
},
"Run Tool Calls": {
"main": {
"0": [
{
"node": "Append Tool Results",
"type": "main",
"index": 0
}
]
}
},
"Append Tool Results": {
"main": {
"0": [
{
"node": "Main Loop",
"type": "main",
"index": 0
}
]
}
}
}
}

View File

@@ -1,15 +0,0 @@
def hello_plugin():
    """Return a fixed greeting, demonstrating the plugin system."""
    greeting = "Hello from the plugin system!"
    return greeting


# OpenAI-style tool schema attached as a function attribute; the plugin
# loader reads this to register the tool (no parameters are accepted).
_METADATA = {
    "type": "function",
    "function": {
        "name": "hello_plugin",
        "description": "A simple greeting from the plugin system.",
        "parameters": {"type": "object", "properties": {}},
    },
}
hello_plugin.tool_metadata = _METADATA

View File

@@ -1,116 +0,0 @@
{
"name": "Default Workflow",
"active": false,
"nodes": [
{
"id": "load_context",
"name": "Load Context",
"type": "core.load_context",
"typeVersion": 1,
"position": [0, 0],
"parameters": {}
},
{
"id": "seed_messages",
"name": "Seed Messages",
"type": "core.seed_messages",
"typeVersion": 1,
"position": [0, 100],
"parameters": {}
},
{
"id": "append_context",
"name": "Append Context",
"type": "core.append_context_message",
"typeVersion": 1,
"position": [300, 50],
"parameters": {}
},
{
"id": "append_user_instruction",
"name": "Append User Instruction",
"type": "core.append_user_instruction",
"typeVersion": 1,
"position": [600, 50],
"parameters": {}
},
{
"id": "main_loop",
"name": "Main Loop",
"type": "control.loop",
"typeVersion": 1,
"position": [900, 50],
"parameters": {
"max_iterations": 10,
"stop_when": "$no_tool_calls",
"stop_on": "true"
}
},
{
"id": "ai_request",
"name": "AI Request",
"type": "core.ai_request",
"typeVersion": 1,
"position": [1200, 50],
"parameters": {}
},
{
"id": "run_tool_calls",
"name": "Run Tool Calls",
"type": "core.run_tool_calls",
"typeVersion": 1,
"position": [1500, 50],
"parameters": {}
},
{
"id": "append_tool_results",
"name": "Append Tool Results",
"type": "core.append_tool_results",
"typeVersion": 1,
"position": [1800, 50],
"parameters": {}
}
],
"connections": {
"Load Context": {
"main": {
"0": [{"node": "Append Context", "type": "main", "index": 0}]
}
},
"Seed Messages": {
"main": {
"0": [{"node": "Append Context", "type": "main", "index": 0}]
}
},
"Append Context": {
"main": {
"0": [{"node": "Append User Instruction", "type": "main", "index": 0}]
}
},
"Append User Instruction": {
"main": {
"0": [{"node": "Main Loop", "type": "main", "index": 0}]
}
},
"Main Loop": {
"main": {
"0": [{"node": "AI Request", "type": "main", "index": 0}]
}
},
"AI Request": {
"main": {
"0": [{"node": "Run Tool Calls", "type": "main", "index": 0}]
}
},
"Run Tool Calls": {
"main": {
"0": [{"node": "Append Tool Results", "type": "main", "index": 0}]
}
},
"Append Tool Results": {
"main": {
"0": [{"node": "Main Loop", "type": "main", "index": 0}]
}
}
}
}

View File

@@ -8,4 +8,6 @@ def run(runtime, _inputs):
gh = runtime.context.get("gh")
registry = load_tool_registry()
tool_map = build_tool_map(gh, registry)
# Store in both store (for workflow) and context (for other plugins)
runtime.context["tool_map"] = tool_map
return {"result": tool_map}

View File

@@ -8,4 +8,6 @@ def run(runtime, _inputs):
msgs = runtime.context.get("msgs", {})
gh = create_github_integration(token, msgs)
# Store in both store (for workflow) and context (for other plugins)
runtime.context["gh"] = gh
return {"result": gh, "initialized": gh is not None}

View File

@@ -7,4 +7,6 @@ def run(runtime, _inputs):
token = runtime.context.get("github_token")
client = create_openai_client(token)
# Store in both store (for workflow) and context (for other plugins)
runtime.context["client"] = client
return {"result": client, "initialized": client is not None}

View File

@@ -2,7 +2,9 @@
from ... import load_messages
def run(runtime, _inputs):
    """Load translation messages and cache them for later plugins.

    Returns:
        dict: {"result": messages} for the workflow store.

    Side effects:
        Writes the loaded messages to runtime.context["msgs"] so other
        plugins can read them without re-loading.
    """
    messages = load_messages()
    # Store in both store (for workflow) and context (for other plugins)
    runtime.context["msgs"] = messages
    return {"result": messages}

View File

@@ -2,7 +2,9 @@
from ...metadata_loader import load_metadata
def run(runtime, _inputs):
    """Load metadata.json and cache it for later plugins.

    Returns:
        dict: {"result": metadata} for the workflow store.

    Side effects:
        Writes the metadata to runtime.context["metadata"] so downstream
        plugins (e.g. the tools loader) can read it directly.
    """
    metadata = load_metadata()
    # Store in both store (for workflow) and context (for other plugins)
    runtime.context["metadata"] = metadata
    return {"result": metadata}

View File

@@ -1,8 +1,13 @@
"""Workflow plugin: load prompt configuration."""
from ...prompt_loader import load_prompt_yaml
from ...model_resolver import resolve_model_name
def run(runtime, _inputs):
    """Load prompt.yml, cache it, and refresh the resolved model name.

    Returns:
        dict: {"result": prompt} for the workflow store.

    Side effects:
        Writes the prompt to runtime.context["prompt"] and updates
        runtime.context["model_name"] via resolve_model_name, replacing
        the default set before the prompt was available.
    """
    prompt = load_prompt_yaml()
    # Store in both store (for workflow) and context (for other plugins)
    runtime.context["prompt"] = prompt
    # Update model_name based on loaded prompt
    runtime.context["model_name"] = resolve_model_name(prompt)
    return {"result": prompt}

View File

@@ -0,0 +1,10 @@
"""Workflow plugin: load tool policies."""
from ...tool_policy_loader import load_tool_policies
def run(runtime, _inputs):
    """Read tool_policies.json and expose it to the workflow.

    The loaded policies go to the workflow store via the return value and
    to runtime.context["tool_policies"] for sibling plugins.
    """
    policies = load_tool_policies()
    runtime.context["tool_policies"] = policies
    return {"result": policies}

View File

@@ -6,4 +6,6 @@ def run(runtime, _inputs):
"""Load tool definitions."""
metadata = runtime.context.get("metadata", {})
tools = load_tools(metadata)
# Store in both store (for workflow) and context (for other plugins)
runtime.context["tools"] = tools
return {"result": tools}

View File

@@ -4,7 +4,12 @@ from .model_resolver import resolve_model_name
def build_workflow_context(parts: dict) -> dict:
    """Build the workflow context dict from *parts*.

    Copies *parts* and adds a "model_name" entry. When a prompt is
    present it drives model resolution; otherwise a default is resolved
    from an empty prompt (workflow plugins load the prompt later and
    update model_name themselves).

    Args:
        parts: Pre-built context entries (clients, tools, messages, ...).

    Returns:
        A new dict containing every entry of *parts* plus "model_name".
    """
    context = dict(parts)
    # Only resolve model if prompt is available, otherwise use default
    if "prompt" in parts:
        context["model_name"] = resolve_model_name(parts["prompt"])
    else:
        # Workflow plugins will load prompt, model will be resolved then
        context["model_name"] = resolve_model_name({})
    return context