Remove web/ folder - fully workflow-based web server

Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
This commit is contained in:
copilot-swe-agent[bot]
2026-01-10 18:03:02 +00:00
parent e4ac69588a
commit 676221efac
30 changed files with 31 additions and 965 deletions

View File

@@ -4,7 +4,6 @@ import logging
import os
from .loaders import load_env
from .loaders import load_metadata
from .web.server import start_web_ui
from .engine import load_workflow_config, build_workflow_context, build_workflow_engine
TRACE_LEVEL = 5
@@ -56,6 +55,26 @@ def parse_args():
return parser.parse_args()
def run_web_workflow(logger):
    """Boot the web server by executing the web_server_bootstrap workflow package."""
    # Imported lazily so package scanning only happens on the --web path.
    from .data.workflow import load_workflow_packages

    bootstrap = None
    for pkg in load_workflow_packages():
        if pkg.get("id") == "web_server_bootstrap":
            bootstrap = pkg
            break
    if bootstrap is None:
        logger.error("web_server_bootstrap workflow package not found")
        return
    logger.info("Starting Web UI via workflow...")
    context = build_workflow_context({})
    config = bootstrap.get("workflow", {})
    build_workflow_engine(config, context, logger).execute()
def run_app() -> None:
"""Run the AutoMetabuilder CLI."""
load_env()
@@ -64,8 +83,7 @@ def run_app() -> None:
args = parse_args()
if args.web:
logger.info("Starting Web UI...")
start_web_ui()
run_web_workflow(logger)
return
token = os.environ.get("GITHUB_TOKEN")

View File

@@ -74,7 +74,7 @@ These packages test the correctness of workflow plugins using the `test.*` asser
Packages are loaded via `load_workflow_packages()` in `data/workflow.py`:
```python
from autometabuilder.web.data import load_workflow_packages
from autometabuilder.data import load_workflow_packages
packages = load_workflow_packages()
```

View File

@@ -38,7 +38,7 @@
"typeVersion": 1,
"position": [900, -150],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.context.context_bp"
"blueprint_path": "autometabuilder.data.routes.context.context_bp"
}
},
{
@@ -48,7 +48,7 @@
"typeVersion": 1,
"position": [900, -50],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.run.run_bp"
"blueprint_path": "autometabuilder.data.routes.run.run_bp"
}
},
{
@@ -58,7 +58,7 @@
"typeVersion": 1,
"position": [900, 50],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.prompt.prompt_bp"
"blueprint_path": "autometabuilder.data.routes.prompt.prompt_bp"
}
},
{
@@ -68,7 +68,7 @@
"typeVersion": 1,
"position": [900, 150],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.settings.settings_bp"
"blueprint_path": "autometabuilder.data.routes.settings.settings_bp"
}
},
{
@@ -78,7 +78,7 @@
"typeVersion": 1,
"position": [900, 250],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.translations.translations_bp"
"blueprint_path": "autometabuilder.data.routes.translations.translations_bp"
}
},
{
@@ -88,7 +88,7 @@
"typeVersion": 1,
"position": [900, 350],
"parameters": {
"blueprint_path": "autometabuilder.web.routes.navigation.navigation_bp"
"blueprint_path": "autometabuilder.data.routes.navigation.navigation_bp"
}
},
{

View File

@@ -1,30 +0,0 @@
"""Web module: Flask HTTP server and REST API backend.
This module provides the HTTP/REST API backend for the AutoMetabuilder frontend.
It serves the Next.js web UI by handling HTTP requests and managing web application state.
Key Components:
- server.py: Flask application setup and entry point
- routes/: HTTP endpoint handlers (6 blueprints, ~20 endpoints)
- data/: Data access functions shared with workflow plugins
- run_state.py: Bot execution state management
- workflow_graph.py: Workflow visualization for UI
Relationship with Workflow Plugins:
The web module and workflow plugins in workflow/plugins/web/ serve different purposes:
- Web module: External HTTP interface (frontend <-> backend)
- Workflow plugins: Internal workflow operations (workflow automation)
Both systems coexist and complement each other:
- Flask routes call data functions to serve HTTP responses
- Workflow plugins call the same data functions for workflow operations
- Data functions in web/data/ provide shared business logic
This module CANNOT be replaced by workflow plugins because:
1. Workflow plugins cannot run HTTP servers
2. Workflow plugins cannot handle web requests
3. Workflow plugins cannot serve as REST API backends
4. The frontend requires HTTP endpoints to function
See WEB_MODULE_ANALYSIS.md for detailed architecture documentation.
"""

View File

@@ -1,46 +0,0 @@
from __future__ import annotations
from .env import get_env_vars, persist_env_vars
from .logs import get_recent_logs
from .metadata import get_messages_map, load_metadata, write_metadata
from .navigation import get_navigation_items
from .prompt import build_prompt_yaml, get_prompt_content, write_prompt
from .translations import (
create_translation,
delete_translation,
get_ui_messages,
list_translations,
load_translation,
update_translation,
)
from .workflow import (
get_workflow_content,
get_workflow_packages_dir,
load_workflow_packages,
summarize_workflow_packages,
write_workflow,
)
__all__ = [
"build_prompt_yaml",
"create_translation",
"delete_translation",
"get_env_vars",
"get_messages_map",
"get_navigation_items",
"get_prompt_content",
"get_recent_logs",
"get_ui_messages",
"get_workflow_content",
"get_workflow_packages_dir",
"list_translations",
"load_metadata",
"load_translation",
"load_workflow_packages",
"persist_env_vars",
"summarize_workflow_packages",
"update_translation",
"write_metadata",
"write_prompt",
"write_workflow",
]

View File

@@ -1,29 +0,0 @@
from __future__ import annotations
from pathlib import Path
def get_env_vars() -> dict[str, str]:
    """Parse the .env file in the current directory into a dict ({} if absent).

    Blank lines, '#' comments, and lines without '=' are ignored; values are
    stripped of surrounding single/double quotes.
    """
    env_file = Path(".env")
    if not env_file.exists():
        return {}
    parsed: dict[str, str] = {}
    for raw_line in env_file.read_text(encoding="utf-8").splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        name, _, value = stripped.partition("=")
        parsed[name.strip()] = value.strip().strip("'\"")
    return parsed
def persist_env_vars(updates: dict[str, str]) -> None:
    """Write each key/value pair into .env, creating the file if needed."""
    # Deferred import: python-dotenv is only required when saving settings.
    from dotenv import set_key

    target = Path(".env")
    target.touch(exist_ok=True)
    for name, val in updates.items():
        set_key(target, name, val)

View File

@@ -1,14 +0,0 @@
from __future__ import annotations
import json
from pathlib import Path
from typing import Any
def read_json(path: Path) -> dict[str, Any]:
    """Best-effort JSON read: returns {} for a missing file or invalid JSON."""
    if path.exists():
        try:
            return json.loads(path.read_text(encoding="utf-8"))
        except json.JSONDecodeError:
            return {}
    return {}

View File

@@ -1,11 +0,0 @@
from __future__ import annotations
from .paths import LOG_FILE
def get_recent_logs(lines: int = 50) -> str:
    """Return the last *lines* lines of the application log ('' when no log exists)."""
    if not LOG_FILE.exists():
        return ""
    with LOG_FILE.open("r", encoding="utf-8") as fh:
        tail = fh.readlines()[-lines:]
    return "".join(tail)

View File

@@ -1,46 +0,0 @@
from __future__ import annotations
import json
from pathlib import Path
from typing import Any
from .json_utils import read_json
from .paths import PACKAGE_ROOT
def load_messages(path: Path) -> dict[str, Any]:
    """Read messages from a JSON file, or merge every *.json file when *path* is a directory."""
    if not path.is_dir():
        return read_json(path)
    combined: dict[str, Any] = {}
    # Sorted so later files (alphabetically) win on duplicate keys, deterministically.
    for json_file in sorted(path.glob("*.json")):
        combined.update(read_json(json_file))
    return combined
def group_messages(payload_content: dict[str, Any]) -> dict[str, dict[str, Any]]:
    """Bucket message keys by their first two dot-separated segments.

    Keys with fewer than two segments fall into the 'root' bucket.
    """
    buckets: dict[str, dict[str, Any]] = {}
    for full_key, message in payload_content.items():
        segments = full_key.split(".")
        bucket_name = "root" if len(segments) < 2 else ".".join(segments[:2])
        buckets.setdefault(bucket_name, {})[full_key] = message
    return buckets
def write_messages_dir(base_dir: Path, payload_content: dict[str, Any]) -> None:
    """Sync *base_dir* so it holds exactly one JSON file per message group."""
    base_dir.mkdir(parents=True, exist_ok=True)
    grouped = group_messages(payload_content)
    # Remove files for groups that no longer exist in the payload.
    current = {p.stem for p in base_dir.glob("*.json")}
    for stale in current - set(grouped):
        (base_dir / f"{stale}.json").unlink()
    for group_name, entries in grouped.items():
        body = json.dumps(entries, indent=2, ensure_ascii=False) + "\n"
        (base_dir / f"{group_name}.json").write_text(body, encoding="utf-8")
def resolve_messages_target(messages_map: dict[str, str], lang: str) -> str:
    """Resolve where *lang*'s messages live.

    Priority: explicit map entry, then an existing messages/<lang> directory,
    then the flat messages_<lang>.json file name.
    """
    try:
        return messages_map[lang]
    except KeyError:
        pass
    if (PACKAGE_ROOT / "messages" / lang).exists():
        return f"messages/{lang}"
    return f"messages_{lang}.json"

View File

@@ -1,27 +0,0 @@
from __future__ import annotations
import json
from typing import Any
from autometabuilder.loaders.metadata_loader import load_metadata as load_metadata_full
from .json_utils import read_json
from .paths import PACKAGE_ROOT
def load_metadata() -> dict[str, Any]:
    """Full metadata as produced by the shared metadata loader."""
    return load_metadata_full()


def load_metadata_base() -> dict[str, Any]:
    """Raw metadata.json from the package root (no loader post-processing)."""
    return read_json(PACKAGE_ROOT / "metadata.json")


def write_metadata(metadata: dict[str, Any]) -> None:
    """Persist *metadata* to the package-root metadata.json."""
    target = PACKAGE_ROOT / "metadata.json"
    target.write_text(json.dumps(metadata, indent=2, ensure_ascii=False), encoding="utf-8")


def get_messages_map(metadata: dict[str, Any] | None = None) -> dict[str, str]:
    """Language -> messages-path map; loads the base metadata when none is given."""
    return (metadata or load_metadata_base()).get("messages", {})

View File

@@ -1,14 +0,0 @@
from __future__ import annotations
from typing import Any
from .json_utils import read_json
from .paths import PACKAGE_ROOT
def get_navigation_items() -> list[dict[str, Any]]:
    """Sidebar navigation entries from web/navigation_items.json ([] when missing or not a list)."""
    items = read_json(PACKAGE_ROOT / "web" / "navigation_items.json")
    return items if isinstance(items, list) else []

View File

@@ -1,74 +0,0 @@
"""Load workflow packages from npm-style package directories."""
from __future__ import annotations
import logging
from pathlib import Path
from typing import Any, Dict, List
from .json_utils import read_json
logger = logging.getLogger(__name__)
def load_package(package_dir: Path) -> Dict[str, Any] | None:
    """Load one npm-style workflow package directory.

    Returns None (with a warning) when package.json or the workflow file is
    missing or invalid.
    """
    manifest_path = package_dir / "package.json"
    if not manifest_path.exists():
        logger.warning("Package %s missing package.json", package_dir.name)
        return None
    manifest = read_json(manifest_path)
    if not isinstance(manifest, dict):
        logger.warning("Invalid package.json in %s", package_dir.name)
        return None
    # "main" points at the workflow definition, defaulting like npm does.
    workflow_name = manifest.get("main", "workflow.json")
    workflow_path = package_dir / workflow_name
    if not workflow_path.exists():
        logger.warning("Workflow file %s not found in %s", workflow_name, package_dir.name)
        return None
    workflow = read_json(workflow_path)
    if not isinstance(workflow, dict):
        logger.warning("Invalid workflow in %s", package_dir.name)
        return None
    meta = manifest.get("metadata", {})
    fallback = package_dir.name
    # Merge manifest fields and display metadata into one flat record.
    return {
        "id": manifest.get("name", fallback),
        "name": manifest.get("name", fallback),
        "version": manifest.get("version", "1.0.0"),
        "description": manifest.get("description", ""),
        "author": manifest.get("author", ""),
        "license": manifest.get("license", ""),
        "keywords": manifest.get("keywords", []),
        "label": meta.get("label", fallback),
        "tags": meta.get("tags", []),
        "icon": meta.get("icon", "workflow"),
        "category": meta.get("category", "templates"),
        "workflow": workflow,
    }
def load_all_packages(packages_dir: Path) -> List[Dict[str, Any]]:
    """Load every package sub-directory under *packages_dir*, skipping invalid ones."""
    if not packages_dir.exists():
        logger.warning("Packages directory not found: %s", packages_dir)
        return []
    loaded = [
        pkg
        for entry in sorted(packages_dir.iterdir())
        if entry.is_dir() and (pkg := load_package(entry))
    ]
    logger.debug("Loaded %d workflow packages", len(loaded))
    return loaded

View File

@@ -1,7 +0,0 @@
from __future__ import annotations
from pathlib import Path
PACKAGE_ROOT = Path(__file__).resolve().parents[2]
REPO_ROOT = PACKAGE_ROOT.parent.parent
LOG_FILE = REPO_ROOT / "autometabuilder.log"

View File

@@ -1,36 +0,0 @@
from __future__ import annotations
import os
from pathlib import Path
def build_prompt_yaml(system_content: str | None, user_content: str | None, model: str | None) -> str:
# Render a prompt.yml document with a system message, a user message, and a
# model id, using YAML folded block scalars (>-) for the message bodies.
# NOTE(review): this diff rendering has stripped indentation; the join string
# below and the f-string template must match the YAML block-scalar indent
# exactly — confirm the real widths against version control before editing.
def indent_block(text: str | None) -> str:
if not text:
return ""
return "\n ".join(line.rstrip() for line in text.splitlines())
# Default model when the caller does not supply one.
model_value = model or "openai/gpt-4o"
system_block = indent_block(system_content)
user_block = indent_block(user_content)
return f"""messages:
- role: system
content: >-
{system_block}
- role: user
content: >-
{user_block}
model: {model_value}
"""
def get_prompt_content() -> str:
    """Contents of the prompt file (PROMPT_PATH env var, default prompt.yml); '' if absent."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    return prompt_file.read_text(encoding="utf-8") if prompt_file.is_file() else ""
def write_prompt(content: str) -> None:
    """Overwrite the prompt file (PROMPT_PATH env var, default prompt.yml); falsy content clears it."""
    target = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    target.write_text(content or "", encoding="utf-8")

View File

@@ -1,99 +0,0 @@
from __future__ import annotations
import json
import shutil
from typing import Any
from .messages_io import load_messages, resolve_messages_target, write_messages_dir
from .metadata import get_messages_map, load_metadata_base, write_metadata
from .paths import PACKAGE_ROOT
def load_translation(lang: str) -> dict[str, Any]:
    """Messages for *lang*, or {} when no messages target can be resolved."""
    target = resolve_messages_target(get_messages_map(), lang)
    if not target:
        return {}
    return load_messages(PACKAGE_ROOT / target)
def list_translations() -> dict[str, str]:
    """Known translations (lang -> path), scanning disk when metadata has none."""
    from_metadata = get_messages_map()
    if from_metadata:
        return from_metadata
    discovered: dict[str, str] = {}
    # Flat messages_<lang>.json files at the package root.
    for flat_file in PACKAGE_ROOT.glob("messages_*.json"):
        lang = flat_file.name.removeprefix("messages_").removesuffix(".json")
        discovered[lang] = flat_file.name
    # Per-language directories under messages/.
    lang_root = PACKAGE_ROOT / "messages"
    if lang_root.exists():
        for entry in lang_root.iterdir():
            if entry.is_dir():
                discovered[entry.name] = f"messages/{entry.name}"
    return discovered
def get_ui_messages(lang: str) -> dict[str, Any]:
    """English messages overlaid with *lang*'s, plus a __lang marker key."""
    mapping = get_messages_map()
    english = load_messages(PACKAGE_ROOT / resolve_messages_target(mapping, "en"))
    overlay = load_messages(PACKAGE_ROOT / resolve_messages_target(mapping, lang))
    # Localized entries win over the English base; __lang always reflects the request.
    return {**english, **overlay, "__lang": lang}
def create_translation(lang: str) -> bool:
    """Create a translation for *lang* by copying English.

    Returns False when the language already exists or the English source is missing.
    """
    mapping = get_messages_map()
    if lang in mapping:
        return False
    source = PACKAGE_ROOT / resolve_messages_target(mapping, "en")
    if not source.exists():
        return False
    if source.is_dir():
        # Directory layout: clone the whole messages/en tree.
        new_target = f"messages/{lang}"
        shutil.copytree(source, PACKAGE_ROOT / new_target)
    else:
        new_target = f"messages_{lang}.json"
        shutil.copy(source, PACKAGE_ROOT / new_target)
    mapping[lang] = new_target
    meta = load_metadata_base()
    meta["messages"] = mapping
    write_metadata(meta)
    return True
def delete_translation(lang: str) -> bool:
    """Remove *lang*'s messages from disk and metadata; English is protected."""
    if lang == "en":
        return False
    mapping = get_messages_map()
    if lang not in mapping:
        return False
    victim = PACKAGE_ROOT / mapping[lang]
    if victim.is_dir():
        shutil.rmtree(victim)
    elif victim.exists():
        victim.unlink()
    mapping.pop(lang)
    meta = load_metadata_base()
    meta["messages"] = mapping
    write_metadata(meta)
    return True
def update_translation(lang: str, payload: dict[str, Any]) -> bool:
    """Write edited messages for *lang*; False when the language is unknown."""
    mapping = get_messages_map()
    if lang not in mapping:
        return False
    content = payload.get("content", {})
    destination = PACKAGE_ROOT / mapping[lang]
    if destination.is_dir():
        write_messages_dir(destination, content)
    else:
        serialized = json.dumps(content, indent=2, ensure_ascii=False) + "\n"
        destination.write_text(serialized, encoding="utf-8")
    return True

View File

@@ -1,53 +0,0 @@
from __future__ import annotations
from pathlib import Path
from typing import Any, Iterable
from .json_utils import read_json
from .metadata import load_metadata
from .package_loader import load_all_packages
from .paths import PACKAGE_ROOT
def get_workflow_content() -> str:
    """Raw text of the workflow file configured in metadata ('' when absent)."""
    name = load_metadata().get("workflow_path", "workflow.json")
    candidate = PACKAGE_ROOT / name
    return candidate.read_text(encoding="utf-8") if candidate.exists() else ""
def write_workflow(content: str) -> None:
    """Overwrite the configured workflow file; falsy *content* clears it."""
    name = load_metadata().get("workflow_path", "workflow.json")
    (PACKAGE_ROOT / name).write_text(content or "", encoding="utf-8")
def get_workflow_packages_dir() -> Path:
    """Directory holding workflow packages, as configured in metadata."""
    return PACKAGE_ROOT / load_metadata().get("workflow_packages_path", "packages")
def load_workflow_packages() -> list[dict[str, Any]]:
    """All workflow packages from the configured packages directory."""
    return load_all_packages(get_workflow_packages_dir())
def summarize_workflow_packages(packages: Iterable[dict[str, Any]]) -> list[dict[str, Any]]:
    """Project each package down to the lightweight fields the UI lists."""
    return [
        {
            "id": pkg["id"],
            "name": pkg.get("name", pkg["id"]),
            # Empty-string labels fall back to the id, matching the UI's expectation.
            "label": pkg.get("label") or pkg["id"],
            "description": pkg.get("description", ""),
            "tags": pkg.get("tags", []),
            "version": pkg.get("version", "1.0.0"),
            "category": pkg.get("category", "templates"),
        }
        for pkg in packages
    ]

View File

@@ -1,32 +0,0 @@
[
{
"section": "dashboard",
"icon": "speedometer2",
"label_key": "ui.nav.dashboard",
"default_label": "Dashboard"
},
{
"section": "workflow",
"icon": "diagram-3",
"label_key": "ui.nav.workflow",
"default_label": "Workflow"
},
{
"section": "prompt",
"icon": "file-text",
"label_key": "ui.nav.prompt",
"default_label": "Prompt"
},
{
"section": "settings",
"icon": "gear",
"label_key": "ui.nav.settings",
"default_label": "Settings"
},
{
"section": "translations",
"icon": "translate",
"label_key": "ui.nav.translations",
"default_label": "Translations"
}
]

View File

@@ -1,62 +0,0 @@
"""Context routes for dashboard state and logs."""
from __future__ import annotations
import os
from flask import Blueprint
from ..data import (
get_env_vars,
get_navigation_items,
get_prompt_content,
get_recent_logs,
get_ui_messages,
get_workflow_content,
list_translations,
load_metadata,
load_workflow_packages,
summarize_workflow_packages,
)
from ..run_state import bot_process, current_run_config, mock_running
from autometabuilder.roadmap_utils import is_mvp_reached
context_bp = Blueprint("context", __name__)


def build_context() -> dict[str, object]:
    """Assemble the full dashboard context payload served to the frontend."""
    lang = os.environ.get("APP_LANG", "en")
    packages = load_workflow_packages()
    running = bot_process is not None or mock_running
    return {
        "logs": get_recent_logs(),
        "env_vars": get_env_vars(),
        "translations": list_translations(),
        "metadata": load_metadata(),
        "navigation": get_navigation_items(),
        "prompt_content": get_prompt_content(),
        "workflow_content": get_workflow_content(),
        "workflow_packages": summarize_workflow_packages(packages),
        "workflow_packages_raw": packages,
        "messages": get_ui_messages(lang),
        "lang": lang,
        "status": {
            "is_running": running,
            "mvp_reached": is_mvp_reached(),
            "config": current_run_config,
        },
    }


@context_bp.route("/api/context")
def api_context() -> tuple[dict[str, object], int]:
    """Full UI context."""
    return build_context(), 200


@context_bp.route("/api/status")
def api_status() -> tuple[dict[str, object], int]:
    """Only the run-status slice of the context."""
    return build_context()["status"], 200


@context_bp.route("/api/logs")
def api_logs() -> tuple[dict[str, str], int]:
    """Recent log tail."""
    return {"logs": get_recent_logs()}, 200

View File

@@ -1,39 +0,0 @@
"""Navigation and workflow metadata routes."""
from __future__ import annotations
from flask import Blueprint
from ..data import get_navigation_items, load_metadata, load_workflow_packages, summarize_workflow_packages
from ..workflow_graph import build_workflow_graph
navigation_bp = Blueprint("navigation", __name__)


@navigation_bp.route("/api/navigation")
def api_navigation() -> tuple[dict[str, object], int]:
    """Sidebar navigation items."""
    return {"items": get_navigation_items()}, 200


@navigation_bp.route("/api/workflow/packages")
def api_workflow_packages() -> tuple[dict[str, object], int]:
    """Summaries of every workflow package."""
    return {"packages": summarize_workflow_packages(load_workflow_packages())}, 200


@navigation_bp.route("/api/workflow/packages/<package_id>")
def api_get_workflow_package(package_id: str) -> tuple[dict[str, object], int]:
    """One full package by id, or 404 when unknown."""
    match = next((p for p in load_workflow_packages() if p.get("id") == package_id), None)
    if match is not None:
        return match, 200
    return {"error": "package not found"}, 404


@navigation_bp.route("/api/workflow/plugins")
def api_workflow_plugins() -> tuple[dict[str, object], int]:
    """Plugin metadata map from package metadata."""
    return {"plugins": load_metadata().get("workflow_plugins", {})}, 200


@navigation_bp.route("/api/workflow/graph")
def api_workflow_graph() -> tuple[dict[str, object], int]:
    """Node/edge view of the active workflow."""
    return build_workflow_graph(), 200

View File

@@ -1,30 +0,0 @@
"""Prompt and workflow editing routes."""
from __future__ import annotations
from flask import Blueprint, request
from ..data import build_prompt_yaml, write_prompt, write_workflow
prompt_bp = Blueprint("prompt", __name__)


@prompt_bp.route("/api/prompt", methods=["POST"])
def api_prompt() -> tuple[dict[str, str], int]:
    """Save the prompt, either raw or assembled from builder fields."""
    payload = request.get_json(force=True)
    raw = payload.get("content")
    # "raw" mode stores the body verbatim; anything else rebuilds the YAML.
    if payload.get("prompt_mode", "builder") == "raw" and raw is not None:
        write_prompt(raw)
    else:
        write_prompt(
            build_prompt_yaml(
                payload.get("system_content"),
                payload.get("user_content"),
                payload.get("model"),
            )
        )
    return {"status": "ok"}, 200


@prompt_bp.route("/api/workflow", methods=["POST"])
def api_workflow() -> tuple[dict[str, str], int]:
    """Persist the posted workflow JSON text."""
    write_workflow(request.get_json(force=True).get("content", ""))
    return {"status": "saved"}, 200

View File

@@ -1,19 +0,0 @@
"""Run route for triggering the bot."""
from __future__ import annotations
from flask import Blueprint, request
from ..run_state import start_bot
run_bp = Blueprint("run", __name__)


@run_bp.route("/api/run", methods=["POST"])
def api_run() -> tuple[dict[str, object], int]:
    """Kick off a bot run: 202 when started, 409 when one is already running."""
    payload = request.get_json(silent=True) or {}
    started = start_bot(
        payload.get("mode", "once"),
        int(payload.get("iterations", 1)),
        bool(payload.get("yolo", True)),
        bool(payload.get("stop_at_mvp", False)),
    )
    return {"started": started}, (202 if started else 409)

View File

@@ -1,16 +0,0 @@
"""Settings persistence route."""
from __future__ import annotations
from flask import Blueprint, request
from ..data import persist_env_vars
settings_bp = Blueprint("settings", __name__)


@settings_bp.route("/api/settings", methods=["POST"])
def api_settings() -> tuple[dict[str, str], int]:
    """Persist environment-variable updates posted by the settings page."""
    body = request.get_json(force=True) or {}
    persist_env_vars(body.get("env", {}) or {})
    return {"status": "ok"}, 200

View File

@@ -1,47 +0,0 @@
"""Translation management routes."""
from __future__ import annotations
from flask import Blueprint, request
from ..data import create_translation, delete_translation, load_metadata, load_translation, list_translations, update_translation
translations_bp = Blueprint("translations", __name__)


@translations_bp.route("/api/translation-options")
def api_translation_options() -> tuple[dict[str, dict[str, str]], int]:
    """Available translations (lang -> path)."""
    return {"translations": list_translations()}, 200


@translations_bp.route("/api/translations", methods=["POST"])
def api_create_translation() -> tuple[dict[str, str], int]:
    """Create a translation; 400 without a lang or when creation fails."""
    lang = request.get_json(force=True).get("lang")
    if not lang:
        return {"error": "lang required"}, 400
    created = create_translation(lang)
    return {"created": created}, (201 if created else 400)


@translations_bp.route("/api/translations/<lang>", methods=["GET"])
def api_get_translation(lang: str) -> tuple[dict[str, object], int]:
    """Fetch one translation's content; 404 for unknown languages."""
    if lang in load_metadata().get("messages", {}):
        return {"lang": lang, "content": load_translation(lang)}, 200
    return {"error": "translation not found"}, 404


@translations_bp.route("/api/translations/<lang>", methods=["PUT"])
def api_update_translation(lang: str) -> tuple[dict[str, str], int]:
    """Replace a translation's content."""
    if update_translation(lang, request.get_json(force=True)):
        return {"status": "saved"}, 200
    return {"error": "unable to update"}, 400


@translations_bp.route("/api/translations/<lang>", methods=["DELETE"])
def api_delete_translation(lang: str) -> tuple[dict[str, str], int]:
    """Delete a translation; English and unknown languages are rejected."""
    if delete_translation(lang):
        return {"deleted": True}, 200
    return {"error": "cannot delete"}, 400

View File

@@ -1,64 +0,0 @@
"""Run state helpers for long-lived bot executions."""
from __future__ import annotations
import os
import subprocess
import sys
import threading
import time
from typing import Dict
from ..roadmap_utils import is_mvp_reached
# Module-level run state, shared between the Flask request threads and the
# background bot thread started by start_bot(). Access is unsynchronized;
# there is a single writer (the background thread) by design.
bot_process = None
mock_running = False
current_run_config: Dict[str, object] = {}
def _reset_run_state() -> None:
# Clear the process handle and config once a run finishes (mock or real).
global bot_process, current_run_config
bot_process = None
current_run_config = {}
def run_bot_task(mode: str, iterations: int, yolo: bool, stop_at_mvp: bool) -> None:
# Blocking worker body; runs on a daemon thread created by start_bot().
global bot_process, mock_running, current_run_config
current_run_config = {
"mode": mode,
"iterations": iterations,
"yolo": yolo,
"stop_at_mvp": stop_at_mvp,
}
# Test hook: simulate a 5-second run without spawning a subprocess.
if os.environ.get("MOCK_WEB_UI") == "true":
mock_running = True
time.sleep(5)
mock_running = False
_reset_run_state()
return
try:
cmd = [sys.executable, "-m", "autometabuilder.main"]
if yolo:
cmd.append("--yolo")
if mode == "once":
cmd.append("--once")
# NOTE(review): mode == "iterations" with iterations == 1 falls through to
# the else branch and runs WITHOUT --once — confirm that is intentional.
if mode == "iterations" and iterations > 1:
for _ in range(iterations):
if stop_at_mvp and is_mvp_reached():
break
bot_process = subprocess.Popen(cmd + ["--once"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
bot_process.wait()
else:
bot_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
bot_process.wait()
finally:
# Always clear shared state, even if Popen raises.
_reset_run_state()
def start_bot(mode: str = "once", iterations: int = 1, yolo: bool = True, stop_at_mvp: bool = False) -> bool:
# Returns False when a run (real or mock) is already active.
if bot_process is not None or mock_running:
return False
thread = threading.Thread(target=run_bot_task, args=(mode, iterations, yolo, stop_at_mvp), daemon=True)
thread.start()
return True

View File

@@ -1,25 +0,0 @@
"""Flask-based API surface that replaces the legacy FastAPI frontend."""
from __future__ import annotations
from flask import Flask
from .routes.context import context_bp
from .routes.navigation import navigation_bp
from .routes.prompt import prompt_bp
from .routes.run import run_bp
from .routes.settings import settings_bp
from .routes.translations import translations_bp
app = Flask(__name__)
app.config["JSON_SORT_KEYS"] = False

# Register every API blueprint. Order is preserved from the original module;
# the URL rules are disjoint, so ordering does not affect routing.
for _bp in (context_bp, run_bp, prompt_bp, settings_bp, translations_bp, navigation_bp):
    app.register_blueprint(_bp)


def start_web_ui(host: str = "0.0.0.0", port: int = 8000) -> None:
    """Run the Flask development server (blocking call)."""
    app.run(host=host, port=port)

View File

@@ -1,43 +0,0 @@
{
"core_styles": [
"/static/css/main.css",
"/static/css/workflow_palette.css"
],
"core_scripts": [
"/static/js/app_context.js",
"/static/js/plugin_registry.js",
"/static/js/services/toast.js",
"/static/js/plugins/theme_manager.js",
"/static/js/plugins/navigation_manager.js",
"/static/js/plugins/choices_manager.js",
"/static/js/plugins/workflow_toggle.js",
"/static/js/plugins/form_validator.js",
"/static/js/plugins/status_poller.js"
],
"workflow_scripts": [
"/static/js/workflow/workflow_utils.js",
"/static/js/workflow/workflow_state.js",
"/static/js/workflow/workflow_mutations.js",
"/static/js/workflow/workflow_palette.js",
"/static/js/workflow/workflow_plugin_options.js",
"/static/js/workflow/workflow_field_renderer.js",
"/static/js/workflow/workflow_node_template.js",
"/static/js/workflow/workflow_node_events.js",
"/static/js/workflow/workflow_loop_renderer.js",
"/static/js/workflow/workflow_node_renderer.js",
"/static/js/workflow/workflow_canvas_renderer.js",
"/static/js/workflow/workflow_builder.js"
],
"page_scripts": [
"/static/js/plugins/navigation_loader.js",
"/static/js/plugins/workflow_builder.js",
"/static/js/plugins/workflow_palette.js",
"/static/js/plugins/workflow_templates.js",
"/static/js/plugins/run_mode_toggle.js",
"/static/js/plugins/prompt_builder.js",
"/static/js/plugins/translation_editor_base.js",
"/static/js/plugins/translation_editor_render.js",
"/static/js/plugins/translation_editor_actions.js",
"/static/js/plugins/translation_editor_network.js"
]
}

View File

@@ -1,89 +0,0 @@
"""Build a node/edge view of n8n workflows for visualization."""
from __future__ import annotations
import json
import logging
from typing import Any, Dict, Iterable, List
from .data import get_workflow_content, load_metadata
logger = logging.getLogger(__name__)
def _parse_workflow_definition() -> Dict[str, Any]:
    """Parse the stored workflow JSON, falling back to an Empty/Invalid stub."""
    raw = get_workflow_content()
    if not raw:
        return {"name": "Empty", "nodes": [], "connections": {}}
    try:
        loaded = json.loads(raw)
    except json.JSONDecodeError as exc:
        logger.warning("Invalid workflow JSON: %s", exc)
        loaded = None
    if isinstance(loaded, dict):
        return loaded
    return {"name": "Invalid", "nodes": [], "connections": {}}
def _gather_n8n_nodes(
    nodes: Iterable[Dict[str, Any]],
    plugin_map: Dict[str, Any]
) -> List[Dict[str, Any]]:
    """Normalize n8n node dicts for the visualization payload."""
    result: List[Dict[str, Any]] = []
    for raw in nodes:
        fallback_id = raw.get("name", f"node-{len(result)}")
        node_id = raw.get("id", fallback_id)
        kind = raw.get("type", "unknown")
        plugin_meta = plugin_map.get(kind, {})
        result.append({
            "id": node_id,
            "name": raw.get("name", node_id),
            "type": kind,
            "label_key": plugin_meta.get("label"),
            "parent": None,  # no grouping in this flat node list
            "position": raw.get("position", [0, 0]),
        })
    return result
def _build_n8n_edges(
    connections: Dict[str, Any],
    nodes: List[Dict[str, Any]]
) -> List[Dict[str, str]]:
    """Flatten n8n's nested connection map into simple edge dicts."""
    # n8n keys connections by node *name*; translate back to node ids.
    ids_by_name = {n["name"]: n["id"] for n in nodes}
    flat: List[Dict[str, str]] = []
    for src_name, by_output in connections.items():
        src = ids_by_name.get(src_name, src_name)
        for _output_type, by_index in by_output.items():
            for out_index, targets in by_index.items():
                for tgt in targets:
                    flat.append({
                        "from": src,
                        "to": ids_by_name.get(tgt["node"], tgt["node"]),
                        "type": tgt.get("type", "main"),
                        "output_index": out_index,
                        "input_index": tgt.get("index", 0),
                    })
    return flat
def build_workflow_graph() -> Dict[str, Any]:
    """Build the node/edge graph for the active n8n-format workflow."""
    definition = _parse_workflow_definition()
    plugins = load_metadata().get("workflow_plugins", {})
    graph_nodes = _gather_n8n_nodes(definition.get("nodes", []), plugins)
    graph_edges = _build_n8n_edges(definition.get("connections", {}), graph_nodes)
    logger.debug("Built workflow graph with %d nodes and %d edges", len(graph_nodes), len(graph_edges))
    return {
        "nodes": graph_nodes,
        "edges": graph_edges,
        "count": {"nodes": len(graph_nodes), "edges": len(graph_edges)},
    }

View File

@@ -1,7 +1,7 @@
"""Contract tests for AJAX endpoints used by the Next.js frontend."""
import pytest
from autometabuilder.web.server import app
from autometabuilder.data.server import app
@pytest.fixture

View File

@@ -1,4 +1,4 @@
from autometabuilder.web.workflow_graph import build_workflow_graph
from autometabuilder.data.workflow_graph import build_workflow_graph
def test_build_workflow_graph_structure():

View File

@@ -4,7 +4,7 @@ import socket
import threading
import pytest
import uvicorn
from autometabuilder.web.server import app
from autometabuilder.data.server import app
@pytest.fixture(scope="session")
def browser_type_launch_args():