Introduce AutoMetabuilder core components and workflow packages:

- Implement core components: CLI argument parsing, environment loading, GitHub service creation, and logging configuration.
- Add support for OpenAI client setup and model resolution.
- Develop SDLC context loader from GitHub and repository files.
- Implement workflow context and engine builders.
- Introduce major workflow packages: `game_tick_loop` and `contextual_iterative_loop`.
- Update localization files with new package descriptions and labels.
- Streamline web navigation by loading items from a dedicated JSON file.
This commit is contained in:
2026-01-10 00:45:46 +00:00
parent d1857fc195
commit 877ba64de8
184 changed files with 1381 additions and 4171 deletions

7
.gitignore vendored
View File

@@ -114,7 +114,7 @@ celerybeat.pid
*.sage.py
# Environments
.env
backend/.env
.venv
env/
venv/
@@ -154,3 +154,8 @@ cmake-build-*/
# VS Code
.vscode/
frontend/.next/
frontend/node_modules/
frontend/.env.local
frontend/.env.development.local
frontend/.env.production.local

Binary file not shown.

Binary file not shown.

View File

@@ -13,7 +13,7 @@ RUN apt-get update && apt-get install -y \
RUN pip install poetry
# Copy the current directory contents into the container at /app
COPY . /app
COPY .. /app
# Install project dependencies
RUN poetry config virtualenvs.create false \

View File

@@ -0,0 +1,240 @@
"""Helpers for loading metadata, translations, and workflow assets."""
from __future__ import annotations
import json
import os
import shutil
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional
PACKAGE_ROOT = Path(__file__).resolve().parents[1]
REPO_ROOT = PACKAGE_ROOT.parent.parent
LOG_FILE = REPO_ROOT / "autometabuilder.log"
def _read_json(path: Path) -> Dict[str, Any]:
if not path.exists():
return {}
try:
return json.loads(path.read_text(encoding="utf-8"))
except json.JSONDecodeError:
return {}
def build_prompt_yaml(system_content: str | None, user_content: str | None, model: str | None) -> str:
def indent_block(text: str | None) -> str:
if not text:
return ""
return "\n ".join(line.rstrip() for line in text.splitlines())
model_value = model or "openai/gpt-4o"
system_block = indent_block(system_content)
user_block = indent_block(user_content)
return f\"\"\"messages:
- role: system
content: >-
{system_block}
- role: user
content: >-
{user_block}
model: {model_value}
\"\"\"
def load_metadata() -> Dict[str, Any]:
    """Read the package-level ``metadata.json`` (empty dict when absent)."""
    return _read_json(PACKAGE_ROOT / "metadata.json")


def write_metadata(metadata: Dict[str, Any]) -> None:
    """Serialise *metadata* back to ``metadata.json`` with stable formatting."""
    target = PACKAGE_ROOT / "metadata.json"
    payload = json.dumps(metadata, indent=2, ensure_ascii=False)
    target.write_text(payload, encoding="utf-8")


def get_messages_map(metadata: Optional[Dict[str, Any]] = None) -> Dict[str, str]:
    """Return the language -> messages-file mapping from *metadata*.

    Falls back to loading metadata from disk when *metadata* is falsy.
    """
    source = metadata or load_metadata()
    return source.get("messages", {})


def load_translation(lang: str) -> Dict[str, Any]:
    """Load the translation file registered for *lang*, or ``{}``."""
    file_name = get_messages_map().get(lang)
    if not file_name:
        return {}
    return _read_json(PACKAGE_ROOT / file_name)


def list_translations() -> Dict[str, str]:
    """Map language codes to messages files, scanning disk as a fallback."""
    registered = get_messages_map()
    if registered:
        return registered
    # No mapping in metadata: derive one from messages_<lang>.json files.
    return {
        candidate.name.removeprefix("messages_").removesuffix(".json"): candidate.name
        for candidate in PACKAGE_ROOT.glob("messages_*.json")
    }
def get_ui_messages(lang: str) -> Dict[str, Any]:
    """Return the English messages overlaid with *lang*'s translations."""
    mapping = get_messages_map()
    default_file = mapping.get("en", "messages_en.json")
    # English acts as the base; localized keys win on conflict.
    merged = dict(_read_json(PACKAGE_ROOT / default_file))
    merged.update(_read_json(PACKAGE_ROOT / mapping.get(lang, default_file)))
    merged["__lang"] = lang  # expose the resolved language to the UI
    return merged


def get_prompt_content() -> str:
    """Read the prompt file (``PROMPT_PATH`` env var, default ``prompt.yml``)."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    if not prompt_file.is_file():
        return ""
    return prompt_file.read_text(encoding="utf-8")


def write_prompt(content: str) -> None:
    """Overwrite the prompt file with *content* (empty string when falsy)."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    prompt_file.write_text(content or "", encoding="utf-8")
def get_workflow_content() -> str:
    """Read the workflow file named by metadata (default ``workflow.json``)."""
    workflow_file = PACKAGE_ROOT / load_metadata().get("workflow_path", "workflow.json")
    if not workflow_file.exists():
        return ""
    return workflow_file.read_text(encoding="utf-8")


def write_workflow(content: str) -> None:
    """Overwrite the workflow file with *content* (empty string when falsy)."""
    workflow_file = PACKAGE_ROOT / load_metadata().get("workflow_path", "workflow.json")
    workflow_file.write_text(content or "", encoding="utf-8")


def get_navigation_items() -> List[Dict[str, Any]]:
    """Load the web navigation items; ``[]`` when the file is missing/invalid."""
    parsed = _read_json(PACKAGE_ROOT / "web" / "navigation_items.json")
    # _read_json yields {} for bad/missing files; only a list is usable here.
    return parsed if isinstance(parsed, list) else []
def get_workflow_packages_dir() -> Path:
    """Resolve the directory holding workflow package JSON files."""
    dir_name = load_metadata().get("workflow_packages_path", "workflow_packages")
    return PACKAGE_ROOT / dir_name


def load_workflow_packages() -> List[Dict[str, Any]]:
    """Load every ``*.json`` package definition, normalising each entry.

    Guarantees each returned dict has an ``id`` (falling back to the file
    stem) and a ``workflow`` key (defaulting to an empty node list).
    """
    packages_dir = get_workflow_packages_dir()
    if not packages_dir.exists():
        return []
    packages: List[Dict[str, Any]] = []
    for entry in sorted(packages_dir.iterdir()):
        if entry.suffix != ".json":
            continue
        package = _read_json(entry)
        if not isinstance(package, dict):
            continue
        package["id"] = package.get("id") or entry.stem
        package.setdefault("workflow", {"nodes": []})
        packages.append(package)
    return packages
def summarize_workflow_packages(packages: Iterable[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Project each package dict down to its display fields (id/label/description/tags)."""
    return [
        {
            "id": pkg["id"],
            # A missing or empty label falls back to the package id.
            "label": pkg.get("label") or pkg["id"],
            "description": pkg.get("description", ""),
            "tags": pkg.get("tags", []),
        }
        for pkg in packages
    ]
def get_env_vars() -> Dict[str, str]:
    """Parse the local ``.env`` file into a dict (no variable interpolation).

    Blank lines, ``#`` comments, and lines without ``=`` are skipped;
    surrounding single/double quotes are stripped from values.
    """
    env_file = Path(".env")
    if not env_file.exists():
        return {}
    parsed: Dict[str, str] = {}
    for line in env_file.read_text(encoding="utf-8").splitlines():
        stripped = line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        parsed[key.strip()] = value.strip().strip("'\"")
    return parsed
def persist_env_vars(updates: Dict[str, str]) -> None:
    """Write each key/value pair in *updates* into the local ``.env`` file."""
    # Imported lazily so this module loads without python-dotenv installed
    # until persistence is actually requested.
    from dotenv import set_key

    env_path = Path(".env")
    env_path.touch(exist_ok=True)
    for key, value in updates.items():
        set_key(env_path, key, value)


def get_recent_logs(lines: int = 50) -> str:
    """Return the last *lines* lines of the shared log file.

    Fixes two issues with the previous tail implementation: the whole
    (potentially large) log was read into memory, and ``lines=0``
    returned the ENTIRE file because ``content[-0:]`` slices from the
    start. A bounded deque keeps only the tail and yields "" for 0.
    """
    from collections import deque

    if not LOG_FILE.exists():
        return ""
    with LOG_FILE.open("r", encoding="utf-8") as handle:
        return "".join(deque(handle, maxlen=lines))
def create_translation(lang: str) -> bool:
    """Register a new translation for *lang* by copying the English file.

    Returns False when the language already exists or the English base
    file is missing; True once the copy is registered in metadata.
    """
    mapping = get_messages_map()
    if lang in mapping:
        return False
    base_file = PACKAGE_ROOT / mapping.get("en", "messages_en.json")
    if not base_file.exists():
        return False
    new_name = f"messages_{lang}.json"
    shutil.copy(base_file, PACKAGE_ROOT / new_name)
    mapping[lang] = new_name
    metadata = load_metadata()
    metadata["messages"] = mapping
    write_metadata(metadata)
    return True


def delete_translation(lang: str) -> bool:
    """Remove *lang*'s translation file and its metadata entry.

    English is the base language and can never be deleted.
    """
    if lang == "en":
        return False
    mapping = get_messages_map()
    if lang not in mapping:
        return False
    translation_file = PACKAGE_ROOT / mapping[lang]
    if translation_file.exists():
        translation_file.unlink()
    del mapping[lang]
    metadata = load_metadata()
    metadata["messages"] = mapping
    write_metadata(metadata)
    return True


def update_translation(lang: str, payload: Dict[str, Any]) -> bool:
    """Replace the stored translation for *lang* with ``payload["content"]``."""
    mapping = get_messages_map()
    if lang not in mapping:
        return False
    body = json.dumps(payload.get("content", {}), indent=2, ensure_ascii=False)
    (PACKAGE_ROOT / mapping[lang]).write_text(body, encoding="utf-8")
    return True

View File

@@ -0,0 +1,237 @@
"""Flask-based API surface that replaces the legacy FastAPI frontend."""
from __future__ import annotations
import os
import subprocess
import sys
import threading
import time
from typing import Dict
from flask import Flask, request
from ..roadmap_utils import is_mvp_reached
from .data import (
build_prompt_yaml,
create_translation,
delete_translation,
get_env_vars,
get_navigation_items,
get_prompt_content,
get_recent_logs,
get_ui_messages,
get_workflow_content,
list_translations,
load_metadata,
load_translation,
load_workflow_packages,
persist_env_vars,
summarize_workflow_packages,
update_translation,
write_prompt,
write_workflow,
)
app = Flask(__name__)
app.config["JSON_SORT_KEYS"] = False  # keep JSON response keys in insertion order

# Process-wide run state shared across routes and the worker thread:
#   bot_process        - Popen handle while a real run is active, else None
#   mock_running       - True during a simulated run (MOCK_WEB_UI=true)
#   current_run_config - parameters of the run currently in progress
bot_process = None
mock_running = False
current_run_config: Dict[str, object] = {}


def _reset_run_state() -> None:
    """Clear run bookkeeping once the bot (or mock run) finishes."""
    global bot_process, current_run_config
    bot_process = None
    current_run_config = {}
def run_bot_task(mode: str, iterations: int, yolo: bool, stop_at_mvp: bool) -> None:
    """Run the bot subprocess according to the requested mode.

    Args:
        mode: ``"once"`` for a single pass, ``"iterations"`` for a bounded
            number of passes, anything else for a continuous run.
        iterations: Number of passes when ``mode == "iterations"``.
        yolo: Forward ``--yolo`` to the bot CLI.
        stop_at_mvp: Stop the iteration loop early once the roadmap
            reports the MVP as reached.

    Intended to run on a background thread; updates module-level run
    state and always clears it on exit.
    """
    global bot_process, mock_running, current_run_config
    current_run_config = {
        "mode": mode,
        "iterations": iterations,
        "yolo": yolo,
        "stop_at_mvp": stop_at_mvp,
    }
    if os.environ.get("MOCK_WEB_UI") == "true":
        # Simulated run for UI development: no subprocess is spawned.
        mock_running = True
        time.sleep(5)
        mock_running = False
        _reset_run_state()
        return
    try:
        cmd = [sys.executable, "-m", "autometabuilder.main"]
        if yolo:
            cmd.append("--yolo")
        if mode == "once":
            cmd.append("--once")
        if mode == "iterations":
            # BUG FIX: previously this branch required `iterations > 1`, so
            # mode="iterations" with iterations=1 fell through to the
            # continuous branch below and ran unbounded. Any iteration
            # count is now bounded by the loop.
            for _ in range(iterations):
                if stop_at_mvp and is_mvp_reached():
                    break
                bot_process = subprocess.Popen(
                    cmd + ["--once"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
                )
                bot_process.wait()
        else:
            bot_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            bot_process.wait()
    finally:
        # Guarantee state is cleared even if Popen/wait raises.
        _reset_run_state()
def start_bot(mode: str = "once", iterations: int = 1, yolo: bool = True, stop_at_mvp: bool = False) -> bool:
    """Kick off a bot run on a daemon thread; False if one is already active."""
    already_running = bot_process is not None or mock_running
    if already_running:
        return False
    worker = threading.Thread(
        target=run_bot_task,
        args=(mode, iterations, yolo, stop_at_mvp),
        daemon=True,
    )
    worker.start()
    return True
def build_context() -> Dict[str, object]:
    """Assemble the full state payload consumed by the web UI."""
    lang = os.environ.get("APP_LANG", "en")
    metadata = load_metadata()
    packages = load_workflow_packages()
    running = bot_process is not None or mock_running
    context: Dict[str, object] = {
        "logs": get_recent_logs(),
        "env_vars": get_env_vars(),
        "translations": list_translations(),
        "metadata": metadata,
        "navigation": get_navigation_items(),
        "prompt_content": get_prompt_content(),
        "workflow_content": get_workflow_content(),
        "workflow_packages": summarize_workflow_packages(packages),
        "workflow_packages_raw": packages,
        "messages": get_ui_messages(lang),
        "lang": lang,
    }
    context["status"] = {
        "is_running": running,
        "mvp_reached": is_mvp_reached(),
        "config": current_run_config,
    }
    return context
@app.route("/api/context")
def api_context() -> tuple[Dict[str, object], int]:
    """Return the full UI context."""
    return build_context(), 200


@app.route("/api/run", methods=["POST"])
def api_run() -> tuple[Dict[str, object], int]:
    """Start a bot run; 202 when accepted, 409 when one is already active."""
    payload = request.get_json(silent=True) or {}
    started = start_bot(
        payload.get("mode", "once"),
        int(payload.get("iterations", 1)),
        bool(payload.get("yolo", True)),
        bool(payload.get("stop_at_mvp", False)),
    )
    status = 202 if started else 409
    return {"started": started}, status


@app.route("/api/prompt", methods=["POST"])
def api_prompt() -> tuple[Dict[str, str], int]:
    """Persist the prompt, either raw YAML or assembled from builder fields."""
    payload = request.get_json(force=True)
    raw_content = payload.get("content")
    if payload.get("prompt_mode", "builder") == "raw" and raw_content is not None:
        write_prompt(raw_content)
    else:
        write_prompt(
            build_prompt_yaml(
                payload.get("system_content"),
                payload.get("user_content"),
                payload.get("model"),
            )
        )
    return {"status": "ok"}, 200
@app.route("/api/workflow", methods=["POST"])
def api_workflow() -> tuple[Dict[str, str], int]:
    """Overwrite the workflow definition file."""
    body = request.get_json(force=True)
    write_workflow(body.get("content", ""))
    return {"status": "saved"}, 200


@app.route("/api/settings", methods=["POST"])
def api_settings() -> tuple[Dict[str, str], int]:
    """Persist environment-variable updates to the local .env file."""
    body = request.get_json(force=True) or {}
    persist_env_vars(body.get("env", {}) or {})
    return {"status": "ok"}, 200


@app.route("/api/status")
def api_status() -> tuple[Dict[str, object], int]:
    """Return only the run-status slice of the full context."""
    context = build_context()
    return context["status"], 200


@app.route("/api/logs")
def api_logs() -> tuple[Dict[str, str], int]:
    """Return the most recent log lines."""
    return {"logs": get_recent_logs()}, 200


@app.route("/api/translation-options")
def api_translation_options() -> tuple[Dict[str, Dict[str, str]], int]:
    """List the available translation languages and their files."""
    return {"translations": list_translations()}, 200
@app.route("/api/translations", methods=["POST"])
def api_create_translation() -> tuple[Dict[str, str], int]:
    """Create a new translation cloned from the English base."""
    body = request.get_json(force=True)
    lang = body.get("lang")
    if not lang:
        return {"error": "lang required"}, 400
    created = create_translation(lang)
    status = 201 if created else 400
    return {"created": created}, status


@app.route("/api/translations/<lang>", methods=["GET"])
def api_get_translation(lang: str) -> tuple[Dict[str, object], int]:
    """Fetch the stored translation content for *lang*."""
    registered = load_metadata().get("messages", {})
    if lang not in registered:
        return {"error": "translation not found"}, 404
    return {"lang": lang, "content": load_translation(lang)}, 200


@app.route("/api/translations/<lang>", methods=["PUT"])
def api_update_translation(lang: str) -> tuple[Dict[str, str], int]:
    """Replace the translation content for *lang*."""
    body = request.get_json(force=True)
    if not update_translation(lang, body):
        return {"error": "unable to update"}, 400
    return {"status": "saved"}, 200


@app.route("/api/translations/<lang>", methods=["DELETE"])
def api_delete_translation(lang: str) -> tuple[Dict[str, str], int]:
    """Delete the translation for *lang* (English is protected)."""
    if not delete_translation(lang):
        return {"error": "cannot delete"}, 400
    return {"deleted": True}, 200
@app.route("/api/navigation")
def api_navigation() -> tuple[Dict[str, object], int]:
    """Return the navigation items for the web UI."""
    return {"items": get_navigation_items()}, 200


@app.route("/api/workflow/plugins")
def api_workflow_plugins() -> tuple[Dict[str, object], int]:
    """Expose workflow plugin metadata."""
    return {"plugins": load_metadata().get("workflow_plugins", {})}, 200


@app.route("/api/workflow/packages")
def api_workflow_packages() -> tuple[Dict[str, object], int]:
    """List summaries of all available workflow packages."""
    return {"packages": summarize_workflow_packages(load_workflow_packages())}, 200


@app.route("/api/workflow/packages/<package_id>")
def api_get_workflow_package(package_id: str) -> tuple[Dict[str, object], int]:
    """Return a single workflow package by id, or 404."""
    match = next(
        (pkg for pkg in load_workflow_packages() if pkg.get("id") == package_id),
        None,
    )
    if match is not None:
        return match, 200
    return {"error": "package not found"}, 404


def start_web_ui(host: str = "0.0.0.0", port: int = 8000) -> None:
    """Run the Flask development server (blocking call)."""
    app.run(host=host, port=port)

View File

View File

@@ -4,7 +4,7 @@ version = "0.1.0"
description = "AutoMetabuilder"
authors = ["Your Name <you@example.com>"]
readme = "README.md"
packages = [{include = "autometabuilder", from = "src"}]
packages = [{include = "autometabuilder", from = "backend"}]
[tool.poetry.dependencies]
python = "^3.10"
@@ -14,12 +14,9 @@ python-dotenv = "^1.0.0"
openai = "^1.0.0"
PyGithub = "^2.1.1"
tenacity = "^9.1.2"
fastapi = "^0.128.0"
uvicorn = "^0.40.0"
jinja2 = "^3.1.6"
flask = "^2.3.3"
slack-sdk = "^3.39.0"
discord-py = "^2.6.4"
python-multipart = "^0.0.21"
[build-system]
requires = ["poetry-core"]

View File

@@ -1,8 +1,8 @@
services:
autometabuilder:
build: .
build: ..
env_file:
- .env
- ../backend/.env
volumes:
- .:/app
stdin_open: true

6
frontend/.eslintrc.json Normal file
View File

@@ -0,0 +1,6 @@
{
"extends": "next/core-web-vitals",
"rules": {
"react/react-in-jsx-scope": "off"
}
}

Some files were not shown because too many files have changed in this diff Show More