mirror of
https://github.com/johndoe6345789/AutoMetabuilder.git
synced 2026-04-24 13:54:59 +00:00
Introduce AutoMetabuilder core components and workflow packages:
- Implement core components: CLI argument parsing, environment loading, GitHub service creation, and logging configuration. - Add support for OpenAI client setup and model resolution. - Develop SDLC context loader from GitHub and repository files. - Implement workflow context and engine builders. - Introduce major workflow packages: `game_tick_loop` and `contextual_iterative_loop`. - Update localization files with new package descriptions and labels. - Streamline web navigation by loading items from a dedicated JSON file.
This commit is contained in:
@@ -1,240 +0,0 @@
|
||||
"""Helpers for loading metadata, translations, and workflow assets."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
|
||||
# Directory containing the package's assets (metadata.json, messages_*.json, ...).
PACKAGE_ROOT = Path(__file__).resolve().parents[1]
# Repository root; assumes the package sits two directories below it -- TODO confirm layout.
REPO_ROOT = PACKAGE_ROOT.parent.parent
# Shared log file read back by get_recent_logs().
LOG_FILE = REPO_ROOT / "autometabuilder.log"
|
||||
|
||||
|
||||
def _read_json(path: Path) -> Dict[str, Any]:
|
||||
if not path.exists():
|
||||
return {}
|
||||
try:
|
||||
return json.loads(path.read_text(encoding="utf-8"))
|
||||
except json.JSONDecodeError:
|
||||
return {}
|
||||
|
||||
|
||||
def build_prompt_yaml(system_content: str | None, user_content: str | None, model: str | None) -> str:
    """Render a prompt.yml document with system/user messages and a model id.

    Message bodies are emitted as YAML folded scalars (>-), so every line of
    the content is re-indented to the scalar's content column.  *model* falls
    back to "openai/gpt-4o" when empty or None.
    """

    def _fold(block: str | None) -> str:
        # None/empty content collapses to an empty scalar body.
        if not block:
            return ""
        # NOTE(review): continuation indent reconstructed as six spaces to
        # line up with the template below -- confirm against the original.
        return "\n      ".join(part.rstrip() for part in block.splitlines())

    chosen_model = model or "openai/gpt-4o"
    return (
        "messages:\n"
        "  - role: system\n"
        "    content: >-\n"
        f"      {_fold(system_content)}\n"
        "  - role: user\n"
        "    content: >-\n"
        f"      {_fold(user_content)}\n"
        f"model: {chosen_model}\n"
    )
|
||||
|
||||
|
||||
def load_metadata() -> Dict[str, Any]:
    """Read metadata.json from the package root (empty dict on failure)."""
    return _read_json(PACKAGE_ROOT / "metadata.json")


def write_metadata(metadata: Dict[str, Any]) -> None:
    """Serialize *metadata* to metadata.json with 2-space indentation."""
    serialized = json.dumps(metadata, indent=2, ensure_ascii=False)
    (PACKAGE_ROOT / "metadata.json").write_text(serialized, encoding="utf-8")


def get_messages_map(metadata: Optional[Dict[str, Any]] = None) -> Dict[str, str]:
    """Return the language -> messages-file mapping from metadata.

    Loads metadata from disk when *metadata* is not supplied (or is falsy).
    """
    source = metadata or load_metadata()
    return source.get("messages", {})
|
||||
|
||||
|
||||
def load_translation(lang: str) -> Dict[str, Any]:
    """Return the raw translation dict for *lang*, or {} if unknown."""
    file_name = get_messages_map().get(lang)
    return _read_json(PACKAGE_ROOT / file_name) if file_name else {}


def list_translations() -> Dict[str, str]:
    """Map language codes to their messages file names.

    Prefers the mapping recorded in metadata; when that is empty, falls back
    to scanning the package directory for messages_*.json files.
    """
    mapped = get_messages_map()
    if mapped:
        return mapped
    return {
        candidate.name.removeprefix("messages_").removesuffix(".json"): candidate.name
        for candidate in PACKAGE_ROOT.glob("messages_*.json")
    }


def get_ui_messages(lang: str) -> Dict[str, Any]:
    """English messages overlaid with *lang* translations, plus a __lang marker."""
    mapping = get_messages_map()
    english_name = mapping.get("en", "messages_en.json")
    merged: Dict[str, Any] = dict(_read_json(PACKAGE_ROOT / english_name))
    merged.update(_read_json(PACKAGE_ROOT / mapping.get(lang, english_name)))
    merged["__lang"] = lang
    return merged
|
||||
|
||||
|
||||
def get_prompt_content() -> str:
    """Read the prompt file named by $PROMPT_PATH (default prompt.yml), or ""."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    return prompt_file.read_text(encoding="utf-8") if prompt_file.is_file() else ""


def write_prompt(content: str) -> None:
    """Overwrite the prompt file with *content* (None/empty becomes "")."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    prompt_file.write_text(content or "", encoding="utf-8")
|
||||
|
||||
|
||||
def get_workflow_content() -> str:
    """Return the workflow file's text, or "" if it does not exist.

    The file name comes from metadata's "workflow_path" (default workflow.json).
    """
    name = load_metadata().get("workflow_path", "workflow.json")
    target = PACKAGE_ROOT / name
    return target.read_text(encoding="utf-8") if target.exists() else ""


def write_workflow(content: str) -> None:
    """Write *content* to the configured workflow file (None/empty becomes "")."""
    name = load_metadata().get("workflow_path", "workflow.json")
    (PACKAGE_ROOT / name).write_text(content or "", encoding="utf-8")
|
||||
|
||||
|
||||
def get_navigation_items() -> List[Dict[str, Any]]:
    """Load web navigation entries from web/navigation_items.json.

    Returns [] when the file is missing, invalid, or not a JSON array.
    """
    items = _read_json(PACKAGE_ROOT / "web" / "navigation_items.json")
    return items if isinstance(items, list) else []


def get_workflow_packages_dir() -> Path:
    """Directory holding workflow package JSON files (from metadata, with default)."""
    folder = load_metadata().get("workflow_packages_path", "workflow_packages")
    return PACKAGE_ROOT / folder
|
||||
|
||||
|
||||
def load_workflow_packages() -> List[Dict[str, Any]]:
    """Read every *.json workflow package in the packages directory.

    Non-dict payloads are skipped.  Each package is normalized in place:
    "id" defaults to the file stem and "workflow" to {"nodes": []}.
    Results follow sorted file-name order; [] when the directory is absent.
    """
    folder = get_workflow_packages_dir()
    if not folder.exists():
        return []
    loaded: List[Dict[str, Any]] = []
    for entry in sorted(folder.iterdir()):
        if entry.suffix != ".json":
            continue
        payload = _read_json(entry)
        if not isinstance(payload, dict):
            continue
        payload["id"] = payload.get("id") or entry.stem
        payload.setdefault("workflow", {"nodes": []})
        loaded.append(payload)
    return loaded


def summarize_workflow_packages(packages: Iterable[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Project each package down to its id/label/description/tags fields."""
    return [
        {
            "id": pkg["id"],
            "label": pkg.get("label") or pkg["id"],
            "description": pkg.get("description", ""),
            "tags": pkg.get("tags", []),
        }
        for pkg in packages
    ]
|
||||
|
||||
|
||||
def get_env_vars() -> Dict[str, str]:
    """Parse ./.env into a dict, ignoring comments, blanks, and malformed lines.

    Values are stripped of surrounding whitespace and single/double quotes.
    """
    env_file = Path(".env")
    if not env_file.exists():
        return {}
    parsed: Dict[str, str] = {}
    for raw_line in env_file.read_text(encoding="utf-8").splitlines():
        entry = raw_line.strip()
        # Skip blanks, comments, and lines with no assignment at all.
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, value = entry.partition("=")
        parsed[name.strip()] = value.strip().strip("'\"")
    return parsed


def persist_env_vars(updates: Dict[str, str]) -> None:
    """Write each key/value in *updates* into ./.env via python-dotenv.

    The import is deliberately lazy so the module loads even when
    python-dotenv is not installed.
    """
    from dotenv import set_key

    env_file = Path(".env")
    env_file.touch(exist_ok=True)
    for name, value in updates.items():
        set_key(env_file, name, value)
|
||||
|
||||
|
||||
def get_recent_logs(lines: int = 50) -> str:
    """Return the last *lines* lines of the shared log file ("" if absent)."""
    if not LOG_FILE.exists():
        return ""
    with LOG_FILE.open("r", encoding="utf-8") as log:
        tail = log.readlines()[-lines:]
    return "".join(tail)
|
||||
|
||||
|
||||
def _store_messages_map(messages_map: Dict[str, str]) -> None:
    """Persist an updated language map back into metadata.json."""
    metadata = load_metadata()
    metadata["messages"] = messages_map
    write_metadata(metadata)


def create_translation(lang: str) -> bool:
    """Create messages_<lang>.json by copying the English base file.

    Returns False when the language already exists or the English file is
    missing; otherwise records the new file in metadata and returns True.
    """
    messages_map = get_messages_map()
    if lang in messages_map:
        return False
    base_file = PACKAGE_ROOT / messages_map.get("en", "messages_en.json")
    if not base_file.exists():
        return False
    new_name = f"messages_{lang}.json"
    shutil.copy(base_file, PACKAGE_ROOT / new_name)
    messages_map[lang] = new_name
    _store_messages_map(messages_map)
    return True


def delete_translation(lang: str) -> bool:
    """Remove *lang*'s messages file and its metadata entry.

    English can never be deleted; unknown languages return False.
    """
    if lang == "en":
        return False
    messages_map = get_messages_map()
    if lang not in messages_map:
        return False
    translation_file = PACKAGE_ROOT / messages_map[lang]
    if translation_file.exists():
        translation_file.unlink()
    del messages_map[lang]
    _store_messages_map(messages_map)
    return True


def update_translation(lang: str, payload: Dict[str, Any]) -> bool:
    """Replace *lang*'s messages file with payload["content"] as indented JSON."""
    messages_map = get_messages_map()
    if lang not in messages_map:
        return False
    content = payload.get("content", {})
    destination = PACKAGE_ROOT / messages_map[lang]
    destination.write_text(json.dumps(content, indent=2, ensure_ascii=False), encoding="utf-8")
    return True
|
||||
46
backend/autometabuilder/web/data/__init__.py
Normal file
46
backend/autometabuilder/web/data/__init__.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .env import get_env_vars, persist_env_vars
|
||||
from .logs import get_recent_logs
|
||||
from .metadata import get_messages_map, load_metadata, write_metadata
|
||||
from .navigation import get_navigation_items
|
||||
from .prompt import build_prompt_yaml, get_prompt_content, write_prompt
|
||||
from .translations import (
|
||||
create_translation,
|
||||
delete_translation,
|
||||
get_ui_messages,
|
||||
list_translations,
|
||||
load_translation,
|
||||
update_translation,
|
||||
)
|
||||
from .workflow import (
|
||||
get_workflow_content,
|
||||
get_workflow_packages_dir,
|
||||
load_workflow_packages,
|
||||
summarize_workflow_packages,
|
||||
write_workflow,
|
||||
)
|
||||
|
||||
# Explicit public API of the web data helpers package, re-exported above.
__all__ = [
    "build_prompt_yaml",
    "create_translation",
    "delete_translation",
    "get_env_vars",
    "get_messages_map",
    "get_navigation_items",
    "get_prompt_content",
    "get_recent_logs",
    "get_ui_messages",
    "get_workflow_content",
    "get_workflow_packages_dir",
    "list_translations",
    "load_metadata",
    "load_translation",
    "load_workflow_packages",
    "persist_env_vars",
    "summarize_workflow_packages",
    "update_translation",
    "write_metadata",
    "write_prompt",
    "write_workflow",
]
|
||||
29
backend/autometabuilder/web/data/env.py
Normal file
29
backend/autometabuilder/web/data/env.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def get_env_vars() -> dict[str, str]:
    """Parse ./.env into a dict, ignoring comments, blanks, and malformed lines.

    Values are stripped of surrounding whitespace and single/double quotes.
    """
    env_file = Path(".env")
    if not env_file.exists():
        return {}
    parsed: dict[str, str] = {}
    for raw_line in env_file.read_text(encoding="utf-8").splitlines():
        entry = raw_line.strip()
        # Skip blanks, comments, and lines with no assignment at all.
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, _, value = entry.partition("=")
        parsed[name.strip()] = value.strip().strip("'\"")
    return parsed


def persist_env_vars(updates: dict[str, str]) -> None:
    """Write each key/value in *updates* into ./.env via python-dotenv.

    The import is deliberately lazy so the module loads even when
    python-dotenv is not installed.
    """
    from dotenv import set_key

    env_file = Path(".env")
    env_file.touch(exist_ok=True)
    for name, value in updates.items():
        set_key(env_file, name, value)
|
||||
14
backend/autometabuilder/web/data/json_utils.py
Normal file
14
backend/autometabuilder/web/data/json_utils.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def read_json(path: Path) -> dict[str, Any]:
    """Load JSON from *path*, returning {} on any read or parse failure.

    Tolerates a missing file, an unreadable file (permissions / I/O errors),
    bad encoding, and malformed JSON -- callers treat all of these as "no
    data".  The annotation says dict, but the value is whatever the file
    contains; navigation loading relies on lists passing through unchanged.
    """
    try:
        return json.loads(path.read_text(encoding="utf-8"))
    except (OSError, ValueError):
        # OSError replaces the racy exists() pre-check (the file can vanish or
        # be unreadable between check and read); ValueError covers both
        # json.JSONDecodeError and UnicodeDecodeError.
        return {}
|
||||
11
backend/autometabuilder/web/data/logs.py
Normal file
11
backend/autometabuilder/web/data/logs.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .paths import LOG_FILE
|
||||
|
||||
|
||||
def get_recent_logs(lines: int = 50) -> str:
    """Return the last *lines* lines of the shared log file ("" if absent)."""
    if not LOG_FILE.exists():
        return ""
    with LOG_FILE.open("r", encoding="utf-8") as log:
        tail = log.readlines()[-lines:]
    return "".join(tail)
|
||||
22
backend/autometabuilder/web/data/metadata.py
Normal file
22
backend/autometabuilder/web/data/metadata.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
from .json_utils import read_json
|
||||
from .paths import PACKAGE_ROOT
|
||||
|
||||
|
||||
def load_metadata() -> dict[str, Any]:
    """Read metadata.json from the package root (empty dict on failure)."""
    return read_json(PACKAGE_ROOT / "metadata.json")


def write_metadata(metadata: dict[str, Any]) -> None:
    """Serialize *metadata* to metadata.json with 2-space indentation."""
    serialized = json.dumps(metadata, indent=2, ensure_ascii=False)
    (PACKAGE_ROOT / "metadata.json").write_text(serialized, encoding="utf-8")


def get_messages_map(metadata: dict[str, Any] | None = None) -> dict[str, str]:
    """Return the language -> messages-file mapping from metadata.

    Loads metadata from disk when *metadata* is not supplied (or is falsy).
    """
    source = metadata or load_metadata()
    return source.get("messages", {})
|
||||
14
backend/autometabuilder/web/data/navigation.py
Normal file
14
backend/autometabuilder/web/data/navigation.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from .json_utils import read_json
|
||||
from .paths import PACKAGE_ROOT
|
||||
|
||||
|
||||
def get_navigation_items() -> list[dict[str, Any]]:
    """Load web navigation entries from web/navigation_items.json.

    Returns [] when the file is missing, invalid, or not a JSON array.
    """
    items = read_json(PACKAGE_ROOT / "web" / "navigation_items.json")
    return items if isinstance(items, list) else []
|
||||
7
backend/autometabuilder/web/data/paths.py
Normal file
7
backend/autometabuilder/web/data/paths.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
# Package root: three levels up from this file (web/data/paths.py -> package dir).
PACKAGE_ROOT = Path(__file__).resolve().parents[2]
# Repository root; assumes the package sits two directories below it -- TODO confirm layout.
REPO_ROOT = PACKAGE_ROOT.parent.parent
# Shared log file consumed by logs.get_recent_logs().
LOG_FILE = REPO_ROOT / "autometabuilder.log"
|
||||
36
backend/autometabuilder/web/data/prompt.py
Normal file
36
backend/autometabuilder/web/data/prompt.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def build_prompt_yaml(system_content: str | None, user_content: str | None, model: str | None) -> str:
    """Render a prompt.yml document with system/user messages and a model id.

    Message bodies are emitted as YAML folded scalars (>-), so every line of
    the content is re-indented to the scalar's content column.  *model* falls
    back to "openai/gpt-4o" when empty or None.
    """

    def _fold(block: str | None) -> str:
        # None/empty content collapses to an empty scalar body.
        if not block:
            return ""
        # NOTE(review): continuation indent reconstructed as six spaces to
        # line up with the template below -- confirm against the original.
        return "\n      ".join(part.rstrip() for part in block.splitlines())

    chosen_model = model or "openai/gpt-4o"
    return (
        "messages:\n"
        "  - role: system\n"
        "    content: >-\n"
        f"      {_fold(system_content)}\n"
        "  - role: user\n"
        "    content: >-\n"
        f"      {_fold(user_content)}\n"
        f"model: {chosen_model}\n"
    )


def get_prompt_content() -> str:
    """Read the prompt file named by $PROMPT_PATH (default prompt.yml), or ""."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    return prompt_file.read_text(encoding="utf-8") if prompt_file.is_file() else ""


def write_prompt(content: str) -> None:
    """Overwrite the prompt file with *content* (None/empty becomes "")."""
    prompt_file = Path(os.environ.get("PROMPT_PATH", "prompt.yml"))
    prompt_file.write_text(content or "", encoding="utf-8")
|
||||
84
backend/autometabuilder/web/data/translations.py
Normal file
84
backend/autometabuilder/web/data/translations.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import shutil
|
||||
from typing import Any
|
||||
|
||||
from .json_utils import read_json
|
||||
from .metadata import get_messages_map, load_metadata, write_metadata
|
||||
from .paths import PACKAGE_ROOT
|
||||
|
||||
|
||||
def load_translation(lang: str) -> dict[str, Any]:
    """Return the raw translation dict for *lang*, or {} if unknown."""
    file_name = get_messages_map().get(lang)
    return read_json(PACKAGE_ROOT / file_name) if file_name else {}


def list_translations() -> dict[str, str]:
    """Map language codes to their messages file names.

    Prefers the mapping recorded in metadata; when that is empty, falls back
    to scanning the package directory for messages_*.json files.
    """
    mapped = get_messages_map()
    if mapped:
        return mapped
    return {
        candidate.name.removeprefix("messages_").removesuffix(".json"): candidate.name
        for candidate in PACKAGE_ROOT.glob("messages_*.json")
    }


def get_ui_messages(lang: str) -> dict[str, Any]:
    """English messages overlaid with *lang* translations, plus a __lang marker."""
    mapping = get_messages_map()
    english_name = mapping.get("en", "messages_en.json")
    merged: dict[str, Any] = dict(read_json(PACKAGE_ROOT / english_name))
    merged.update(read_json(PACKAGE_ROOT / mapping.get(lang, english_name)))
    merged["__lang"] = lang
    return merged


def _store_messages_map(messages_map: dict[str, str]) -> None:
    """Persist an updated language map back into metadata.json."""
    metadata = load_metadata()
    metadata["messages"] = messages_map
    write_metadata(metadata)


def create_translation(lang: str) -> bool:
    """Create messages_<lang>.json by copying the English base file.

    Returns False when the language already exists or the English file is
    missing; otherwise records the new file in metadata and returns True.
    """
    messages_map = get_messages_map()
    if lang in messages_map:
        return False
    base_file = PACKAGE_ROOT / messages_map.get("en", "messages_en.json")
    if not base_file.exists():
        return False
    new_name = f"messages_{lang}.json"
    shutil.copy(base_file, PACKAGE_ROOT / new_name)
    messages_map[lang] = new_name
    _store_messages_map(messages_map)
    return True


def delete_translation(lang: str) -> bool:
    """Remove *lang*'s messages file and its metadata entry.

    English can never be deleted; unknown languages return False.
    """
    if lang == "en":
        return False
    messages_map = get_messages_map()
    if lang not in messages_map:
        return False
    translation_file = PACKAGE_ROOT / messages_map[lang]
    if translation_file.exists():
        translation_file.unlink()
    del messages_map[lang]
    _store_messages_map(messages_map)
    return True


def update_translation(lang: str, payload: dict[str, Any]) -> bool:
    """Replace *lang*'s messages file with payload["content"] as indented JSON."""
    messages_map = get_messages_map()
    if lang not in messages_map:
        return False
    content = payload.get("content", {})
    destination = PACKAGE_ROOT / messages_map[lang]
    destination.write_text(json.dumps(content, indent=2, ensure_ascii=False), encoding="utf-8")
    return True
|
||||
62
backend/autometabuilder/web/data/workflow.py
Normal file
62
backend/autometabuilder/web/data/workflow.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterable
|
||||
|
||||
from .json_utils import read_json
|
||||
from .metadata import load_metadata
|
||||
from .paths import PACKAGE_ROOT
|
||||
|
||||
|
||||
def get_workflow_content() -> str:
    """Return the workflow file's text, or "" if it does not exist.

    The file name comes from metadata's "workflow_path" (default workflow.json).
    """
    name = load_metadata().get("workflow_path", "workflow.json")
    target = PACKAGE_ROOT / name
    return target.read_text(encoding="utf-8") if target.exists() else ""


def write_workflow(content: str) -> None:
    """Write *content* to the configured workflow file (None/empty becomes "")."""
    name = load_metadata().get("workflow_path", "workflow.json")
    (PACKAGE_ROOT / name).write_text(content or "", encoding="utf-8")


def get_workflow_packages_dir() -> Path:
    """Directory holding workflow package JSON files (from metadata, with default)."""
    folder = load_metadata().get("workflow_packages_path", "workflow_packages")
    return PACKAGE_ROOT / folder


def load_workflow_packages() -> list[dict[str, Any]]:
    """Read every *.json workflow package in the packages directory.

    Non-dict payloads are skipped.  Each package is normalized in place:
    "id" defaults to the file stem and "workflow" to {"nodes": []}.
    Results follow sorted file-name order; [] when the directory is absent.
    """
    folder = get_workflow_packages_dir()
    if not folder.exists():
        return []
    loaded: list[dict[str, Any]] = []
    for entry in sorted(folder.iterdir()):
        if entry.suffix != ".json":
            continue
        payload = read_json(entry)
        if not isinstance(payload, dict):
            continue
        payload["id"] = payload.get("id") or entry.stem
        payload.setdefault("workflow", {"nodes": []})
        loaded.append(payload)
    return loaded


def summarize_workflow_packages(packages: Iterable[dict[str, Any]]) -> list[dict[str, Any]]:
    """Project each package down to its id/label/description/tags fields."""
    return [
        {
            "id": pkg["id"],
            "label": pkg.get("label") or pkg["id"],
            "description": pkg.get("description", ""),
            "tags": pkg.get("tags", []),
        }
        for pkg in packages
    ]
|
||||
Reference in New Issue
Block a user