diff --git a/env_example b/env_example index 3207596..6ff76ee 100644 --- a/env_example +++ b/env_example @@ -1,5 +1,6 @@ GITHUB_TOKEN=token123 GITHUB_REPOSITORY=owner/repo RAW_PROMPT_URL=https://raw.githubusercontent.com/johndoe6345789/metabuilder/main/getonwithit.prompt.yml +PROMPT_PATH=prompt.yml GITHUB_MODELS_ENDPOINT=https://models.github.ai/inference APP_LANG=en # Supported: en, es, fr, nl, pirate diff --git a/prompt.yml b/prompt.yml new file mode 100644 index 0000000..cb92ac5 --- /dev/null +++ b/prompt.yml @@ -0,0 +1,16 @@ +messages: + - role: system + content: >- + Use software architecture best practices, unit testing, e2e testing, + linting. + - role: user + content: >- + Implement features found in ROADMAP.md and README.md, keep both files up + to date. Write playwright and unit tests. Follow code style. Use JSON + schema but it's mostly correct so leave the schema alone. Index repository + and make notes. Align styling with old/ folder while sticking with plain + SASS files. Work towards MVP. Run linter and fix lint issues. Ensure + development DBAL is aligned with C++ production DBAL. Tables, Prisma + schemas and types should be generated using DBAL. C++ CLI frontend can + interface with DBAL and make life a bit easier. Implement any stub code. 
+model: openai/gpt-4o diff --git a/src/autometabuilder/main.py b/src/autometabuilder/main.py index b58e3a4..dd36ee7 100644 --- a/src/autometabuilder/main.py +++ b/src/autometabuilder/main.py @@ -12,6 +12,7 @@ from .github_integration import GitHubIntegration, get_repo_name_from_env load_dotenv() +DEFAULT_PROMPT_PATH = "prompt.yml" DEFAULT_RAW_PROMPT_URL = ( "https://raw.githubusercontent.com/johndoe6345789/" "metabuilder/main/getonwithit.prompt.yml" @@ -19,8 +20,16 @@ DEFAULT_RAW_PROMPT_URL = ( DEFAULT_ENDPOINT = "https://models.github.ai/inference" -def load_prompt_yaml(url: str, token: str) -> dict: - """Load prompt configuration from a remote YAML file.""" +def load_prompt_yaml(token: str) -> dict: + """Load prompt configuration from local file or remote URL.""" + # Try local file first + local_path = os.environ.get("PROMPT_PATH", DEFAULT_PROMPT_PATH) + if os.path.exists(local_path): + with open(local_path, "r", encoding="utf-8") as f: + return yaml.safe_load(f) + + # Fallback to remote URL + url = os.environ.get("RAW_PROMPT_URL", DEFAULT_RAW_PROMPT_URL) headers = {"Authorization": f"Bearer {token}"} r = requests.get(url, headers=headers, timeout=30) r.raise_for_status() @@ -94,9 +103,7 @@ def main(): api_key=token, ) - prompt = load_prompt_yaml( - os.environ.get("RAW_PROMPT_URL", DEFAULT_RAW_PROMPT_URL), token - ) + prompt = load_prompt_yaml(token) # Load tools for SDLC operations from JSON file tools_path = os.path.join(os.path.dirname(__file__), "tools.json")