Support local and remote prompt loading: add a default prompt path and a PROMPT_PATH environment variable; update load_prompt_yaml to try a local file before falling back to the remote URL, and adjust the main() call site accordingly.

This commit is contained in:
2026-01-09 13:31:42 +00:00
parent 160864a320
commit a8574d156c
3 changed files with 29 additions and 5 deletions

View File

@@ -1,5 +1,6 @@
GITHUB_TOKEN=token123
GITHUB_REPOSITORY=owner/repo
RAW_PROMPT_URL=https://raw.githubusercontent.com/johndoe6345789/metabuilder/main/getonwithit.prompt.yml
PROMPT_PATH=prompt.yml
GITHUB_MODELS_ENDPOINT=https://models.github.ai/inference
APP_LANG=en # Supported: en, es, fr, nl, pirate

16
prompt.yml Normal file
View File

@@ -0,0 +1,16 @@
messages:
- role: system
content: >-
Use software architecture best practices, unit testing, e2e testing,
linting.
- role: user
content: >-
Implement features found in ROADMAP.md and README.md, keep both files up
to date. Write Playwright and unit tests. Follow code style. Use the JSON
schema, but it's mostly correct, so leave the schema alone. Index the repository
and make notes. Align styling with old/ folder while sticking with plain
SASS files. Work towards MVP. Run linter and fix lint issues. Ensure
development DBAL is aligned with C++ production DBAL. Tables, Prisma
schemas and types should be generated using DBAL. C++ CLI frontend can
interface with DBAL and make life a bit easier. Implement any stub code.
model: openai/gpt-4o

View File

@@ -12,6 +12,7 @@ from .github_integration import GitHubIntegration, get_repo_name_from_env
load_dotenv()
DEFAULT_PROMPT_PATH = "prompt.yml"
DEFAULT_RAW_PROMPT_URL = (
"https://raw.githubusercontent.com/johndoe6345789/"
"metabuilder/main/getonwithit.prompt.yml"
@@ -19,8 +20,16 @@ DEFAULT_RAW_PROMPT_URL = (
DEFAULT_ENDPOINT = "https://models.github.ai/inference"
def load_prompt_yaml(url: str, token: str) -> dict:
"""Load prompt configuration from a remote YAML file."""
def load_prompt_yaml(token: str) -> dict:
"""Load prompt configuration from local file or remote URL."""
# Try local file first
local_path = os.environ.get("PROMPT_PATH", DEFAULT_PROMPT_PATH)
if os.path.exists(local_path):
with open(local_path, "r", encoding="utf-8") as f:
return yaml.safe_load(f)
# Fallback to remote URL
url = os.environ.get("RAW_PROMPT_URL", DEFAULT_RAW_PROMPT_URL)
headers = {"Authorization": f"Bearer {token}"}
r = requests.get(url, headers=headers, timeout=30)
r.raise_for_status()
@@ -94,9 +103,7 @@ def main():
api_key=token,
)
prompt = load_prompt_yaml(
os.environ.get("RAW_PROMPT_URL", DEFAULT_RAW_PROMPT_URL), token
)
prompt = load_prompt_yaml(token)
# Load tools for SDLC operations from JSON file
tools_path = os.path.join(os.path.dirname(__file__), "tools.json")