mirror of
https://github.com/johndoe6345789/goodpackagerepo.git
synced 2026-04-24 13:54:59 +00:00
Migrate to SQLAlchemy for database operations
Co-authored-by: johndoe6345789 <224850594+johndoe6345789@users.noreply.github.com>
This commit is contained in:
@@ -18,13 +18,18 @@ import jwt
|
||||
from werkzeug.exceptions import HTTPException
|
||||
import jsonschema
|
||||
|
||||
import auth as auth_module
|
||||
import config_db
|
||||
import auth_sqlalchemy as auth_module
|
||||
import config_db_sqlalchemy as config_db
|
||||
|
||||
app = Flask(__name__)
|
||||
CORS(app)
|
||||
|
||||
# Configuration is now loaded from database, not JSON file
|
||||
# Load schema.json for reference
|
||||
SCHEMA_PATH = Path(__file__).parent.parent / "schema.json"
|
||||
with open(SCHEMA_PATH) as f:
|
||||
SCHEMA = json.load(f)
|
||||
|
||||
# Configuration is now loaded from database using SQLAlchemy
|
||||
# schema.json is only used once during initial database setup
|
||||
DB_CONFIG = config_db.get_repository_config()
|
||||
|
||||
|
||||
92
backend/auth_sqlalchemy.py
Normal file
92
backend/auth_sqlalchemy.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""
|
||||
Authentication and user management module using SQLAlchemy.
|
||||
"""
|
||||
|
||||
import bcrypt
import jwt

from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any

from models import User, UsersSession
|
||||
|
||||
|
||||
def init_db():
    """Initialize the users database, seeding a default admin account.

    Idempotent: if a user named ``admin`` already exists, nothing happens.
    The seeded credentials are admin/admin and should be changed via
    :func:`change_password` immediately after first login.
    """
    session = UsersSession()
    try:
        # Only seed when no admin row exists yet.
        admin = session.query(User).filter_by(username='admin').first()
        if not admin:
            # Create default admin user (admin/admin)
            password_hash = bcrypt.hashpw("admin".encode('utf-8'), bcrypt.gensalt())
            # Naive-UTC ISO timestamp with an explicit "Z" suffix, matching the
            # string-typed created_at/updated_at columns (datetime.utcnow() is
            # deprecated since Python 3.12, so derive the same value from an
            # aware datetime).
            now = datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"
            admin = User(
                username='admin',
                password_hash=password_hash.decode('utf-8'),
                scopes='read,write,admin',
                created_at=now,
                updated_at=now
            )
            session.add(admin)
            session.commit()
    finally:
        session.close()
|
||||
|
||||
|
||||
def verify_password(username: str, password: str) -> Optional[Dict[str, Any]]:
    """Check a username/password pair against the users database.

    Returns a dict with ``id``, ``username`` and ``scopes`` (split into a
    list) on success, or ``None`` when the user is unknown or the password
    does not match.
    """
    session = UsersSession()
    try:
        record = session.query(User).filter_by(username=username).first()
        if record is None:
            return None

        supplied = password.encode('utf-8')
        stored = record.password_hash.encode('utf-8')
        # bcrypt re-hashes the supplied password with the stored salt and
        # compares in constant time.
        if not bcrypt.checkpw(supplied, stored):
            return None

        return {
            'id': record.id,
            'username': record.username,
            'scopes': record.scopes.split(','),
        }
    finally:
        session.close()
|
||||
|
||||
|
||||
def change_password(username: str, old_password: str, new_password: str) -> bool:
    """Change a user's password after verifying the current one.

    Returns ``True`` on success, ``False`` when the old password is wrong
    or the user does not exist.
    """
    # Verify old password first; this also covers the unknown-user case.
    if not verify_password(username, old_password):
        return False

    # Hash new password before opening the write session.
    password_hash = bcrypt.hashpw(new_password.encode('utf-8'), bcrypt.gensalt())
    # Naive-UTC ISO timestamp with "Z" suffix, same format used at creation
    # (datetime.utcnow() is deprecated since Python 3.12).
    now = datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"

    session = UsersSession()
    try:
        user = session.query(User).filter_by(username=username).first()
        if user:
            user.password_hash = password_hash.decode('utf-8')
            user.updated_at = now
            session.commit()
            return True
        # User disappeared between the verify step and this query.
        return False
    finally:
        session.close()
|
||||
|
||||
|
||||
def generate_token(user: Dict[str, Any], secret: str, expires_hours: int = 24) -> str:
    """Generate a signed JWT for *user*.

    The payload carries the username as ``sub``, the scope list, and an
    expiry ``expires_hours`` from now.  Signed with HS256 using *secret*.
    """
    payload = {
        'sub': user['username'],
        'scopes': user['scopes'],
        # Timezone-aware expiry; PyJWT converts aware datetimes to Unix
        # timestamps for the "exp" claim.  (datetime.utcnow() is deprecated
        # since Python 3.12.)
        'exp': datetime.now(timezone.utc) + timedelta(hours=expires_hours)
    }
    return jwt.encode(payload, secret, algorithm='HS256')
|
||||
|
||||
|
||||
# Initialize database on module import.
# NOTE(review): importing this module has the side effect of opening the
# users database and seeding the default admin account.
init_db()
|
||||
471
backend/config_db_sqlalchemy.py
Normal file
471
backend/config_db_sqlalchemy.py
Normal file
@@ -0,0 +1,471 @@
|
||||
"""
|
||||
Database models for repository configuration using SQLAlchemy.
|
||||
Stores schema.json configuration in SQLite for dynamic management.
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List, Optional
|
||||
from models import (
|
||||
ConfigSession, RepositoryConfig, Capability, Entity, EntityField, EntityConstraint,
|
||||
BlobStore, KVStore, APIRoute, AuthScope, AuthPolicy, CachingConfig, FeaturesConfig,
|
||||
DocumentConfig, StorageSchema, Index, IndexKey, Upstream, EventType, ReplicationConfig,
|
||||
GCConfig, OpsLimits, AllowedOp, Invariant, ValidationRule, VersioningConfig
|
||||
)
|
||||
|
||||
|
||||
def load_schema_to_db(schema_path: Path):
    """Load schema.json into the database using SQLAlchemy.

    Idempotent: if a RepositoryConfig row already exists, the function
    returns without touching the database.  Otherwise the whole schema is
    inserted in a single transaction; any failure rolls everything back
    and re-raises.
    """
    with open(schema_path) as f:
        schema = json.load(f)

    session = ConfigSession()
    try:
        # Check if config already exists
        existing = session.query(RepositoryConfig).first()
        if existing:
            print("Configuration already exists in database")
            return

        # NOTE(review): datetime.utcnow() is deprecated in Python 3.12+.
        now = datetime.utcnow().isoformat() + "Z"

        # Create repository config
        config = RepositoryConfig(
            schema_version=schema['schema_version'],
            type_id=schema['type_id'],
            description=schema['description'],
            created_at=now,
            updated_at=now
        )
        session.add(config)
        session.flush()  # Get the config.id for the FK columns below

        # Add capabilities (lists stored as JSON text)
        capability = Capability(
            config_id=config.id,
            protocols=json.dumps(schema['capabilities']['protocols']),
            storage=json.dumps(schema['capabilities']['storage']),
            features=json.dumps(schema['capabilities']['features'])
        )
        session.add(capability)

        # Add entities ('versioning' is handled separately at the bottom)
        for entity_name, entity_data in schema['entities'].items():
            if entity_name == 'versioning':
                continue

            entity = Entity(
                config_id=config.id,
                name=entity_name,
                type='artifact',
                primary_key=json.dumps(entity_data.get('primary_key', [])),
                created_at=now
            )
            session.add(entity)
            session.flush()  # need entity.id for fields/constraints

            # Add entity fields (booleans stored as 0/1 integers)
            for field_name, field_data in entity_data.get('fields', {}).items():
                field = EntityField(
                    entity_id=entity.id,
                    name=field_name,
                    type=field_data['type'],
                    optional=1 if field_data.get('optional', False) else 0,
                    normalizations=json.dumps(field_data.get('normalize', []))
                )
                session.add(field)

            # Add entity constraints
            for constraint in entity_data.get('constraints', []):
                constraint_obj = EntityConstraint(
                    entity_id=entity.id,
                    field=constraint['field'],
                    regex=constraint['regex'],
                    when_present=1 if constraint.get('when_present', False) else 0
                )
                session.add(constraint_obj)

        # Add blob stores
        for store_name, store_data in schema['storage']['blob_stores'].items():
            blob_store = BlobStore(
                config_id=config.id,
                name=store_name,
                kind=store_data['kind'],
                root=store_data['root'],
                addressing_mode=store_data['addressing'].get('mode'),
                addressing_digest=store_data['addressing'].get('digest'),
                path_template=store_data['addressing'].get('path_template'),
                max_blob_bytes=store_data['limits'].get('max_blob_bytes'),
                min_blob_bytes=store_data['limits'].get('min_blob_bytes')
            )
            session.add(blob_store)

        # Add KV stores
        for store_name, store_data in schema['storage']['kv_stores'].items():
            kv_store = KVStore(
                config_id=config.id,
                name=store_name,
                kind=store_data['kind'],
                root=store_data['root']
            )
            session.add(kv_store)

        # Add API routes
        for route in schema['api']['routes']:
            api_route = APIRoute(
                config_id=config.id,
                route_id=route['id'],
                method=route['method'],
                path=route['path'],
                tags=json.dumps(route.get('tags', [])),
                pipeline=json.dumps(route['pipeline']),
                created_at=now
            )
            session.add(api_route)

        # Add auth scopes
        for scope in schema['auth']['scopes']:
            auth_scope = AuthScope(
                config_id=config.id,
                name=scope['name'],
                actions=json.dumps(scope['actions'])
            )
            session.add(auth_scope)

        # Add auth policies
        for policy in schema['auth']['policies']:
            auth_policy = AuthPolicy(
                config_id=config.id,
                name=policy['name'],
                effect=policy['effect'],
                conditions=json.dumps(policy.get('when', {})),
                requirements=json.dumps(policy.get('require', {}))
            )
            session.add(auth_policy)

        # Add caching config
        caching = schema['caching']
        caching_config = CachingConfig(
            config_id=config.id,
            response_cache_enabled=1 if caching['response_cache']['enabled'] else 0,
            response_cache_ttl=caching['response_cache']['default_ttl_seconds'],
            blob_cache_enabled=1 if caching['blob_cache']['enabled'] else 0,
            blob_cache_max_bytes=caching['blob_cache']['max_bytes']
        )
        session.add(caching_config)

        # Add features config (gc_enabled comes from the 'gc' section)
        features = schema['features']
        features_config = FeaturesConfig(
            config_id=config.id,
            mutable_tags=1 if features['mutable_tags'] else 0,
            allow_overwrite_artifacts=1 if features['allow_overwrite_artifacts'] else 0,
            proxy_enabled=1 if features['proxy_enabled'] else 0,
            gc_enabled=1 if schema['gc']['enabled'] else 0
        )
        session.add(features_config)

        # Add document configs
        for doc_name, doc_data in schema['storage'].get('documents', {}).items():
            doc_config = DocumentConfig(
                config_id=config.id,
                name=doc_name,
                store=doc_data['store'],
                key_template=doc_data['key_template'],
                schema_name=doc_data['schema']
            )
            session.add(doc_config)

        # Add storage schemas
        for schema_name, schema_def in schema['storage'].get('schemas', {}).items():
            storage_schema = StorageSchema(
                config_id=config.id,
                name=schema_name,
                schema_definition=json.dumps(schema_def)
            )
            session.add(storage_schema)

        # Add indexes
        for index_name, index_data in schema.get('indexes', {}).items():
            index = Index(
                config_id=config.id,
                name=index_name,
                source_document=index_data['source_document'],
                materialization_mode=index_data['materialization'].get('mode'),
                materialization_trigger=index_data['materialization'].get('trigger')
            )
            session.add(index)
            session.flush()  # need index.id for its keys

            # Add index keys
            for key in index_data.get('keys', []):
                index_key = IndexKey(
                    index_id=index.id,
                    name=key['name'],
                    fields=json.dumps(key['fields']),
                    sort=json.dumps(key.get('sort', [])),
                    unique_key=1 if key.get('unique', False) else 0
                )
                session.add(index_key)

        # Add upstreams
        for upstream_name, upstream_data in schema.get('upstreams', {}).items():
            upstream = Upstream(
                config_id=config.id,
                name=upstream_name,
                base_url=upstream_data['base_url'],
                auth_mode=upstream_data.get('auth', {}).get('mode'),
                connect_timeout_ms=upstream_data.get('timeouts_ms', {}).get('connect'),
                read_timeout_ms=upstream_data.get('timeouts_ms', {}).get('read'),
                retry_max_attempts=upstream_data.get('retry', {}).get('max_attempts'),
                retry_backoff_ms=upstream_data.get('retry', {}).get('backoff_ms')
            )
            session.add(upstream)

        # Add event types
        for event in schema.get('events', {}).get('types', []):
            event_type = EventType(
                config_id=config.id,
                name=event['name'],
                durable=1 if event.get('durable', True) else 0,
                schema_definition=json.dumps(event.get('schema', {}))
            )
            session.add(event_type)

        # Add replication config (optional section)
        replication = schema.get('replication', {})
        if replication:
            replication_config = ReplicationConfig(
                config_id=config.id,
                mode=replication.get('mode'),
                log_store=replication.get('log', {}).get('store'),
                log_key_prefix=replication.get('log', {}).get('key_prefix'),
                log_ordering=replication.get('log', {}).get('ordering'),
                log_max_event_bytes=replication.get('log', {}).get('max_event_bytes'),
                shipping_strategy=replication.get('shipping', {}).get('strategy'),
                shipping_dedupe_enabled=1 if replication.get('shipping', {}).get('dedupe', {}).get('enabled', False) else 0,
                shipping_batch_max_events=replication.get('shipping', {}).get('batch', {}).get('max_events'),
                shipping_batch_max_bytes=replication.get('shipping', {}).get('batch', {}).get('max_bytes')
            )
            session.add(replication_config)

        # Add GC config (optional section)
        gc = schema.get('gc', {})
        if gc:
            gc_config = GCConfig(
                config_id=config.id,
                enabled=1 if gc.get('enabled', True) else 0,
                immutable_after_publish=1 if gc.get('retention', {}).get('immutable_after_publish', True) else 0,
                keep_last_n_versions=gc.get('retention', {}).get('keep_last_n_versions'),
                keep_tags_forever=1 if gc.get('retention', {}).get('keep_tags_forever', True) else 0,
                sweep_schedule_rrule=gc.get('sweep', {}).get('schedule', {}).get('rrule'),
                sweep_unreferenced_after_seconds=gc.get('sweep', {}).get('sweep_unreferenced_after_seconds')
            )
            session.add(gc_config)

        # Add ops limits (optional section)
        ops = schema.get('ops', {})
        if ops:
            ops_limits = OpsLimits(
                config_id=config.id,
                closed_world=1 if ops.get('closed_world', True) else 0,
                max_pipeline_ops=ops.get('limits', {}).get('max_pipeline_ops'),
                max_request_body_bytes=ops.get('limits', {}).get('max_request_body_bytes'),
                max_json_bytes=ops.get('limits', {}).get('max_json_bytes'),
                max_kv_value_bytes=ops.get('limits', {}).get('max_kv_value_bytes'),
                max_cpu_ms_per_request=ops.get('limits', {}).get('max_cpu_ms_per_request'),
                max_io_ops_per_request=ops.get('limits', {}).get('max_io_ops_per_request')
            )
            session.add(ops_limits)

        # Add allowed operations
        for op in ops.get('allowed', []):
            allowed_op = AllowedOp(
                config_id=config.id,
                operation=op
            )
            session.add(allowed_op)

        # Add invariants
        for invariant in schema.get('invariants', {}).get('global', []):
            invariant_obj = Invariant(
                config_id=config.id,
                invariant_id=invariant['id'],
                description=invariant['description'],
                assertion=json.dumps(invariant['assert'])
            )
            session.add(invariant_obj)

        # Add validation rules (load-time and runtime share one table,
        # distinguished by rule_type)
        validation = schema.get('validation', {})
        for rule in validation.get('load_time_checks', []):
            validation_rule = ValidationRule(
                config_id=config.id,
                rule_id=rule['id'],
                rule_type='load_time',
                requirement=json.dumps(rule['require']),
                on_fail=rule['on_fail']
            )
            session.add(validation_rule)

        for rule in validation.get('runtime_checks', []):
            validation_rule = ValidationRule(
                config_id=config.id,
                rule_id=rule['id'],
                rule_type='runtime',
                requirement=json.dumps(rule['require']),
                on_fail=rule['on_fail']
            )
            session.add(validation_rule)

        # Add versioning config (the 'versioning' entity skipped above)
        versioning = schema.get('entities', {}).get('versioning', {})
        if versioning:
            versioning_config = VersioningConfig(
                config_id=config.id,
                scheme=versioning.get('scheme'),
                ordering=versioning.get('ordering'),
                allow_prerelease=1 if versioning.get('allow_prerelease', False) else 0,
                latest_policy_enabled=1 if versioning.get('latest_policy', {}).get('enabled', True) else 0,
                latest_policy_monotonic=1 if versioning.get('latest_policy', {}).get('monotonic', True) else 0,
                latest_policy_exclude_prerelease=1 if versioning.get('latest_policy', {}).get('exclude_prerelease', True) else 0
            )
            session.add(versioning_config)

        session.commit()
        print("Schema loaded into database successfully")
    except Exception as e:
        session.rollback()
        print(f"Error loading schema: {e}")
        raise
    finally:
        session.close()
|
||||
|
||||
|
||||
def get_repository_config() -> Optional[Dict[str, Any]]:
    """Get the current repository configuration.

    Returns a plain dict assembled from the first RepositoryConfig row and
    its related tables, or ``None`` when the database is empty.

    NOTE(review): capability JSON columns are decoded with json.loads, but
    entity/route/scope/policy JSON columns are returned as raw JSON strings
    — callers must decode those themselves; confirm this asymmetry is
    intentional.
    """
    session = ConfigSession()
    try:
        config = session.query(RepositoryConfig).first()

        if not config:
            return None

        config_dict = {
            'id': config.id,
            'schema_version': config.schema_version,
            'type_id': config.type_id,
            'description': config.description,
            'created_at': config.created_at,
            'updated_at': config.updated_at
        }

        # Get capabilities (one-row relationship; first element if present)
        if config.capabilities:
            cap = config.capabilities[0]
            config_dict['capabilities'] = {
                'protocols': json.loads(cap.protocols),
                'storage': json.loads(cap.storage),
                'features': json.loads(cap.features)
            }

        # Get entities
        entities = []
        for entity in config.entities:
            entity_dict = {
                'id': entity.id,
                'name': entity.name,
                'type': entity.type,
                'primary_key': entity.primary_key,
                'created_at': entity.created_at,
                'fields': [],
                'constraints': []
            }

            # Get fields
            for field in entity.fields:
                entity_dict['fields'].append({
                    'id': field.id,
                    'name': field.name,
                    'type': field.type,
                    'optional': field.optional,
                    'normalizations': field.normalizations
                })

            # Get constraints
            for constraint in entity.constraints:
                entity_dict['constraints'].append({
                    'id': constraint.id,
                    'field': constraint.field,
                    'regex': constraint.regex,
                    'when_present': constraint.when_present
                })

            entities.append(entity_dict)

        config_dict['entities'] = entities

        # Get blob stores
        config_dict['blob_stores'] = [
            {
                'id': bs.id,
                'name': bs.name,
                'kind': bs.kind,
                'root': bs.root,
                'addressing_mode': bs.addressing_mode,
                'addressing_digest': bs.addressing_digest,
                'path_template': bs.path_template,
                'max_blob_bytes': bs.max_blob_bytes,
                'min_blob_bytes': bs.min_blob_bytes
            }
            for bs in config.blob_stores
        ]

        # Get KV stores
        config_dict['kv_stores'] = [
            {'id': kv.id, 'name': kv.name, 'kind': kv.kind, 'root': kv.root}
            for kv in config.kv_stores
        ]

        # Get API routes
        config_dict['api_routes'] = [
            {
                'id': route.id,
                'route_id': route.route_id,
                'method': route.method,
                'path': route.path,
                'tags': route.tags,
                'pipeline': route.pipeline,
                'created_at': route.created_at
            }
            for route in config.api_routes
        ]

        # Get auth scopes
        config_dict['auth_scopes'] = [
            {'id': scope.id, 'name': scope.name, 'actions': scope.actions}
            for scope in config.auth_scopes
        ]

        # Get auth policies
        config_dict['auth_policies'] = [
            {
                'id': policy.id,
                'name': policy.name,
                'effect': policy.effect,
                'conditions': policy.conditions,
                'requirements': policy.requirements
            }
            for policy in config.auth_policies
        ]

        return config_dict
    finally:
        session.close()
|
||||
|
||||
|
||||
# Load schema if database is empty.
# NOTE(review): importing this module has the side effect of reading
# schema.json (one directory above this file) and populating the config
# database on first run; load_schema_to_db is a no-op afterwards.
schema_path = Path(__file__).parent.parent / "schema.json"
if schema_path.exists():
    load_schema_to_db(schema_path)
|
||||
409
backend/models.py
Normal file
409
backend/models.py
Normal file
@@ -0,0 +1,409 @@
|
||||
"""
|
||||
SQLAlchemy models for goodpackagerepo.
|
||||
|
||||
This module defines the database schema using SQLAlchemy ORM.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from sqlalchemy import (
|
||||
create_engine, Column, Integer, String, Text, Boolean, ForeignKey,
|
||||
Table, MetaData
|
||||
)
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import relationship, sessionmaker
|
||||
from pathlib import Path
|
||||
|
||||
# Database paths
|
||||
USERS_DB_PATH = Path(__file__).parent / "users.db"
|
||||
CONFIG_DB_PATH = Path(__file__).parent / "config.db"
|
||||
|
||||
# Create engines
|
||||
users_engine = create_engine(f'sqlite:///{USERS_DB_PATH}', echo=False)
|
||||
config_engine = create_engine(f'sqlite:///{CONFIG_DB_PATH}', echo=False)
|
||||
|
||||
# Create session makers
|
||||
UsersSession = sessionmaker(bind=users_engine)
|
||||
ConfigSession = sessionmaker(bind=config_engine)
|
||||
|
||||
# Base classes
|
||||
Base = declarative_base()
|
||||
ConfigBase = declarative_base()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# User Management Models
|
||||
# ============================================================================
|
||||
|
||||
class User(Base):
    """User model for authentication (users.db)."""
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(String(255), unique=True, nullable=False, index=True)
    # bcrypt hash stored as UTF-8 text
    password_hash = Column(String(255), nullable=False)
    # comma-separated scope names, e.g. "read,write,admin"
    scopes = Column(String(512), nullable=False)
    # ISO-8601 timestamps stored as strings with a trailing "Z"
    created_at = Column(String(64), nullable=False)
    updated_at = Column(String(64), nullable=False)

    def __repr__(self):
        return f"<User(username='{self.username}', scopes='{self.scopes}')>"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Configuration Models
|
||||
# ============================================================================
|
||||
|
||||
class RepositoryConfig(ConfigBase):
    """Main repository configuration (root row of config.db)."""
    __tablename__ = 'repository_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    schema_version = Column(String(32), nullable=False)
    type_id = Column(String(255), nullable=False)
    description = Column(Text)
    # ISO-8601 timestamps stored as strings
    created_at = Column(String(64), nullable=False)
    updated_at = Column(String(64), nullable=False)

    # Relationships — deleting a config cascades to all child rows
    capabilities = relationship("Capability", back_populates="config", cascade="all, delete-orphan")
    entities = relationship("Entity", back_populates="config", cascade="all, delete-orphan")
    blob_stores = relationship("BlobStore", back_populates="config", cascade="all, delete-orphan")
    kv_stores = relationship("KVStore", back_populates="config", cascade="all, delete-orphan")
    api_routes = relationship("APIRoute", back_populates="config", cascade="all, delete-orphan")
    auth_scopes = relationship("AuthScope", back_populates="config", cascade="all, delete-orphan")
    auth_policies = relationship("AuthPolicy", back_populates="config", cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class Capability(ConfigBase):
    """System capabilities (protocols/storage/features as JSON text)."""
    __tablename__ = 'capabilities'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    protocols = Column(Text, nullable=False)  # JSON list
    storage = Column(Text, nullable=False)  # JSON list
    features = Column(Text, nullable=False)  # JSON list

    config = relationship("RepositoryConfig", back_populates="capabilities")
|
||||
|
||||
|
||||
class Entity(ConfigBase):
    """Entity definitions from the schema's 'entities' section."""
    __tablename__ = 'entities'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    type = Column(String(255), nullable=False)
    primary_key = Column(Text)  # JSON list of field names
    created_at = Column(String(64), nullable=False)

    config = relationship("RepositoryConfig", back_populates="entities")
    fields = relationship("EntityField", back_populates="entity", cascade="all, delete-orphan")
    constraints = relationship("EntityConstraint", back_populates="entity", cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class EntityField(ConfigBase):
    """Entity field definitions."""
    __tablename__ = 'entity_fields'

    id = Column(Integer, primary_key=True, autoincrement=True)
    entity_id = Column(Integer, ForeignKey('entities.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    type = Column(String(64), nullable=False)
    optional = Column(Integer, default=0)  # boolean stored as 0/1
    normalizations = Column(Text)  # JSON list

    entity = relationship("Entity", back_populates="fields")
|
||||
|
||||
|
||||
class EntityConstraint(ConfigBase):
    """Regex constraints applied to entity fields."""
    __tablename__ = 'entity_constraints'

    id = Column(Integer, primary_key=True, autoincrement=True)
    entity_id = Column(Integer, ForeignKey('entities.id', ondelete='CASCADE'), nullable=False)
    field = Column(String(255), nullable=False)
    regex = Column(Text, nullable=False)
    when_present = Column(Integer, default=0)  # boolean stored as 0/1

    entity = relationship("Entity", back_populates="constraints")
|
||||
|
||||
|
||||
class BlobStore(ConfigBase):
    """Blob store configurations (flattened 'addressing'/'limits' keys)."""
    __tablename__ = 'blob_stores'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    kind = Column(String(64), nullable=False)
    root = Column(String(512), nullable=False)
    addressing_mode = Column(String(64))
    addressing_digest = Column(String(64))
    path_template = Column(String(512))
    max_blob_bytes = Column(Integer)
    min_blob_bytes = Column(Integer)

    config = relationship("RepositoryConfig", back_populates="blob_stores")
|
||||
|
||||
|
||||
class KVStore(ConfigBase):
    """Key-value store configurations."""
    __tablename__ = 'kv_stores'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    kind = Column(String(64), nullable=False)
    root = Column(String(512), nullable=False)

    config = relationship("RepositoryConfig", back_populates="kv_stores")
|
||||
|
||||
|
||||
class APIRoute(ConfigBase):
    """API route definitions."""
    __tablename__ = 'api_routes'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    # route identifier from the schema ('id' key), distinct from the PK
    route_id = Column(String(255), nullable=False)
    method = Column(String(16), nullable=False)
    path = Column(String(512), nullable=False)
    tags = Column(Text)  # JSON list
    pipeline = Column(Text, nullable=False)  # JSON
    created_at = Column(String(64), nullable=False)

    config = relationship("RepositoryConfig", back_populates="api_routes")
|
||||
|
||||
|
||||
class AuthScope(ConfigBase):
    """Authentication scopes."""
    __tablename__ = 'auth_scopes'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    actions = Column(Text, nullable=False)  # JSON list

    config = relationship("RepositoryConfig", back_populates="auth_scopes")
|
||||
|
||||
|
||||
class AuthPolicy(ConfigBase):
    """Authentication policies ('when'/'require' stored as JSON)."""
    __tablename__ = 'auth_policies'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    effect = Column(String(32), nullable=False)
    conditions = Column(Text)  # JSON ('when' clause)
    requirements = Column(Text)  # JSON ('require' clause)

    config = relationship("RepositoryConfig", back_populates="auth_policies")
|
||||
|
||||
|
||||
class CachingConfig(ConfigBase):
    """Caching configuration.

    NOTE(review): no relationship back to RepositoryConfig is defined for
    this model; rows are reached by direct query only.
    """
    __tablename__ = 'caching_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    response_cache_enabled = Column(Integer, default=1)  # boolean as 0/1
    response_cache_ttl = Column(Integer, default=300)  # seconds
    blob_cache_enabled = Column(Integer, default=1)  # boolean as 0/1
    blob_cache_max_bytes = Column(Integer)
|
||||
|
||||
|
||||
class FeaturesConfig(ConfigBase):
    """Feature flags (booleans stored as 0/1 integers)."""
    __tablename__ = 'features_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    mutable_tags = Column(Integer, default=1)
    allow_overwrite_artifacts = Column(Integer, default=0)
    proxy_enabled = Column(Integer, default=1)
    gc_enabled = Column(Integer, default=1)
|
||||
|
||||
|
||||
class DocumentConfig(ConfigBase):
    """Document configurations (which store holds each document kind)."""
    __tablename__ = 'document_configs'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    store = Column(String(255), nullable=False)
    key_template = Column(String(512), nullable=False)
    # named 'schema_name' to avoid shadowing; holds the schema key ('schema' in JSON)
    schema_name = Column(String(255), nullable=False)
|
||||
|
||||
|
||||
class StorageSchema(ConfigBase):
    """Storage schemas (full definition serialized as JSON text)."""
    __tablename__ = 'storage_schemas'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    schema_definition = Column(Text, nullable=False)  # JSON
|
||||
|
||||
|
||||
class Index(ConfigBase):
    """Index configurations over documents."""
    __tablename__ = 'indexes'

    id = Column(Integer, primary_key=True, autoincrement=True)
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    source_document = Column(String(255), nullable=False)
    materialization_mode = Column(String(64))
    materialization_trigger = Column(String(64))

    # Deleting an index removes its keys
    keys = relationship("IndexKey", back_populates="index", cascade="all, delete-orphan")
|
||||
|
||||
|
||||
class IndexKey(ConfigBase):
    """Index key definitions.

    One row per named key of an Index; field lists and sort order are
    stored as serialized JSON text.
    """
    __tablename__ = 'index_keys'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning index; rows are deleted with their parent.
    index_id = Column(Integer, ForeignKey('indexes.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    fields = Column(Text, nullable=False)  # JSON
    sort = Column(Text)                    # JSON, optional
    unique_key = Column(Integer, default=0)  # presumably 0/1 boolean flag -- stored as Integer

    # Back-reference to the owning Index.
    index = relationship("Index", back_populates="keys")
|
||||
|
||||
|
||||
class Upstream(ConfigBase):
    """Upstream repository configurations.

    Connection, timeout, and retry settings for a remote repository
    that this instance proxies or replicates from.
    """
    __tablename__ = 'upstreams'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    base_url = Column(String(512), nullable=False)
    auth_mode = Column(String(64))
    # Timeouts and retry policy; all durations are in milliseconds.
    connect_timeout_ms = Column(Integer)
    read_timeout_ms = Column(Integer)
    retry_max_attempts = Column(Integer)
    retry_backoff_ms = Column(Integer)
|
||||
|
||||
|
||||
class EventType(ConfigBase):
    """Event type definitions.

    Named event types emitted by a repository; an optional JSON schema
    constrains event payloads.
    """
    __tablename__ = 'event_types'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    name = Column(String(255), nullable=False)
    durable = Column(Integer, default=1)  # presumably 0/1 boolean flag -- stored as Integer
    schema_definition = Column(Text)      # JSON, optional
|
||||
|
||||
|
||||
class ReplicationConfig(ConfigBase):
    """Replication configuration.

    Per-repository settings for the replication log and for shipping
    batched events to peers.
    """
    __tablename__ = 'replication_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    mode = Column(String(64), nullable=False)
    # Replication log settings.
    log_store = Column(String(255))
    log_key_prefix = Column(String(255))
    log_ordering = Column(String(64))
    log_max_event_bytes = Column(Integer)
    # Event-shipping settings.
    shipping_strategy = Column(String(64))
    shipping_dedupe_enabled = Column(Integer)  # presumably 0/1 boolean flag -- stored as Integer
    shipping_batch_max_events = Column(Integer)
    shipping_batch_max_bytes = Column(Integer)
|
||||
|
||||
|
||||
class GCConfig(ConfigBase):
    """Garbage collection configuration.

    Retention policy and sweep schedule for unreferenced artifacts.
    Boolean-like flags are stored as Integer 0/1.
    """
    __tablename__ = 'gc_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    enabled = Column(Integer, default=1)
    immutable_after_publish = Column(Integer, default=1)
    keep_last_n_versions = Column(Integer, default=50)
    keep_tags_forever = Column(Integer, default=1)
    sweep_schedule_rrule = Column(Text)  # presumably an iCalendar RRULE string -- TODO confirm
    # Default 604800 seconds = 7 days.
    sweep_unreferenced_after_seconds = Column(Integer, default=604800)
|
||||
|
||||
|
||||
class OpsLimits(ConfigBase):
    """Operations limits configuration.

    Per-request and per-pipeline resource ceilings for a repository.
    """
    __tablename__ = 'ops_limits'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    closed_world = Column(Integer, default=1)  # presumably 0/1 boolean flag -- stored as Integer
    max_pipeline_ops = Column(Integer, default=128)
    max_request_body_bytes = Column(Integer, default=2147483648)  # 2 GiB
    max_json_bytes = Column(Integer, default=10485760)            # 10 MiB
    max_kv_value_bytes = Column(Integer, default=1048576)         # 1 MiB
    max_cpu_ms_per_request = Column(Integer, default=200)
    max_io_ops_per_request = Column(Integer, default=5000)
|
||||
|
||||
|
||||
class AllowedOp(ConfigBase):
    """Allowed operations.

    Whitelist of operation names permitted for a repository
    configuration, one operation per row.
    """
    __tablename__ = 'allowed_ops'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    operation = Column(String(255), nullable=False)
|
||||
|
||||
|
||||
class Invariant(ConfigBase):
    """System invariants.

    Declared invariants for a repository configuration; the assertion
    itself is stored as serialized JSON text.
    """
    __tablename__ = 'invariants'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    invariant_id = Column(String(255), nullable=False)  # stable external identifier, distinct from the surrogate `id`
    description = Column(Text, nullable=False)
    assertion = Column(Text, nullable=False)  # JSON
|
||||
|
||||
|
||||
class ValidationRule(ConfigBase):
    """Validation rules.

    Declarative validation rules for a repository configuration; the
    requirement payload is stored as serialized JSON text and `on_fail`
    names the action taken when the rule is violated.
    """
    __tablename__ = 'validation_rules'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    rule_id = Column(String(255), nullable=False)    # stable external identifier, distinct from the surrogate `id`
    rule_type = Column(String(64), nullable=False)
    requirement = Column(Text, nullable=False)  # JSON
    on_fail = Column(String(64), nullable=False)
|
||||
|
||||
|
||||
class VersioningConfig(ConfigBase):
    """Versioning configuration.

    Version scheme, ordering, and "latest" tag policy for a repository.
    Boolean-like flags are stored as Integer 0/1.
    """
    __tablename__ = 'versioning_config'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Owning repository configuration; rows are deleted with their parent.
    config_id = Column(Integer, ForeignKey('repository_config.id', ondelete='CASCADE'), nullable=False)
    scheme = Column(String(64), nullable=False)
    ordering = Column(String(64), nullable=False)
    allow_prerelease = Column(Integer, default=0)
    # Policy governing which version the "latest" pointer may advance to.
    latest_policy_enabled = Column(Integer, default=1)
    latest_policy_monotonic = Column(Integer, default=1)
    latest_policy_exclude_prerelease = Column(Integer, default=1)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Database initialization
|
||||
# ============================================================================
|
||||
|
||||
def init_all_databases():
    """Create every table for both databases.

    Emits the user-store tables against the users engine and the
    configuration tables against the config engine. Safe to call more
    than once: ``create_all`` only creates tables that do not exist.
    """
    targets = (
        (Base.metadata, users_engine),
        (ConfigBase.metadata, config_engine),
    )
    for metadata, engine in targets:
        metadata.create_all(engine)
|
||||
|
||||
|
||||
# Initialize on import
# NOTE(review): creating database tables as an import side effect makes
# module import order-sensitive and harder to test -- consider calling
# init_all_databases() explicitly from application startup instead.
init_all_databases()
|
||||
@@ -5,3 +5,5 @@ rocksdict==0.3.23
|
||||
werkzeug==3.1.4
|
||||
jsonschema==4.20.0
|
||||
bcrypt==4.1.2
|
||||
SQLAlchemy==2.0.23
|
||||
alembic==1.13.0
|
||||
|
||||
Reference in New Issue
Block a user