mirror of
https://github.com/johndoe6345789/metabuilder.git
synced 2026-04-26 14:54:55 +00:00
docs: dbal,script,lua (5 files)
This commit is contained in:
@@ -18,16 +18,17 @@ namespace lua_script {
|
||||
*/
|
||||
/*
 * Delete a Lua script by ID.
 *
 * Validates the ID, looks the script up in the in-memory store, and keeps
 * the secondary name index (lua_script_names) consistent with the primary
 * map (lua_scripts). Returns validationError / notFound on failure and
 * Result<bool>(true) on success.
 */
inline Result<bool> remove(InMemoryStore& store, const std::string& id) {
    // Reject empty IDs before touching the store.
    if (id.empty()) {
        return Error::validationError("Lua script ID cannot be empty");
    }

    auto it = store.lua_scripts.find(id);
    if (it == store.lua_scripts.end()) {
        return Error::notFound("Lua script not found: " + id);
    }

    // Erase the name-index entry first (it references the record's name),
    // then the record itself.
    store.lua_script_names.erase(it->second.name);
    store.lua_scripts.erase(it);

    return Result<bool>(true);
}
@@ -19,37 +19,68 @@ namespace lua_script {
|
||||
*/
|
||||
/*
 * Partially update a Lua script.
 *
 * Only fields present (has_value) in `input` are applied; each field is
 * validated before any mutation of that field. A rename keeps the
 * lua_script_names index in sync and is rejected if it collides with a
 * different script's name. On success, updated_at is refreshed and the
 * updated record is returned.
 */
inline Result<LuaScript> update(InMemoryStore& store, const std::string& id, const UpdateLuaScriptInput& input) {
    if (id.empty()) {
        return Error::validationError("Lua script ID cannot be empty");
    }

    auto it = store.lua_scripts.find(id);
    if (it == store.lua_scripts.end()) {
        return Error::notFound("Lua script not found: " + id);
    }

    LuaScript& script = it->second;
    std::string old_name = script.name;

    if (input.name.has_value()) {
        if (!validation::isValidLuaScriptName(input.name.value())) {
            return Error::validationError("Lua script name must be 1-255 characters");
        }
        // Reject a rename that collides with a *different* script's name;
        // renaming to the script's own current name is allowed.
        auto name_it = store.lua_script_names.find(input.name.value());
        if (name_it != store.lua_script_names.end() && name_it->second != id) {
            return Error::conflict("Lua script name already exists: " + input.name.value());
        }
        // Keep the name index consistent: drop the old entry, add the new one.
        store.lua_script_names.erase(old_name);
        store.lua_script_names[input.name.value()] = id;
        script.name = input.name.value();
    }

    if (input.code.has_value()) {
        if (!validation::isValidLuaScriptCode(input.code.value())) {
            return Error::validationError("Lua script code must be a non-empty string");
        }
        script.code = input.code.value();
    }

    // Simple scalar fields: apply directly when present.
    if (input.description.has_value()) script.description = input.description.value();
    if (input.category.has_value()) script.category = input.category.value();
    if (input.is_active.has_value()) script.is_active = input.is_active.value();
    if (input.is_sandboxed.has_value()) script.is_sandboxed = input.is_sandboxed.value();

    if (input.allowed_globals.has_value()) {
        // Validate every entry before replacing the list, so a partial
        // failure leaves the script unchanged.
        for (const auto& entry : input.allowed_globals.value()) {
            if (entry.empty()) {
                return Error::validationError("allowed_globals must contain non-empty strings");
            }
        }
        script.allowed_globals = input.allowed_globals.value();
    }

    if (input.timeout_ms.has_value()) {
        if (!validation::isValidLuaTimeout(input.timeout_ms.value())) {
            return Error::validationError("Timeout must be between 100 and 30000 ms");
        }
        script.timeout_ms = input.timeout_ms.value();
    }

    if (input.created_by.has_value()) {
        if (input.created_by.value().empty()) {
            return Error::validationError("created_by is required");
        }
        script.created_by = input.created_by.value();
    }

    script.updated_at = std::chrono::system_clock::now();
    return Result<LuaScript>(script);
}
### 1.3 ACL/Authorization Layer

#### DBAL-2025-006: Race Condition in Row-Level Security (HIGH → CRITICAL)

**Location**: [acl-adapter.ts](../ts/src/adapters/acl-adapter.ts#L187-L203)

**Impact**:
- Between checking `existing` and calling `update`, another process could modify the record
- Could allow updating records that should be blocked by row-level security

**🏰 Fort Knox Remediation**:

```typescript
async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
|
||||
this.checkPermission(entity, 'update')
|
||||
import { PrismaClient, Prisma } from '@prisma/client'
|
||||
|
||||
/**
|
||||
* Fort Knox ACL Adapter with Atomic Operations
|
||||
* Eliminates all TOCTOU vulnerabilities through database-level locking
|
||||
*/
|
||||
class FortKnoxACLAdapter implements DBALAdapter {
|
||||
private prisma: PrismaClient
|
||||
private user: User
|
||||
private rules: ACLRule[]
|
||||
private auditLogger: AuditLogger
|
||||
|
||||
// Use database transaction for atomic check-and-update
|
||||
return await this.baseAdapter.withTransaction(async (tx) => {
|
||||
const existing = await tx.read(entity, id, { forUpdate: true }) // SELECT FOR UPDATE
|
||||
if (existing) {
|
||||
this.checkRowLevelAccess(entity, 'update', existing as Record<string, unknown>)
|
||||
/**
|
||||
* Atomic update with row-level security
|
||||
* Uses SELECT FOR UPDATE to prevent race conditions
|
||||
*/
|
||||
async update(entity: string, id: string, data: Record<string, unknown>): Promise<unknown> {
|
||||
// Pre-flight permission check (fail fast)
|
||||
this.checkEntityPermission(entity, 'update')
|
||||
|
||||
// Validate update data against schema
|
||||
const validatedData = await this.validateUpdateData(entity, data)
|
||||
|
||||
// Execute in serializable transaction for maximum isolation
|
||||
return await this.prisma.$transaction(async (tx) => {
|
||||
// ATOMIC: Lock row and read in same statement
|
||||
// SELECT ... FOR UPDATE prevents concurrent modifications
|
||||
const existing = await this.selectForUpdate(tx, entity, id)
|
||||
|
||||
if (!existing) {
|
||||
throw DBALError.notFound(`${entity} not found: ${id}`)
|
||||
}
|
||||
|
||||
// Row-level security check (on locked data)
|
||||
this.enforceRowLevelSecurity(entity, 'update', existing)
|
||||
|
||||
// Verify no forbidden fields in update
|
||||
this.validateFieldLevelPermissions(entity, 'update', validatedData)
|
||||
|
||||
// Log before modification (audit trail)
|
||||
await this.auditLogger.logOperation({
|
||||
operation: 'UPDATE',
|
||||
entity,
|
||||
entityId: id,
|
||||
userId: this.user.id,
|
||||
tenantId: this.user.tenantId,
|
||||
before: existing,
|
||||
after: validatedData,
|
||||
timestamp: new Date(),
|
||||
transactionId: tx.id
|
||||
})
|
||||
|
||||
// Perform update within transaction
|
||||
const result = await this.doUpdate(tx, entity, id, validatedData)
|
||||
|
||||
// Post-update verification (defense in depth)
|
||||
await this.verifyUpdateIntegrity(tx, entity, id, result)
|
||||
|
||||
return result
|
||||
}, {
|
||||
isolationLevel: Prisma.TransactionIsolationLevel.Serializable,
|
||||
timeout: 30000, // 30 second timeout
|
||||
maxWait: 5000, // 5 second max wait for lock
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* SELECT FOR UPDATE with entity-specific handling
|
||||
*/
|
||||
private async selectForUpdate(
|
||||
tx: Prisma.TransactionClient,
|
||||
entity: string,
|
||||
id: string
|
||||
): Promise<Record<string, unknown> | null> {
|
||||
// Use raw query for FOR UPDATE - Prisma doesn't support it natively
|
||||
const tableName = this.entityToTable(entity)
|
||||
|
||||
// Parameterized query to prevent SQL injection
|
||||
const rows = await tx.$queryRaw<Record<string, unknown>[]>`
|
||||
SELECT * FROM ${Prisma.raw(`"${tableName}"`)}
|
||||
WHERE id = ${id}
|
||||
FOR UPDATE NOWAIT
|
||||
`
|
||||
|
||||
return rows[0] || null
|
||||
}
|
||||
|
||||
/**
|
||||
* Enforce row-level security with user context
|
||||
*/
|
||||
private enforceRowLevelSecurity(
|
||||
entity: string,
|
||||
operation: string,
|
||||
record: Record<string, unknown>
|
||||
): void {
|
||||
const applicableRules = this.rules.filter(rule =>
|
||||
rule.entity === entity &&
|
||||
rule.roles.includes(this.user.role) &&
|
||||
rule.operations.includes(operation)
|
||||
)
|
||||
|
||||
if (applicableRules.length === 0) {
|
||||
throw DBALError.forbidden(
|
||||
`No matching ACL rule for ${this.user.role} to ${operation} ${entity}`
|
||||
)
|
||||
}
|
||||
return tx.update(entity, id, data)
|
||||
})
|
||||
|
||||
// Check all row-level filters
|
||||
for (const rule of applicableRules) {
|
||||
if (rule.rowLevelFilter) {
|
||||
const hasAccess = rule.rowLevelFilter(this.user, record)
|
||||
if (!hasAccess) {
|
||||
this.auditLogger.logSecurityViolation({
|
||||
type: 'ROW_LEVEL_ACCESS_DENIED',
|
||||
user: this.user,
|
||||
entity,
|
||||
operation,
|
||||
recordId: record.id as string,
|
||||
rule: rule.id
|
||||
})
|
||||
throw DBALError.forbidden(
|
||||
`Row-level access denied for ${entity}:${record.id}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate field-level permissions (prevent privilege escalation)
|
||||
*/
|
||||
private validateFieldLevelPermissions(
|
||||
entity: string,
|
||||
operation: string,
|
||||
data: Record<string, unknown>
|
||||
): void {
|
||||
const sensitiveFields = this.getSensitiveFields(entity)
|
||||
const attemptedSensitiveFields = Object.keys(data).filter(f => sensitiveFields.has(f))
|
||||
|
||||
if (attemptedSensitiveFields.length > 0) {
|
||||
// Only supergod can modify sensitive fields
|
||||
if (this.user.role !== 'supergod') {
|
||||
this.auditLogger.logSecurityViolation({
|
||||
type: 'PRIVILEGE_ESCALATION_ATTEMPT',
|
||||
user: this.user,
|
||||
entity,
|
||||
operation,
|
||||
attemptedFields: attemptedSensitiveFields
|
||||
})
|
||||
throw DBALError.forbidden(
|
||||
`Cannot modify sensitive fields: ${attemptedSensitiveFields.join(', ')}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private getSensitiveFields(entity: string): Set<string> {
|
||||
const SENSITIVE_FIELDS: Record<string, string[]> = {
|
||||
User: ['role', 'level', 'permissions', 'passwordHash', 'tenantId'],
|
||||
Session: ['token', 'userId'],
|
||||
Package: ['isCore', 'trustLevel'],
|
||||
}
|
||||
return new Set(SENSITIVE_FIELDS[entity] || [])
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify update didn't violate invariants
|
||||
*/
|
||||
private async verifyUpdateIntegrity(
|
||||
tx: Prisma.TransactionClient,
|
||||
entity: string,
|
||||
id: string,
|
||||
result: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
// Example: Verify user didn't escalate their own privileges
|
||||
if (entity === 'User' && result.id === this.user.id) {
|
||||
const newLevel = result.level as number
|
||||
const originalLevel = this.user.level
|
||||
|
||||
if (newLevel > originalLevel) {
|
||||
// Rollback will happen automatically when we throw
|
||||
throw DBALError.forbidden('Cannot escalate own privileges')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```

**Additional Protections**:
- [ ] Add optimistic locking with version column
- [ ] Implement distributed locks for multi-node deployments
- [ ] Add circuit breaker for repeated auth failures
- [ ] Monitor for privilege escalation patterns

---

#### DBAL-2025-007: Mass Assignment via Unvalidated Fields (MEDIUM)
**New file**: `frontends/nextjs/src/lib/db/lua-scripts/parse-json-array.ts` (10 lines)
export function parseJsonArray(value: unknown): unknown[] {
|
||||
if (Array.isArray(value)) return value
|
||||
if (typeof value !== 'string') return []
|
||||
try {
|
||||
const parsed = JSON.parse(value)
|
||||
return Array.isArray(parsed) ? parsed : []
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
**New file**: `tools/generate-mega-seed.cjs` (64 lines)
#!/usr/bin/env node

// Regenerates the forum_forge demo seed file with a fixed set of example
// categories and threads. Output is deterministic apart from the
// generatedAt timestamp.

const fs = require('fs')
const path = require('path')

// Demo categories; the ids double as stable foreign keys for the threads.
const categories = [
  { id: 'cat_launch', name: 'Launch Radar', description: 'Product drops and milestone updates' },
  { id: 'cat_growth', name: 'Growth Ops', description: 'Retention, onboarding, and community tactics' },
  { id: 'cat_design', name: 'Design Systems', description: 'Tokens, theming, and UI governance' },
  { id: 'cat_ai', name: 'AI Tooling', description: 'Prompt playbooks, evals, and guardrails' },
  { id: 'cat_ops', name: 'Community Ops', description: 'Moderation workflows and engagement rituals' },
  { id: 'cat_hiring', name: 'Hiring & Ops', description: 'Team growth, planning, and operating cadence' },
  { id: 'cat_funding', name: 'Funding Desk', description: 'Rounds, terms, and investor updates' },
  { id: 'cat_culture', name: 'Culture Lab', description: 'Rituals, values, and remote collaboration' },
  { id: 'cat_docs', name: 'Docs Guild', description: 'Documentation standards and knowledge bases' },
  { id: 'cat_security', name: 'Security Flight', description: 'Threat modeling and incident response' },
]

const titles = [
  'First 1000 users playbook',
  'Launch checklist for indie teams',
  'Building a tokenized design system',
  'How to measure community health',
  'Prompt playbooks for onboarding',
  'Moderation handoff checklists',
  'Hiring your first community lead',
  'Feature flag cadence for launches',
  'Post-launch retrospectives that actually work',
  'Creator pipelines and content calendars',
  'How we scaled support without burnout',
  'Founder updates cadence for investors',
  'Red teaming your onboarding flow',
  'From doc chaos to a unified handbook',
  'Battle-tested incident comms templates',
  'What to automate in week one',
  'Designing for multi-tenant UX',
  'What metrics matter in community programs',
  'Building an experimentation runway',
  'On-call rotations for small teams',
]

// Round-robin each title onto a category; reply/like counts are derived
// from the index so the seed is stable across runs.
const threads = []
for (let i = 0; i < titles.length; i += 1) {
  const category = categories[i % categories.length]
  threads.push({
    id: `thread_${i + 1}`,
    title: titles[i],
    categoryId: category.id,
    replyCount: 12 + (i * 7) % 120,
    likeCount: 20 + (i * 11) % 240,
  })
}

const payload = {
  generatedAt: new Date().toISOString(),
  examples: {
    categories,
    threads,
  },
}

const outputPath = path.join(__dirname, '..', 'packages', 'forum_forge', 'static_content', 'examples.json')
fs.writeFileSync(outputPath, JSON.stringify(payload, null, 2) + '\n')

console.log(`Wrote ${outputPath} with ${categories.length} categories and ${threads.length} threads`)
|
||||
Reference in New Issue
Block a user