feat: initial MALS implementation — Phase 1

This commit is contained in:
2026-03-20 07:59:43 -05:00
commit dc1258a5cc
28 changed files with 2972 additions and 0 deletions

59
alembic/env.py Normal file
View File

@@ -0,0 +1,59 @@
"""Alembic environment — uses sync psycopg2 driver for migrations."""
from __future__ import annotations
import os
from logging.config import fileConfig
from alembic import context
from sqlalchemy import create_engine
# this is the Alembic Config object
config = context.config
# Interpret the config file for Python logging.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Read DATABASE_URL from environment (preferred) or alembic.ini
database_url = os.environ.get("DATABASE_URL") or config.get_main_option("sqlalchemy.url")
if not database_url:
raise RuntimeError(
"mals/alembic: DATABASE_URL is required. "
"Set it as an environment variable or in alembic.ini."
)
# Convert asyncpg URL to psycopg2 (sync) for Alembic
sync_url = (
database_url.replace("postgresql+asyncpg://", "postgresql://")
.replace("asyncpg://", "postgresql://")
)
target_metadata = None
def run_migrations_offline() -> None:
    """Emit migration SQL without connecting to a database ("offline" mode)."""
    # Bundle the configure() arguments so the intent reads as one unit:
    # render statements with literal values bound into the SQL text.
    offline_options = {
        "url": sync_url,
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations against a live database.

    Builds a temporary sync engine from ``sync_url``, runs the migrations
    inside a transaction, and disposes the engine afterwards so no pooled
    connections are left open when the Alembic process exits.
    """
    connectable = create_engine(sync_url)
    try:
        with connectable.connect() as connection:
            context.configure(connection=connection, target_metadata=target_metadata)
            with context.begin_transaction():
                context.run_migrations()
    finally:
        # Fix: the engine (and its connection pool) was previously never
        # released; dispose it explicitly since this process is short-lived.
        connectable.dispose()
# Entry point: pick the offline (SQL-emitting) or online (live DB) runner.
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_runner()

View File

@@ -0,0 +1,78 @@
"""Initial schema — agent_logs table with indexes.
Revision ID: 001
Revises:
Create Date: 2026-03-20
"""
from __future__ import annotations
from alembic import op
revision = "001"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the agent_logs table and its query-path indexes (raw, idempotent SQL)."""
    # NOTE(review): gen_random_uuid() is built in from PostgreSQL 13; older
    # servers need the pgcrypto extension — confirm the target server version.
    op.execute("""
        CREATE TABLE IF NOT EXISTS agent_logs (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
            -- Source
            agent_id VARCHAR(64) NOT NULL,
            session_key VARCHAR(256),
            source VARCHAR(64) NOT NULL DEFAULT 'api',
            -- Classification
            level VARCHAR(16) NOT NULL DEFAULT 'info',
            category VARCHAR(64),
            -- Content
            message TEXT NOT NULL,
            metadata JSONB DEFAULT '{}',
            -- Correlation
            trace_id UUID,
            parent_id UUID REFERENCES agent_logs(id),
            -- Resolution tracking
            resolved BOOLEAN NOT NULL DEFAULT false,
            resolved_at TIMESTAMPTZ,
            resolved_by VARCHAR(64)
        )
    """)
    # Per-agent timeline: newest-first scans for a single agent_id.
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_agent_logs_agent_time
        ON agent_logs (agent_id, created_at DESC)
    """)
    # Partial index: only high-severity rows, for level-filtered queries.
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_agent_logs_level_time
        ON agent_logs (level, created_at DESC)
        WHERE level IN ('warn', 'error', 'critical')
    """)
    # Partial index: open (unresolved) high-severity rows — triage queue.
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_agent_logs_unresolved
        ON agent_logs (created_at DESC)
        WHERE resolved = false AND level IN ('warn', 'error', 'critical')
    """)
    # GIN index on the JSONB column for containment/key lookups in metadata.
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_agent_logs_metadata
        ON agent_logs USING GIN (metadata)
    """)
    # Partial index for trace correlation; skips the (presumably common)
    # rows with no trace_id.
    op.execute("""
        CREATE INDEX IF NOT EXISTS idx_agent_logs_trace
        ON agent_logs (trace_id)
        WHERE trace_id IS NOT NULL
    """)
def downgrade() -> None:
    """Drop agent_logs; CASCADE also removes dependents of the self-referencing FK."""
    drop_stmt = "DROP TABLE IF EXISTS agent_logs CASCADE"
    op.execute(drop_stmt)