feat: project skeleton

- infra backbone (k8s, kind, helm, docker) implemented
- security: implementation and unit tests complete
2025-11-21 12:00:00 -05:00
commit fbe9fbba6e
46 changed files with 3450 additions and 0 deletions

@@ -0,0 +1,61 @@
-- Initial schema for IncidentOps
-- Creates core tables: users, orgs, org_members, services, incidents, incident_events

CREATE TABLE users (
    id UUID PRIMARY KEY,
    email TEXT NOT NULL UNIQUE,
    password_hash TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE TABLE orgs (
    id UUID PRIMARY KEY,
    name TEXT NOT NULL,
    slug TEXT NOT NULL UNIQUE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE TABLE org_members (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL REFERENCES users(id),
    org_id UUID NOT NULL REFERENCES orgs(id),
    role TEXT NOT NULL CHECK (role IN ('admin', 'member', 'viewer')),
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    UNIQUE (user_id, org_id)
);

CREATE TABLE services (
    id UUID PRIMARY KEY,
    org_id UUID NOT NULL REFERENCES orgs(id),
    name TEXT NOT NULL,
    slug TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    UNIQUE (org_id, slug)
);

CREATE TABLE incidents (
    id UUID PRIMARY KEY,
    org_id UUID NOT NULL REFERENCES orgs(id),
    service_id UUID NOT NULL REFERENCES services(id),
    title TEXT NOT NULL,
    description TEXT,
    status TEXT NOT NULL CHECK (status IN ('triggered', 'acknowledged', 'mitigated', 'resolved')),
    severity TEXT NOT NULL CHECK (severity IN ('critical', 'high', 'medium', 'low')),
    version INTEGER NOT NULL DEFAULT 1,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_incidents_org_status ON incidents(org_id, status);
CREATE INDEX idx_incidents_org_created ON incidents(org_id, created_at DESC);

CREATE TABLE incident_events (
    id UUID PRIMARY KEY,
    incident_id UUID NOT NULL REFERENCES incidents(id),
    event_type TEXT NOT NULL,
    actor_user_id UUID REFERENCES users(id),
    payload JSONB,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_incident_events_incident ON incident_events(incident_id, created_at);
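
Note on the version column: incidents.version looks intended for optimistic locking, where an update carries the version it read and bumps it on success, so a stale write affects zero rows instead of silently overwriting. A minimal asyncpg sketch of that pattern; acknowledge_incident is a hypothetical helper, not part of this commit:

import asyncpg

# Hypothetical compare-and-swap on incidents.version (illustrative only).
async def acknowledge_incident(
    conn: asyncpg.Connection, incident_id, expected_version: int
) -> bool:
    tag = await conn.execute(
        """
        UPDATE incidents
           SET status = 'acknowledged',
               version = version + 1,
               updated_at = now()
         WHERE id = $1 AND version = $2
        """,
        incident_id,
        expected_version,
    )
    # asyncpg returns the command tag; "UPDATE 0" means a concurrent writer won.
    return tag == "UPDATE 1"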

@@ -0,0 +1,15 @@
-- Refresh tokens table for JWT token rotation
-- Stores hashed refresh tokens with active org context

CREATE TABLE refresh_tokens (
    id UUID PRIMARY KEY,
    user_id UUID NOT NULL REFERENCES users(id),
    token_hash TEXT NOT NULL UNIQUE,
    active_org_id UUID NOT NULL REFERENCES orgs(id),
    expires_at TIMESTAMPTZ NOT NULL,
    revoked_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_refresh_tokens_user ON refresh_tokens(user_id);
CREATE INDEX idx_refresh_tokens_hash ON refresh_tokens(token_hash);
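
Only the hash of a refresh token is persisted, so issuing one means generating an opaque secret, storing its digest together with the active org, and handing the raw value back to the client exactly once. A rough sketch of that flow with asyncpg; the SHA-256 digest, the token length, and the 30-day TTL are assumptions, not anything this migration prescribes:

import hashlib
import secrets
import uuid
from datetime import datetime, timedelta, timezone

import asyncpg

# Hypothetical issue path; hashing scheme and TTL are assumptions.
async def issue_refresh_token(
    conn: asyncpg.Connection, user_id, org_id, ttl_days: int = 30
) -> str:
    raw = secrets.token_urlsafe(48)
    token_hash = hashlib.sha256(raw.encode()).hexdigest()
    await conn.execute(
        """
        INSERT INTO refresh_tokens (id, user_id, token_hash, active_org_id, expires_at)
        VALUES ($1, $2, $3, $4, $5)
        """,
        uuid.uuid4(),
        user_id,
        token_hash,
        org_id,
        datetime.now(timezone.utc) + timedelta(days=ttl_days),
    )
    return raw  # the raw token is never stored, only its hash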

@@ -0,0 +1,25 @@
-- Notification system tables
-- Stores notification targets and delivery attempts

CREATE TABLE notification_targets (
    id UUID PRIMARY KEY,
    org_id UUID NOT NULL REFERENCES orgs(id),
    name TEXT NOT NULL,
    target_type TEXT NOT NULL CHECK (target_type IN ('webhook', 'email', 'slack')),
    webhook_url TEXT,
    enabled BOOLEAN NOT NULL DEFAULT true,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_notification_targets_org ON notification_targets(org_id);

CREATE TABLE notification_attempts (
    id UUID PRIMARY KEY,
    incident_id UUID NOT NULL REFERENCES incidents(id),
    target_id UUID NOT NULL REFERENCES notification_targets(id),
    status TEXT NOT NULL CHECK (status IN ('pending', 'sent', 'failed')),
    error TEXT,
    sent_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    UNIQUE (incident_id, target_id)
);
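
The UNIQUE (incident_id, target_id) constraint makes enqueueing delivery attempts idempotent: re-running the fan-out for an incident cannot create duplicate attempts. A sketch of how that could be used with asyncpg; enqueue_notification is a hypothetical helper, not part of this commit:

import uuid

import asyncpg

# Hypothetical enqueue step; idempotency comes from UNIQUE (incident_id, target_id).
async def enqueue_notification(
    conn: asyncpg.Connection, incident_id, target_id
) -> bool:
    tag = await conn.execute(
        """
        INSERT INTO notification_attempts (id, incident_id, target_id, status)
        VALUES ($1, $2, $3, 'pending')
        ON CONFLICT (incident_id, target_id) DO NOTHING
        """,
        uuid.uuid4(),
        incident_id,
        target_id,
    )
    # "INSERT 0 0" means an attempt for this pair already existed.
    return tag == "INSERT 0 1"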

migrations/migrate.py

@@ -0,0 +1,119 @@
"""
Simple migration runner using asyncpg.
Tracks applied migrations in a _migrations table.
Usage:
DATABASE_URL=postgresql://user:pass@localhost/db uv run python migrations/migrate.py apply
DATABASE_URL=postgresql://user:pass@localhost/db uv run python migrations/migrate.py status
"""
import asyncio
import os
import sys
from pathlib import Path

import asyncpg

MIGRATIONS_DIR = Path(__file__).parent


async def ensure_migrations_table(conn: asyncpg.Connection) -> None:
    """Create the migrations tracking table if it doesn't exist."""
    await conn.execute("""
        CREATE TABLE IF NOT EXISTS _migrations (
            id SERIAL PRIMARY KEY,
            name TEXT NOT NULL UNIQUE,
            applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
        )
    """)


async def get_applied_migrations(conn: asyncpg.Connection) -> set[str]:
    """Get the set of already applied migration names."""
    rows = await conn.fetch("SELECT name FROM _migrations")
    return {row["name"] for row in rows}


async def get_pending_migrations(conn: asyncpg.Connection) -> list[Path]:
    """Get list of migration files that haven't been applied yet."""
    applied = await get_applied_migrations(conn)
    sql_files = sorted(MIGRATIONS_DIR.glob("*.sql"))
    return [f for f in sql_files if f.name not in applied]


async def apply_migration(conn: asyncpg.Connection, migration_file: Path) -> None:
    """Apply a single migration file within a transaction."""
    sql = migration_file.read_text()
    async with conn.transaction():
        await conn.execute(sql)
        await conn.execute(
            "INSERT INTO _migrations (name) VALUES ($1)",
            migration_file.name,
        )
    print(f"Applied: {migration_file.name}")


async def migrate(database_url: str) -> None:
    """Apply all pending migrations."""
    conn = await asyncpg.connect(database_url)
    try:
        await ensure_migrations_table(conn)
        pending = await get_pending_migrations(conn)
        if not pending:
            print("No pending migrations.")
            return
        for migration_file in pending:
            await apply_migration(conn, migration_file)
        print(f"Applied {len(pending)} migration(s).")
    finally:
        await conn.close()


async def status(database_url: str) -> None:
    """Show migration status."""
    conn = await asyncpg.connect(database_url)
    try:
        await ensure_migrations_table(conn)
        applied = await get_applied_migrations(conn)
        pending = await get_pending_migrations(conn)
        print("Applied migrations:")
        for name in sorted(applied):
            print(f"  [x] {name}")
        if not applied:
            print("  (none)")
        print("\nPending migrations:")
        for f in pending:
            print(f"  [ ] {f.name}")
        if not pending:
            print("  (none)")
    finally:
        await conn.close()


def main() -> None:
    database_url = os.environ.get("DATABASE_URL")
    if not database_url:
        print("Error: DATABASE_URL environment variable is required")
        sys.exit(1)
    if len(sys.argv) < 2:
        print("Usage: python migrate.py [apply|status]")
        sys.exit(1)
    command = sys.argv[1]
    if command == "apply":
        asyncio.run(migrate(database_url))
    elif command == "status":
        asyncio.run(status(database_url))
    else:
        print(f"Unknown command: {command}")
        print("Usage: python migrate.py [apply|status]")
        sys.exit(1)


if __name__ == "__main__":
    main()
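
The runner applies *.sql files in lexical filename order, and a second run is a no-op because applied names are recorded in _migrations. A smoke-test sketch of both properties, assuming a throwaway database in TEST_DATABASE_URL and that migrations/ is importable from the repo root (neither is established by this commit):

import asyncio
import os

import asyncpg

from migrations.migrate import migrate  # assumes repo-root import works

async def check_migrations() -> None:
    url = os.environ["TEST_DATABASE_URL"]  # hypothetical scratch database
    await migrate(url)  # applies everything pending
    await migrate(url)  # second run should report "No pending migrations."
    conn = await asyncpg.connect(url)
    try:
        rows = await conn.fetch("SELECT name FROM _migrations ORDER BY id")
        names = [r["name"] for r in rows]
        # Apply order (insertion order) should match lexical filename order.
        assert names == sorted(names)
    finally:
        await conn.close()

if __name__ == "__main__":
    asyncio.run(check_migrations())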