feat(database): create all core tables with migrations
Add database migrations for mockupAWS v0.2.0: - DB-003: scenario_logs table * Stores received log entries with SHA256 hash for deduplication * PII detection flags * Metrics: size_bytes, token_count, sqs_blocks * Indexes on scenario_id, received_at, message_hash, has_pii - DB-004: scenario_metrics table * Time-series storage for metrics aggregation * Supports: sqs, lambda, bedrock, safety metric types * Flexible JSONB metadata field * BRIN index on timestamp for efficient queries - DB-005: aws_pricing table * Stores AWS service pricing by region * Supports price history with effective_from/to dates * Active pricing flag for current rates * Index on service, region, tier combination - DB-006: reports table * Generated report tracking * Supports PDF and CSV formats * File path and size tracking * Metadata JSONB for extensibility All tables include: - UUID primary keys with auto-generation - Foreign key constraints with CASCADE delete - Appropriate indexes for query performance - Check constraints for data validation Tasks: DB-003, DB-004, DB-005, DB-006 complete
This commit is contained in:
73
alembic/versions/e80c6eef58b2_create_reports_table.py
Normal file
73
alembic/versions/e80c6eef58b2_create_reports_table.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""create reports table
|
||||
|
||||
Revision ID: e80c6eef58b2
|
||||
Revises: 48f2231e7c12
|
||||
Create Date: 2026-04-07 13:51:51.381906
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = "e80c6eef58b2"  # this migration's unique id
down_revision: Union[str, Sequence[str], None] = "48f2231e7c12"  # immediate parent migration
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def upgrade() -> None:
    """Create the ``reports`` table plus its lookup/time-series indexes.

    Each row tracks one generated report for a scenario: output format
    (PDF or CSV), file location and size, generation timestamp and actor,
    and a free-form JSONB metadata blob.
    """
    # NOTE(review): uuid_generate_v4() assumes the uuid-ossp extension was
    # enabled by an earlier migration — confirm before deploying fresh DBs.
    table_columns = [
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            primary_key=True,
            server_default=sa.text("uuid_generate_v4()"),
        ),
        # Owning scenario; rows are removed when the scenario is deleted.
        sa.Column(
            "scenario_id",
            postgresql.UUID(as_uuid=True),
            sa.ForeignKey("scenarios.id", ondelete="CASCADE"),
            nullable=False,
        ),
        # Creates the PostgreSQL enum type "report_format" as a side effect.
        sa.Column(
            "format", sa.Enum("pdf", "csv", name="report_format"), nullable=False
        ),
        sa.Column("file_path", sa.String(500), nullable=False),
        sa.Column("file_size_bytes", sa.Integer(), nullable=True),
        sa.Column(
            "generated_at",
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text("NOW()"),
            nullable=False,
        ),
        # user_id or api_key_id
        sa.Column("generated_by", sa.String(100), nullable=True),
        sa.Column("metadata", postgresql.JSONB(), server_default="{}"),
    ]
    op.create_table("reports", *table_columns)

    # B-tree for lookup by owning scenario; BRIN fits the append-only,
    # monotonically increasing generated_at column.
    op.create_index("idx_reports_scenario_id", "reports", ["scenario_id"])
    op.create_index(
        "idx_reports_generated_at", "reports", ["generated_at"], postgresql_using="brin"
    )
def downgrade() -> None:
    """Reverse ``upgrade``: drop indexes, the table, then the enum type."""
    # Indexes first, in reverse creation order.
    for index_name in ("idx_reports_generated_at", "idx_reports_scenario_id"):
        op.drop_index(index_name, table_name="reports")

    op.drop_table("reports")

    # drop_table does not remove the PostgreSQL enum type created by
    # sa.Enum, so it must be dropped explicitly.
    op.execute("DROP TYPE IF EXISTS report_format;")
Reference in New Issue
Block a user