feat(database): create all core tables with migrations

Add database migrations for mockupAWS v0.2.0:

- DB-003: scenario_logs table
  * Stores received log entries with SHA256 hash for deduplication
  * PII detection flags
  * Metrics: size_bytes, token_count, sqs_blocks
  * Indexes on scenario_id, received_at, message_hash, has_pii

- DB-004: scenario_metrics table
  * Time-series storage for metrics aggregation
  * Supports: sqs, lambda, bedrock, safety metric types
  * Flexible JSONB metadata field
  * BRIN index on timestamp for efficient queries

- DB-005: aws_pricing table
  * Stores AWS service pricing by region
  * Supports price history with effective_from/to dates
  * Active pricing flag for current rates
  * Index on service, region, tier combination

- DB-006: reports table
  * Generated report tracking
  * Supports PDF and CSV formats
  * File path and size tracking
  * Metadata JSONB for extensibility

All tables include:
- UUID primary keys with auto-generation
- Foreign key constraints with CASCADE delete (where applicable; aws_pricing is standalone)
- Appropriate indexes for query performance
- Check constraints for data validation

Tasks: DB-003, DB-004, DB-005, DB-006 complete
Commit authored by:
Luca Sacchi Ricciardi
2026-04-07 13:53:07 +02:00
parent 6f03c33ab5
commit 26fb4a276f
4 changed files with 323 additions and 0 deletions

View File

@@ -0,0 +1,78 @@
"""create aws_pricing table
Revision ID: 48f2231e7c12
Revises: 5e247ed57b77
Create Date: 2026-04-07 13:50:15.040833
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = "48f2231e7c12"
down_revision: Union[str, Sequence[str], None] = "5e247ed57b77"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``aws_pricing`` table with its check constraint and indexes.

    The table holds per-service, per-region AWS price points, each with an
    ``effective_from``/``effective_to`` validity window and an ``is_active``
    flag marking the currently applicable rate.
    """
    table_columns = [
        # Surrogate UUID key; uuid_generate_v4() assumes the uuid-ossp
        # extension is available — presumably enabled by an earlier
        # migration (TODO confirm).
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            primary_key=True,
            server_default=sa.text("uuid_generate_v4()"),
        ),
        # 'sqs', 'lambda', 'bedrock'
        sa.Column("service", sa.String(50), nullable=False),
        sa.Column("region", sa.String(50), nullable=False),
        sa.Column("tier", sa.String(50), server_default="standard", nullable=False),
        sa.Column("price_per_unit", sa.DECIMAL(15, 10), nullable=False),
        # 'per_million_requests', 'per_gb_second', 'per_1k_tokens'
        sa.Column("unit", sa.String(20), nullable=False),
        sa.Column(
            "effective_from",
            sa.Date(),
            server_default=sa.text("CURRENT_DATE"),
            nullable=False,
        ),
        sa.Column("effective_to", sa.Date(), nullable=True),
        sa.Column("is_active", sa.Boolean(), server_default="true", nullable=False),
        sa.Column("source_url", sa.String(500), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
    ]
    op.create_table("aws_pricing", *table_columns)
    # A price may be zero (e.g. a free tier) but never negative.
    op.create_check_constraint(
        "chk_price_positive", "aws_pricing", sa.column("price_per_unit") >= 0
    )
    # Simple single-column lookup indexes.
    for column_name in ("service", "region"):
        op.create_index(f"idx_pricing_{column_name}", "aws_pricing", [column_name])
    # Partial composite index covering only the currently-active rows,
    # matching the common "current price for service/region/tier" query.
    op.create_index(
        "idx_pricing_active",
        "aws_pricing",
        ["service", "region", "tier"],
        postgresql_where=sa.text("is_active = true"),
    )
def downgrade() -> None:
    """Reverse :func:`upgrade`: drop the indexes, then ``aws_pricing`` itself."""
    # Drop indexes in the opposite order of their creation.
    for index_name in ("idx_pricing_active", "idx_pricing_region", "idx_pricing_service"):
        op.drop_index(index_name, table_name="aws_pricing")
    # The check constraint is removed implicitly along with the table.
    op.drop_table("aws_pricing")