From 16f740f023467fa370da3ae25261f4c0dcf33641 Mon Sep 17 00:00:00 2001 From: Luca Sacchi Ricciardi Date: Tue, 7 Apr 2026 15:22:31 +0200 Subject: [PATCH] feat(stats): T32-T33 implement dashboard and usage endpoints Add statistics router with two endpoints: - GET /api/stats/dashboard: Aggregated dashboard statistics - Query param: days (1-365, default 30) - Auth required - Returns DashboardResponse - GET /api/usage: Detailed usage statistics with filtering - Required params: start_date, end_date - Optional filters: api_key_id, model - Pagination: skip, limit (max 1000) - Auth required - Returns List[UsageStatsResponse] Also add get_usage_stats() service function for querying individual usage records with filtering and pagination. --- src/openrouter_monitor/main.py | 2 + src/openrouter_monitor/routers/__init__.py | 3 +- src/openrouter_monitor/routers/stats.py | 118 +++++++++ src/openrouter_monitor/services/stats.py | 59 +++++ tests/unit/routers/test_stats.py | 272 +++++++++++++++++++++ 5 files changed, 453 insertions(+), 1 deletion(-) create mode 100644 src/openrouter_monitor/routers/stats.py create mode 100644 tests/unit/routers/test_stats.py diff --git a/src/openrouter_monitor/main.py b/src/openrouter_monitor/main.py index ebfa077..f7ee135 100644 --- a/src/openrouter_monitor/main.py +++ b/src/openrouter_monitor/main.py @@ -8,6 +8,7 @@ from fastapi.middleware.cors import CORSMiddleware from openrouter_monitor.config import get_settings from openrouter_monitor.routers import api_keys from openrouter_monitor.routers import auth +from openrouter_monitor.routers import stats settings = get_settings() @@ -31,6 +32,7 @@ app.add_middleware( # Include routers app.include_router(auth.router, prefix="/api/auth", tags=["authentication"]) app.include_router(api_keys.router, prefix="/api/keys", tags=["api-keys"]) +app.include_router(stats.router) @app.get("/") diff --git a/src/openrouter_monitor/routers/__init__.py b/src/openrouter_monitor/routers/__init__.py index 
# Reconstructed from a whitespace-mangled `git format-patch` payload: the file
# lost its newlines, so the patch hunks below are re-emitted as properly
# formatted Python (diff `+` markers and `@@` hunk headers dropped).

# --- src/openrouter_monitor/routers/__init__.py (post-patch content) ---
"""Routers package for OpenRouter Monitor."""
from openrouter_monitor.routers import api_keys
from openrouter_monitor.routers import auth
from openrouter_monitor.routers import stats

__all__ = ["auth", "api_keys", "stats"]


# --- src/openrouter_monitor/routers/stats.py (new file) ---
"""Statistics router for OpenRouter API Key Monitor.

T32-T33: Stats endpoints for dashboard and usage data.
"""
from datetime import date
from typing import List, Optional

from fastapi import APIRouter, Depends, Query, status
from sqlalchemy.orm import Session

from openrouter_monitor.database import get_db
from openrouter_monitor.dependencies import get_current_user
from openrouter_monitor.models import User
from openrouter_monitor.schemas.stats import (
    DashboardResponse,
    UsageStatsResponse,
)
from openrouter_monitor.services.stats import (
    get_dashboard_data,
    get_usage_stats,
)

# The router carries its own "/api" prefix; main.py includes it with no
# extra prefix (unlike the auth/api_keys routers, which get prefixes there).
router = APIRouter(prefix="/api", tags=["statistics"])


@router.get(
    "/stats/dashboard",
    response_model=DashboardResponse,
    status_code=status.HTTP_200_OK,
    summary="Get dashboard statistics",
    description="Get aggregated statistics for the dashboard view.",
)
async def get_dashboard(
    days: int = Query(
        default=30,
        ge=1,
        le=365,
        description="Number of days to look back (1-365)",
    ),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
) -> DashboardResponse:
    """Get dashboard statistics for the current user.

    Args:
        days: Number of days to look back (default 30, max 365)
        db: Database session
        current_user: Authenticated user

    Returns:
        DashboardResponse with summary, by_model, by_date, and top_models
    """
    # user id comes from the auth dependency, never from the client, so a
    # user cannot request another user's dashboard.
    return get_dashboard_data(db, current_user.id, days)


@router.get(
    "/usage",
    response_model=List[UsageStatsResponse],
    status_code=status.HTTP_200_OK,
    summary="Get detailed usage statistics",
    description="Get detailed usage statistics with filtering and pagination.",
)
async def get_usage(
    start_date: date = Query(
        ...,
        description="Start date for the query (YYYY-MM-DD)",
    ),
    end_date: date = Query(
        ...,
        description="End date for the query (YYYY-MM-DD)",
    ),
    api_key_id: Optional[int] = Query(
        default=None,
        description="Filter by specific API key ID",
    ),
    model: Optional[str] = Query(
        default=None,
        description="Filter by model name",
    ),
    skip: int = Query(
        default=0,
        ge=0,
        description="Number of records to skip for pagination",
    ),
    limit: int = Query(
        default=100,
        ge=1,
        le=1000,
        description="Maximum number of records to return (1-1000)",
    ),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
) -> List[UsageStatsResponse]:
    """Get detailed usage statistics with filtering.

    Args:
        start_date: Start date for the query period (required)
        end_date: End date for the query period (required)
        api_key_id: Optional filter by API key ID
        model: Optional filter by model name
        skip: Number of records to skip (pagination)
        limit: Maximum number of records to return
        db: Database session
        current_user: Authenticated user

    Returns:
        List of UsageStatsResponse matching the filters
    """
    return get_usage_stats(
        db=db,
        user_id=current_user.id,
        start_date=start_date,
        end_date=end_date,
        api_key_id=api_key_id,
        model=model,
        skip=skip,
        limit=limit,
    )


# --- src/openrouter_monitor/services/stats.py (addition) ---
# The patch also adds `UsageStatsResponse` to this module's existing
# `from openrouter_monitor.schemas.stats import (...)` block; the rest of the
# module (get_dashboard_data etc.) is unchanged and not reproduced here.


def get_usage_stats(
    db: Session,
    user_id: int,
    start_date: date,
    end_date: date,
    api_key_id: Optional[int] = None,
    model: Optional[str] = None,
    skip: int = 0,
    limit: int = 100,
) -> List[UsageStatsResponse]:
    """Get detailed usage statistics with filtering.

    Args:
        db: Database session
        user_id: User ID to filter by
        start_date: Start date for the query period (inclusive)
        end_date: End date for the query period (inclusive)
        api_key_id: Optional filter by API key ID
        model: Optional filter by model name
        skip: Number of records to skip (pagination)
        limit: Maximum number of records to return

    Returns:
        List of UsageStatsResponse matching the filters
    """
    # FIX: bind ApiKey in the same deferred import. The original body
    # referenced ApiKey but locally imported only UsageStats; unless the
    # module already imports ApiKey at top level (not visible in this patch
    # hunk), that is a NameError at call time. Importing it here is harmless
    # either way.
    from openrouter_monitor.models import ApiKey, UsageStats

    # NOTE(review): there is no check that start_date <= end_date — an
    # inverted range silently returns []. Confirm whether a 400/422 is wanted.

    # Join through ApiKey so a user only ever sees rows for their own keys.
    query = (
        db.query(UsageStats)
        .join(ApiKey, UsageStats.api_key_id == ApiKey.id)
        .filter(ApiKey.user_id == user_id)
        .filter(UsageStats.date >= start_date)
        .filter(UsageStats.date <= end_date)
    )

    # Apply optional filters.
    if api_key_id is not None:
        query = query.filter(UsageStats.api_key_id == api_key_id)

    if model is not None:
        query = query.filter(UsageStats.model == model)

    # Deterministic ordering (newest date first, then model) before paginating.
    results = (
        query.order_by(UsageStats.date.desc(), UsageStats.model)
        .offset(skip)
        .limit(limit)
        .all()
    )

    # Convert ORM rows to the response schema (requires from_attributes mode).
    return [
        UsageStatsResponse.model_validate(record)
        for record in results
    ]
# Reconstructed from a whitespace-mangled `git format-patch` payload:
# tests/unit/routers/test_stats.py (new file), re-emitted as formatted Python.
"""Tests for statistics router.

T32-T33: Tests for stats endpoints - RED phase
"""
# NOTE(review): timedelta, MagicMock, pytest and TestClient are imported but
# unused in this file — kept as-is pending confirmation nothing else (e.g.
# shared fixtures) relies on them being imported here.
from datetime import date, timedelta
from decimal import Decimal
from unittest.mock import MagicMock, patch

import pytest
from fastapi.testclient import TestClient

from openrouter_monitor.schemas.stats import (
    DashboardResponse,
    StatsByDate,
    StatsByModel,
    StatsSummary,
)


class TestDashboardEndpoint:
    """Tests for GET /api/stats/dashboard endpoint."""

    def test_dashboard_default_30_days(self, authorized_client):
        """Test dashboard endpoint with default 30 days parameter."""
        # Arrange: stub the service so the router is tested in isolation.
        with patch("openrouter_monitor.routers.stats.get_dashboard_data") as mock_get_dashboard:
            mock_get_dashboard.return_value = DashboardResponse(
                summary=StatsSummary(
                    total_requests=1000,
                    total_cost=Decimal("5.678901"),
                    total_tokens_input=50000,
                    total_tokens_output=30000,
                    avg_cost_per_request=Decimal("0.005679"),
                    period_days=30,
                ),
                by_model=[
                    StatsByModel(model="gpt-4", requests_count=600, cost=Decimal("4.00"), percentage_requests=60.0, percentage_cost=70.4),
                ],
                by_date=[
                    StatsByDate(date=date(2024, 1, 1), requests_count=50, cost=Decimal("0.25")),
                ],
                top_models=["gpt-4"],
            )

            # Act
            response = authorized_client.get("/api/stats/dashboard")

            # Assert
            assert response.status_code == 200
            data = response.json()
            assert "summary" in data
            assert data["summary"]["total_requests"] == 1000
            assert data["summary"]["period_days"] == 30
            assert "by_model" in data
            assert "by_date" in data
            assert "top_models" in data

    def test_dashboard_custom_days(self, authorized_client):
        """Test dashboard endpoint with custom days parameter."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_dashboard_data") as mock_get_dashboard:
            mock_get_dashboard.return_value = DashboardResponse(
                summary=StatsSummary(total_requests=100, total_cost=Decimal("1.00"), period_days=7),
                by_model=[],
                by_date=[],
                top_models=[],
            )

            # Act
            response = authorized_client.get("/api/stats/dashboard?days=7")

            # Assert
            assert response.status_code == 200
            data = response.json()
            assert data["summary"]["period_days"] == 7

    def test_dashboard_max_365_days(self, authorized_client):
        """Test dashboard endpoint enforces max 365 days limit."""
        # Act - Request more than 365 days
        response = authorized_client.get("/api/stats/dashboard?days=400")

        # Assert - Should get validation error
        assert response.status_code == 422
        data = response.json()
        assert "detail" in data

    def test_dashboard_min_1_day(self, authorized_client):
        """Test dashboard endpoint enforces min 1 day limit."""
        # Act - Request less than 1 day
        response = authorized_client.get("/api/stats/dashboard?days=0")

        # Assert - Should get validation error
        assert response.status_code == 422
        data = response.json()
        assert "detail" in data

    def test_dashboard_without_auth(self, client):
        """Test dashboard endpoint requires authentication."""
        # Act
        response = client.get("/api/stats/dashboard")

        # Assert
        assert response.status_code == 401
        data = response.json()
        assert "detail" in data

    def test_dashboard_calls_service_with_correct_params(self, authorized_client):
        """Test that dashboard endpoint calls service with correct parameters."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_dashboard_data") as mock_get_dashboard:
            mock_get_dashboard.return_value = DashboardResponse(
                summary=StatsSummary(total_requests=0, total_cost=Decimal("0"), period_days=60),
                by_model=[],
                by_date=[],
                top_models=[],
            )

            # Act
            response = authorized_client.get("/api/stats/dashboard?days=60")

            # Assert
            assert response.status_code == 200
            # Verify service was called with correct params: the router calls
            # get_dashboard_data(db, user_id, days) positionally.
            mock_get_dashboard.assert_called_once()
            args = mock_get_dashboard.call_args
            assert args[0][2] == 60  # days parameter


class TestUsageEndpoint:
    """Tests for GET /api/usage endpoint."""

    def test_usage_with_required_dates(self, authorized_client):
        """Test usage endpoint with required date parameters."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_usage_stats") as mock_get_usage:
            from openrouter_monitor.schemas.stats import UsageStatsResponse
            mock_get_usage.return_value = [
                UsageStatsResponse(
                    id=1,
                    api_key_id=1,
                    date=date(2024, 1, 15),
                    model="gpt-4",
                    requests_count=100,
                    tokens_input=5000,
                    tokens_output=3000,
                    cost=Decimal("0.123456"),
                    created_at="2024-01-15T12:00:00",
                )
            ]

            # Act
            response = authorized_client.get("/api/usage?start_date=2024-01-01&end_date=2024-01-31")

            # Assert
            assert response.status_code == 200
            data = response.json()
            assert isinstance(data, list)
            assert len(data) == 1
            assert data[0]["model"] == "gpt-4"

    def test_usage_missing_required_dates(self, authorized_client):
        """Test usage endpoint requires start_date and end_date."""
        # Act - Missing end_date
        response = authorized_client.get("/api/usage?start_date=2024-01-01")

        # Assert
        assert response.status_code == 422

    def test_usage_with_api_key_filter(self, authorized_client):
        """Test usage endpoint with api_key_id filter."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_usage_stats") as mock_get_usage:
            mock_get_usage.return_value = []

            # Act
            response = authorized_client.get(
                "/api/usage?start_date=2024-01-01&end_date=2024-01-31&api_key_id=5"
            )

            # Assert
            assert response.status_code == 200
            mock_get_usage.assert_called_once()
            kwargs = mock_get_usage.call_args[1]
            assert kwargs["api_key_id"] == 5

    def test_usage_with_model_filter(self, authorized_client):
        """Test usage endpoint with model filter."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_usage_stats") as mock_get_usage:
            mock_get_usage.return_value = []

            # Act
            response = authorized_client.get(
                "/api/usage?start_date=2024-01-01&end_date=2024-01-31&model=gpt-4"
            )

            # Assert
            assert response.status_code == 200
            mock_get_usage.assert_called_once()
            kwargs = mock_get_usage.call_args[1]
            assert kwargs["model"] == "gpt-4"

    def test_usage_with_pagination(self, authorized_client):
        """Test usage endpoint with skip and limit parameters."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_usage_stats") as mock_get_usage:
            mock_get_usage.return_value = []

            # Act
            response = authorized_client.get(
                "/api/usage?start_date=2024-01-01&end_date=2024-01-31&skip=10&limit=50"
            )

            # Assert
            assert response.status_code == 200
            mock_get_usage.assert_called_once()
            kwargs = mock_get_usage.call_args[1]
            assert kwargs["skip"] == 10
            assert kwargs["limit"] == 50

    def test_usage_max_limit_1000(self, authorized_client):
        """Test usage endpoint enforces max limit of 1000."""
        # Act - Request more than 1000
        response = authorized_client.get(
            "/api/usage?start_date=2024-01-01&end_date=2024-01-31&limit=1500"
        )

        # Assert
        assert response.status_code == 422

    def test_usage_combined_filters(self, authorized_client):
        """Test usage endpoint with all filters combined."""
        # Arrange
        with patch("openrouter_monitor.routers.stats.get_usage_stats") as mock_get_usage:
            mock_get_usage.return_value = []

            # Act
            response = authorized_client.get(
                "/api/usage?start_date=2024-01-01&end_date=2024-01-31&api_key_id=5&model=gpt-4&skip=0&limit=100"
            )

            # Assert
            assert response.status_code == 200
            mock_get_usage.assert_called_once()
            kwargs = mock_get_usage.call_args[1]
            assert kwargs["api_key_id"] == 5
            assert kwargs["model"] == "gpt-4"
            assert kwargs["skip"] == 0
            assert kwargs["limit"] == 100

    def test_usage_without_auth(self, client):
        """Test usage endpoint requires authentication."""
        # Act
        response = client.get("/api/usage?start_date=2024-01-01&end_date=2024-01-31")

        # Assert
        assert response.status_code == 401


class TestSecurity:
    """Security tests for stats endpoints."""

    # NOTE(review): both methods below are empty placeholders that always
    # pass; they assert nothing. Consider implementing them with two users
    # and real data, or marking them skipped so the report is honest.

    def test_user_cannot_see_other_user_data_dashboard(self, authorized_client):
        """Test that user A cannot see dashboard data of user B."""
        # This is tested implicitly by checking that the service is called
        # with the current user's ID, not by allowing user_id parameter
        pass

    def test_user_cannot_see_other_user_data_usage(self, authorized_client):
        """Test that user A cannot see usage data of user B."""
        # This is tested implicitly by the service filtering by user_id
        pass