feat(01-02): set up PostgreSQL with Docker and async session factory

- Add docker-compose.yml with PostgreSQL 16 container (port 5433)
- Create async database session factory with connection pooling
- Configure SQLAlchemy 2.0 DeclarativeBase for models
- Update .env.example with correct database URL

Connection pool settings from research: pool_size=10, max_overflow=20,
pool_recycle=1800 (30 min), pool_pre_ping=True for validation.
This commit is contained in:
Mikkel Georgsen 2026-01-25 20:10:18 +00:00
parent 519333e598
commit fbcd2bbb8e
5 changed files with 95 additions and 1 deletions

View file

@@ -1,6 +1,6 @@
# Database Configuration
# PostgreSQL connection string using asyncpg driver
-DATABASE_URL=postgresql+asyncpg://debate:debate@localhost:5432/debate
+DATABASE_URL=postgresql+asyncpg://debate:debate_dev@localhost:5433/debate
# Security
# Generate with: openssl rand -hex 32

View file

@@ -0,0 +1,6 @@
"""Database package - exports key database components."""
from backend.app.db.base import Base
from backend.app.db.session import async_session_maker, engine, get_db
__all__ = ["Base", "engine", "async_session_maker", "get_db"]

21
backend/app/db/base.py Normal file
View file

@@ -0,0 +1,21 @@
"""SQLAlchemy 2.0 declarative base for all models."""
from sqlalchemy.orm import DeclarativeBase
class Base(DeclarativeBase):
    """Base class for all SQLAlchemy models.

    All models should inherit from this class so that they are
    registered on ``Base.metadata`` and Alembic autogenerate can
    discover them.
    """
    # NOTE: the docstring is the class body; a trailing `pass` is redundant.
# Import all models here for Alembic autogenerate to discover them
# This ensures all models are registered with Base.metadata
# Note: Models are imported at the bottom to avoid circular imports
def import_models() -> None:
    """Import all model modules so they register with ``Base.metadata``.

    Intended to be called before Alembic autogenerate runs, so every
    table is visible to migration diffs.  The import lives inside the
    function body (not at module top level) to avoid circular imports
    between the base module and the model modules.
    """
    from backend.app.db.models import build  # noqa: F401

45
backend/app/db/session.py Normal file
View file

@@ -0,0 +1,45 @@
"""Async database session management with connection pooling."""
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import (
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from backend.app.core.config import settings
# Create async engine with connection pooling settings from research
# See: 01-RESEARCH.md Pattern 1: Async Database Session Management
# Process-wide async engine.  Pool settings follow the project research
# notes (01-RESEARCH.md, Pattern 1: Async Database Session Management).
engine = create_async_engine(
    settings.database_url,
    pool_size=10,         # persistent connections kept open in the pool
    max_overflow=20,      # extra short-lived connections allowed under burst load
    pool_timeout=30,      # seconds to wait for a free connection before erroring
    pool_recycle=1800,    # 30 minutes - refresh connections to avoid stale sockets
    pool_pre_ping=True,   # validate each connection before handing it out
    echo=False,           # set True for SQL statement logging in development
)

# Session factory for creating async sessions.  expire_on_commit=False
# keeps ORM objects usable after commit (no implicit refresh/IO on access).
async_session_maker = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency that yields an async database session.

    Opens a session from ``async_session_maker`` and yields it to the
    request handler; the ``async with`` block's exit closes the session
    when the request finishes, including when the handler raises.

    Usage:
        @app.get("/items")
        async def get_items(db: AsyncSession = Depends(get_db)):
            # Use db session here
            pass
    """
    async with async_session_maker() as session:
        yield session

22
docker-compose.yml Normal file
View file

@@ -0,0 +1,22 @@
services:
  postgres:
    image: postgres:16-alpine
    container_name: debate-postgres
    environment:
      POSTGRES_USER: debate
      POSTGRES_PASSWORD: debate_dev  # dev-only credential; matches .env.example
      POSTGRES_DB: debate
    ports:
      # Host port 5433 (not the default 5432) so a locally installed
      # PostgreSQL does not conflict; see DATABASE_URL in .env.example.
      - "5433:5432"
    volumes:
      # Named volume persists database files across container restarts.
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      # pg_isready reports when the server accepts connections for this
      # user/database; dependent services can wait on service_healthy.
      test: ["CMD-SHELL", "pg_isready -U debate -d debate"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s  # grace period before failed checks count
    restart: unless-stopped

volumes:
  postgres_data: