# Migration Template
"""
Alembic Migration Template
This template shows the standard structure for database migrations.
Conventions:
- Always include both upgrade() and downgrade() functions
- Add helpful comments explaining what each change does
- Use proper data types
- Add indexes for foreign keys and frequently queried fields
- Use server_default for default values (executed in database)
- Use nullable=True for optional fields, nullable=False for required
"""
"""Add example table with proper indexes
Revision ID: 001_add_example_table
Revises:
Create Date: 2025-01-15 10:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# Revision identifiers, used by Alembic to order migrations in the chain.
revision = '001_add_example_table'
down_revision = None # If this is the first migration, else use previous revision ID
branch_labels = None  # only needed for branched migration histories
depends_on = None  # cross-branch dependency, rarely needed
def upgrade() -> None:
    """
    Apply migration: create the example_items table.

    Creates the table with typed, commented columns; indexes for the
    foreign key and frequently filtered/sorted columns (including
    composite and PostgreSQL partial indexes); a foreign key to users;
    and check constraints on price and quantity.

    Everything created here is removed by downgrade() via drop_table.
    """
    # ============================================================================
    # CREATE TABLE
    # ============================================================================
    op.create_table(
        'example_items',
        # Primary key
        sa.Column('id', sa.Integer(), nullable=False),
        # Foreign keys (always indexed — see index section below)
        sa.Column('user_id', sa.Integer(), nullable=False),
        # String columns
        sa.Column(
            'name',
            sa.String(length=100),
            nullable=False,
            comment='Item name'
        ),
        sa.Column(
            'description',
            sa.String(length=500),
            nullable=True,
            comment='Optional item description'
        ),
        # Numeric columns
        sa.Column(
            'price',
            sa.Numeric(precision=10, scale=2),  # e.g., 99999999.99
            nullable=False,
            server_default='0.00',
            comment='Item price'
        ),
        sa.Column(
            'quantity',
            sa.Integer(),
            nullable=False,
            server_default='0',
            comment='Available quantity'
        ),
        # Boolean columns
        sa.Column(
            'is_active',
            sa.Boolean(),
            nullable=False,
            server_default='true',
            comment='Whether item is currently active'
        ),
        # Soft delete
        sa.Column(
            'is_deleted',
            sa.Boolean(),
            nullable=False,
            server_default='false',
            comment='Soft delete flag'
        ),
        sa.Column(
            'deleted_at',
            sa.DateTime(),
            nullable=True,
            comment='When item was deleted'
        ),
        # JSON data (use JSONB for better performance)
        # NOTE(review): 'metadata' is a reserved attribute name on SQLAlchemy
        # declarative models; if this table is mapped with the ORM, expose the
        # column under a different Python attribute name (the DB column name
        # itself is fine).
        sa.Column(
            'metadata',
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment='Additional metadata'
        ),
        # Array column (PostgreSQL-specific)
        sa.Column(
            'tags',
            postgresql.ARRAY(sa.String(length=50)),
            nullable=True,
            comment='Item tags'
        ),
        # Timestamps (always include these)
        # NOTE(review): consider sa.DateTime(timezone=True) if the application
        # mixes time zones — confirm against project conventions.
        sa.Column(
            'created_at',
            sa.DateTime(),
            nullable=False,
            server_default=sa.text('now()'),
            comment='Record creation timestamp'
        ),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            nullable=True,
            comment='Record last update timestamp'
        ),
        # Primary key constraint
        sa.PrimaryKeyConstraint('id', name='pk_example_items')
    )
    # ============================================================================
    # CREATE INDEXES
    # ============================================================================
    # The primary key gets a unique index automatically, so no explicit index
    # on 'id' is created here — an extra one would only duplicate the PK index
    # and slow down every write.
    # Foreign key index (ALWAYS index foreign keys)
    op.create_index(
        'ix_example_items_user_id',
        'example_items',
        ['user_id'],
        unique=False
    )
    # Frequently filtered columns
    op.create_index(
        'ix_example_items_is_active',
        'example_items',
        ['is_active'],
        unique=False
    )
    op.create_index(
        'ix_example_items_is_deleted',
        'example_items',
        ['is_deleted'],
        unique=False
    )
    # Timestamp index (for ordering by creation date)
    op.create_index(
        'ix_example_items_created_at',
        'example_items',
        ['created_at'],
        unique=False
    )
    # Composite indexes for common queries
    # Example: SELECT * FROM example_items WHERE user_id = ? AND is_active = true
    op.create_index(
        'idx_example_items_user_active',
        'example_items',
        ['user_id', 'is_active'],
        unique=False
    )
    # Example: SELECT * FROM example_items WHERE user_id = ? ORDER BY created_at DESC
    op.create_index(
        'idx_example_items_user_created',
        'example_items',
        ['user_id', 'created_at'],
        unique=False
    )
    # Partial index (only index active, non-deleted items)
    # This is PostgreSQL-specific and more efficient for common queries
    op.execute("""
    CREATE INDEX idx_example_items_active_not_deleted
    ON example_items (user_id, created_at)
    WHERE is_active = true AND is_deleted = false
    """)
    # ============================================================================
    # CREATE FOREIGN KEYS
    # ============================================================================
    op.create_foreign_key(
        'fk_example_items_user_id',  # Constraint name
        'example_items',             # Source table
        'users',                     # Target table
        ['user_id'],                 # Source column
        ['id'],                      # Target column
        ondelete='CASCADE'           # Delete items when user is deleted
    )
    # ============================================================================
    # CREATE CONSTRAINTS
    # ============================================================================
    # Check constraint: price must be non-negative
    op.create_check_constraint(
        'ck_example_items_price_positive',
        'example_items',
        'price >= 0'
    )
    # Check constraint: quantity must be non-negative
    op.create_check_constraint(
        'ck_example_items_quantity_positive',
        'example_items',
        'quantity >= 0'
    )
    # Unique constraint example (uncomment if needed)
    # op.create_unique_constraint(
    #     'uq_example_items_user_name',
    #     'example_items',
    #     ['user_id', 'name']
    # )
def downgrade() -> None:
    """
    Rollback migration: drop the example_items table.

    Always implement downgrade so the migration can be reverted.
    Dropping the table also removes its indexes, constraints, and
    foreign keys, so a single drop_table call is sufficient here.
    """
    op.drop_table('example_items')
# ==============================================================================
# COMMON MIGRATION PATTERNS
# ==============================================================================
# The string literal below is reference documentation only; it is evaluated
# and discarded at import time and never executed as part of a migration.
"""
1. ADD COLUMN (nullable first, then make required)
# Migration 1: Add nullable column
def upgrade():
op.add_column('users', sa.Column('phone', sa.String(20), nullable=True))
# Migration 2: Backfill data and make non-nullable
def upgrade():
op.execute("UPDATE users SET phone = '' WHERE phone IS NULL")
op.alter_column('users', 'phone', nullable=False)
2. RENAME COLUMN (create new, copy data, drop old)
def upgrade():
# Add new column
op.add_column('users', sa.Column('full_name', sa.String(100), nullable=True))
# Copy data
op.execute("UPDATE users SET full_name = name")
# Make non-nullable
op.alter_column('users', 'full_name', nullable=False)
# Drop old column (in future migration after code is updated)
# op.drop_column('users', 'name')
3. ADD INDEX CONCURRENTLY (PostgreSQL-specific, for large tables)
def upgrade():
op.execute(
"CREATE INDEX CONCURRENTLY idx_users_email ON users (email)"
)
Note: This allows reads/writes during index creation but requires
a separate connection and can't be in a transaction block.
4. DATA MIGRATION
def upgrade():
# Migrate old status values to new format
op.execute(
\"\"\"
UPDATE orders
SET status = 'completed'
WHERE status = 'complete'
\"\"\"
)
5. ADD ENUM TYPE (PostgreSQL)
def upgrade():
# Create enum type
order_status = postgresql.ENUM(
'pending', 'processing', 'completed', 'cancelled',
name='order_status',
create_type=True
)
order_status.create(op.get_bind())
# Add column using enum
op.add_column(
'orders',
sa.Column('status', order_status, nullable=False, server_default='pending')
)
def downgrade():
op.drop_column('orders', 'status')
op.execute('DROP TYPE order_status')
6. CHANGE COLUMN TYPE
def upgrade():
# Change column type (PostgreSQL)
op.execute("ALTER TABLE users ALTER COLUMN age TYPE INTEGER USING age::integer")
# or using Alembic
def upgrade():
op.alter_column('users', 'age',
existing_type=sa.String(),
type_=sa.Integer(),
postgresql_using='age::integer')
7. ADD TABLE WITH RELATIONSHIP
def upgrade():
# Create related table
op.create_table(
'order_items',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=False),
sa.Column('quantity', sa.Integer(), nullable=False),
sa.Column('unit_price', sa.Numeric(10, 2), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# Add indexes
op.create_index('ix_order_items_order_id', 'order_items', ['order_id'])
op.create_index('ix_order_items_product_id', 'order_items', ['product_id'])
# Add foreign keys
op.create_foreign_key(
'fk_order_items_order_id',
'order_items', 'orders',
['order_id'], ['id'],
ondelete='CASCADE'
)
op.create_foreign_key(
'fk_order_items_product_id',
'order_items', 'products',
['product_id'], ['id'],
ondelete='RESTRICT'
)
"""
# ==============================================================================
# HOW TO USE THIS TEMPLATE
# ==============================================================================
# Raw string: the psql examples below contain "\d", which is an invalid
# escape sequence in a normal string literal (SyntaxWarning on Python 3.12+).
# The raw prefix produces the identical text without the warning.
r"""
1. Generate migration:
alembic revision --autogenerate -m "add example table"
or for manual migration:
alembic revision -m "add example table"
2. Edit the generated file in migrations/versions/
3. Copy relevant sections from this template
4. Test migration:
alembic upgrade head
# Verify in database
psql yourdb -c "\d example_items"
5. Test rollback:
alembic downgrade -1
# Verify table is gone
psql yourdb -c "\d example_items"
6. If all works, commit migration file to git:
git add migrations/versions/001_add_example_table.py
git commit -m "feat(db): add example_items table"
7. Document in PR:
"Database migration included: 001_add_example_table.py
Run `alembic upgrade head` before testing"
"""