feat(db): migrate to PostgreSQL 16 standalone

- docker-compose.prod.yml: add postgres:16-alpine service with health check,
  dedicated prod_leopost_net, backup volume mount, connection pool
- requirements.txt: add psycopg2-binary==2.9.9
- database.py: remove SQLite-specific run_migrations(), add PG pool_size/
  max_overflow/pool_pre_ping, keep sqlite compat for dev
- main.py: remove run_migrations call, rely on create_all for PG
- scripts/migrate_sqlite_to_pg.py: one-shot data migration script

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Michele
2026-04-01 17:11:35 +02:00
parent b38419f3ee
commit cc1cb2d02a
5 changed files with 175 additions and 74 deletions

View File

@@ -1,13 +1,21 @@
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker, declarative_base

from .config import settings

# Engine configuration differs by backend:
# - SQLite (dev/test): needs check_same_thread=False so the single file
#   connection may be used from worker threads.
# - PostgreSQL (prod): gets an explicit connection pool instead; SQLite
#   ignores/rejects pool_size, so these kwargs must stay branch-local.
if settings.database_url.startswith("sqlite"):
    engine = create_engine(
        settings.database_url,
        connect_args={"check_same_thread": False},
    )
else:
    engine = create_engine(
        settings.database_url,
        pool_size=10,        # steady-state connections held open
        max_overflow=20,     # extra connections allowed under burst load
        pool_pre_ping=True,  # validate connections before use; drops stale ones
    )

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
@@ -18,69 +26,3 @@ def get_db():
yield db
finally:
db.close()
# Table -> list of (column_name, SQLite column definition) to add when missing.
_COLUMN_MIGRATIONS = {
    "users": [
        ("email", "VARCHAR"),
        ("display_name", "VARCHAR"),
        ("avatar_url", "VARCHAR"),
        ("auth_provider", "VARCHAR DEFAULT 'local'"),
        ("google_id", "VARCHAR"),
        ("subscription_plan", "VARCHAR DEFAULT 'freemium'"),
        ("subscription_expires_at", "DATETIME"),
        ("is_admin", "BOOLEAN DEFAULT 0"),
        ("posts_generated_this_month", "INTEGER DEFAULT 0"),
        ("posts_reset_date", "DATE"),
    ],
    "characters": [("user_id", "INTEGER")],
    "posts": [("user_id", "INTEGER")],
    "affiliate_links": [("user_id", "INTEGER")],
    "editorial_plans": [("user_id", "INTEGER")],
    "social_accounts": [("user_id", "INTEGER")],
    "system_settings": [("user_id", "INTEGER")],
}


def _add_missing_columns(conn):
    """Add every column from _COLUMN_MIGRATIONS absent from the live table.

    Best-effort: a failure on one table is reported and the next table is
    still processed, so a partially-migrated database does not block startup.
    Table/column names come from the hardcoded dict above, so f-string SQL
    here does not interpolate untrusted input.
    """
    for table, cols in _COLUMN_MIGRATIONS.items():
        try:
            # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk);
            # row[1] is the column name.
            existing = {row[1] for row in conn.execute(text(f"PRAGMA table_info({table})"))}
            for col_name, col_def in cols:
                if col_name not in existing:
                    conn.execute(text(f"ALTER TABLE {table} ADD COLUMN {col_name} {col_def}"))
            conn.commit()
        except Exception as e:
            print(f"Migration warning for {table}: {e}")


def _relax_system_settings_unique(conn):
    """Drop the UNIQUE constraint on system_settings.key, if present.

    Allows per-user settings (same key, different user_id). SQLite cannot
    drop a table constraint in place, so the table is renamed, recreated
    without the constraint, repopulated, and the old copy dropped. Detection
    reads the original CREATE TABLE SQL from sqlite_master; the operation is
    idempotent because an already-migrated table no longer contains "UNIQUE".
    """
    try:
        create_sql_row = conn.execute(text(
            "SELECT sql FROM sqlite_master WHERE type='table' AND name='system_settings'"
        )).fetchone()
        if create_sql_row and "UNIQUE" in (create_sql_row[0] or "").upper():
            conn.execute(text("ALTER TABLE system_settings RENAME TO system_settings_old"))
            conn.execute(text("""
                CREATE TABLE system_settings (
                    id INTEGER PRIMARY KEY,
                    key VARCHAR(100) NOT NULL,
                    value JSON,
                    updated_at DATETIME,
                    user_id INTEGER REFERENCES users(id)
                )
            """))
            conn.execute(text("INSERT INTO system_settings SELECT id, key, value, updated_at, user_id FROM system_settings_old"))
            conn.execute(text("DROP TABLE system_settings_old"))
            conn.commit()
            print("Migration: system_settings UNIQUE constraint on key removed.")
    except Exception as e:
        print(f"Migration warning for system_settings UNIQUE fix: {e}")


def run_migrations(engine):
    """SQLite-safe migration: add new columns if they don't exist.

    Also recreates system_settings without the UNIQUE constraint on 'key'
    so the same key may exist once per user. Intended to run at startup
    against a SQLite database; each step is best-effort and idempotent.

    Args:
        engine: SQLAlchemy engine bound to the SQLite database to migrate.
    """
    with engine.connect() as conn:
        _add_missing_columns(conn)
        _relax_system_settings_unique(conn)