feat(db): migrate to PostgreSQL 16 standalone

- docker-compose.prod.yml: add postgres:16-alpine service with health check,
  dedicated prod_leopost_net, backup volume mount, connection pool
- requirements.txt: add psycopg2-binary==2.9.9
- database.py: remove SQLite-specific run_migrations(), add PG pool_size/
  max_overflow/pool_pre_ping, keep sqlite compat for dev
- main.py: remove run_migrations call, rely on create_all for PG
- scripts/migrate_sqlite_to_pg.py: one-shot data migration script

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Michele
2026-04-01 17:11:35 +02:00
parent b38419f3ee
commit cc1cb2d02a
5 changed files with 175 additions and 74 deletions

View File

@@ -0,0 +1,130 @@
#!/usr/bin/env python3
"""
One-shot data migration from SQLite → PostgreSQL for Leopost.

Run EXACTLY ONCE on the VPS, after the PostgreSQL container is up
but before starting the app.

Usage (on the VPS):
    cd /opt/lab-leopost-full-prod
    docker compose -f docker-compose.prod.yml up -d postgres
    # wait for postgres to become healthy (10-15 sec)
    python3 scripts/migrate_sqlite_to_pg.py \
        --sqlite data/leopost.db \
        --pg "postgresql://leopost:leopost_pg_2026@localhost:5433/leopost"
"""
import argparse
import json
import sqlite3
import sys

# psycopg2 is a prod-only dependency: fail fast with an actionable message
# instead of a raw ImportError traceback.
try:
    import psycopg2
    import psycopg2.extras
except ImportError:
    print("ERROR: psycopg2 non trovato. Installa con: pip install psycopg2-binary")
    sys.exit(1)
# Insertion order respects foreign-key dependencies: parent tables
# (users, subscription_codes, ...) are loaded before the tables that
# reference them.
TABLES_ORDER = [
    "users",
    "subscription_codes",
    "characters",
    "posts",
    "affiliate_links",
    "editorial_plans",
    "scheduled_posts",
    "social_accounts",
    "comments",
    "system_settings",
]
def get_sqlite_data(sqlite_path: str, tables=None) -> dict[str, list[dict]]:
    """Read all rows of the given tables from a SQLite database.

    Args:
        sqlite_path: path to the SQLite file (e.g. ``data/leopost.db``).
        tables: ordered table names to read; defaults to ``TABLES_ORDER``.

    Returns:
        Mapping of table name -> list of row dicts. A table missing from
        the SQLite file is reported and mapped to an empty list.
    """
    if tables is None:
        tables = TABLES_ORDER
    conn = sqlite3.connect(sqlite_path)
    conn.row_factory = sqlite3.Row  # makes rows dict()-convertible
    data: dict[str, list[dict]] = {}
    try:
        cursor = conn.cursor()
        for table in tables:
            try:
                # Table names come from a fixed allow-list, not user input,
                # so the f-string interpolation is safe here.
                cursor.execute(f"SELECT * FROM {table}")
                rows = [dict(r) for r in cursor.fetchall()]
                data[table] = rows
                print(f" SQLite {table}: {len(rows)} righe lette")
            except sqlite3.OperationalError as e:
                # Table absent in this (possibly older) DB: skip and continue.
                print(f" SQLite {table}: saltata ({e})")
                data[table] = []
    finally:
        # The original leaked the connection if anything other than an
        # OperationalError was raised mid-loop.
        conn.close()
    return data
def migrate(sqlite_path: str, pg_dsn: str):
    """Copy all rows from the SQLite database into PostgreSQL.

    Reads every table listed in ``TABLES_ORDER`` from *sqlite_path* and
    inserts the rows into the PostgreSQL database at *pg_dsn*, skipping
    conflicting or malformed rows, then realigns the serial sequences.
    Commits once at the end; the connection is always closed.

    Args:
        sqlite_path: path to the source SQLite file.
        pg_dsn: PostgreSQL DSN, e.g. ``postgresql://user:pass@host:5433/db``.
    """
    print(f"\n[1/3] Lettura SQLite: {sqlite_path}")
    data = get_sqlite_data(sqlite_path)
    print(f"\n[2/3] Connessione PostgreSQL...")
    pg = psycopg2.connect(pg_dsn)
    pg.autocommit = False  # single transaction: commit everything at the end
    try:
        cur = pg.cursor()
        print(f"\n[3/3] Inserimento dati in PostgreSQL...")
        # Temporarily disable FK triggers so the load cannot fail on ordering.
        cur.execute("SET session_replication_role = replica;")
        for table in TABLES_ORDER:
            rows = data.get(table, [])
            if not rows:
                print(f" {table}: nessuna riga, saltata")
                continue
            cols = list(rows[0].keys())
            placeholders = ", ".join(["%s"] * len(cols))
            col_names = ", ".join(cols)
            sql = f"INSERT INTO {table} ({col_names}) VALUES ({placeholders}) ON CONFLICT DO NOTHING"
            inserted = 0
            for row in rows:
                values = []
                for col in cols:
                    v = row[col]
                    # SQLite stores JSON as TEXT; psycopg2 needs a Json wrapper
                    # for JSON/JSONB columns.
                    # NOTE(review): heuristic — a plain-text column whose value
                    # happens to start with '{' or '[' gets converted too;
                    # confirm no TEXT column in this schema can look like JSON.
                    if isinstance(v, str) and v and v[0] in ("{", "["):
                        try:
                            v = psycopg2.extras.Json(json.loads(v))
                        except json.JSONDecodeError:
                            pass
                    values.append(v)
                # BUG FIX: in PostgreSQL a failed statement aborts the whole
                # transaction — with autocommit off, the original's bare
                # try/except left every later statement (and the final commit)
                # failing with "current transaction is aborted". A per-row
                # savepoint lets us skip just the bad row.
                cur.execute("SAVEPOINT row_sp")
                try:
                    cur.execute(sql, values)
                    cur.execute("RELEASE SAVEPOINT row_sp")
                    inserted += 1
                except Exception as e:
                    cur.execute("ROLLBACK TO SAVEPOINT row_sp")
                    print(f" WARN riga in {table}: {e} — saltata")
            print(f" {table}: {inserted}/{len(rows)} righe inserite")
        # Re-enable FK triggers, then realign serial sequences with the
        # imported ids so future inserts don't collide.
        cur.execute("SET session_replication_role = DEFAULT;")
        print("\n Resetting sequences PostgreSQL...")
        for table in TABLES_ORDER:
            # pg_get_serial_sequence yields NULL for tables without a serial
            # 'id' column; setval is strict, so a NULL sequence should be a
            # harmless no-op — verify against the actual schema.
            cur.execute(f"""
                SELECT setval(
                    pg_get_serial_sequence('{table}', 'id'),
                    COALESCE((SELECT MAX(id) FROM {table}), 1)
                )
            """)
        pg.commit()
        cur.close()
    finally:
        # The original leaked the connection on any exception after connect().
        pg.close()
    print("\n✓ Migrazione completata con successo.")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--sqlite", required=True, help="Path al file SQLite (es. data/leopost.db)")
parser.add_argument("--pg", required=True, help="DSN PostgreSQL (es. postgresql://leopost:pass@localhost:5433/leopost)")
args = parser.parse_args()
migrate(args.sqlite, args.pg)