Refactoring fixes
This commit is contained in:
232
backend/app/scripts/unified_db_sync_1.0.py
Normal file
232
backend/app/scripts/unified_db_sync_1.0.py
Normal file
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Unified Database Synchronizer with Deep Constraint & Index Support
|
||||
|
||||
Dynamically imports all SQLAlchemy models, compares metadata with live database,
|
||||
and creates missing tables, columns, unique constraints, and indexes.
|
||||
|
||||
Safety First:
|
||||
- NEVER drops tables, columns, constraints, or indexes.
|
||||
- Prints planned SQL before execution.
|
||||
- Requires confirmation for destructive operations (none in this script).
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text, UniqueConstraint, Index
|
||||
from sqlalchemy.schema import CreateTable, AddConstraint, CreateIndex
|
||||
from sqlalchemy.sql.ddl import CreateColumn
|
||||
|
||||
# Add backend to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """
    Dynamically import every module in ``app/models`` so Base.metadata is populated.

    Returns:
        list[str]: dotted names of the model modules that imported successfully.
    """
    models_dir = Path(__file__).parent.parent / "models"
    imported = []

    # Sort for a deterministic import order (glob order is filesystem-dependent).
    for py_file in sorted(models_dir.glob("*.py")):
        if py_file.name == "__init__.py":
            continue
        module_name = f"app.models.{py_file.stem}"
        try:
            importlib.import_module(module_name)
        except Exception as e:
            # Best-effort: one broken model module should not abort the whole sync.
            print(f"⚠️ Could not import {module_name}: {e}")
        else:
            imported.append(module_name)
            print(f"✅ Imported {module_name}")

    # Also ensure the package __init__ is loaded (it imports many models manually)
    import app.models
    print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
    return imported
|
||||
|
||||
def _ensure_marketplace_enums(connection, apply: bool) -> None:
    """Ensure the marketplace enum types exist (idempotent DO-block DDL); dry-run prints only."""
    print("\n🔧 Ensuring enum types in marketplace schema...")
    enum_ddl = {
        'moderation_status': """
            DO $$
            BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'moderation_status' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
            CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected');
            END IF;
            END $$;
        """,
        'source_type': """
            DO $$
            BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'source_type' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
            CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import');
            END IF;
            END $$;
        """,
    }
    for type_name, ddl in enum_ddl.items():
        if apply:
            connection.execute(text(ddl))
        else:
            # Fix: previously this DDL ran even in dry-run mode, violating the
            # script's "prints planned SQL before execution" safety contract.
            print(f" SQL (enum '{type_name}'): {ddl.strip()}")
    print("✅ Enum types ensured.")


def _add_missing_columns(connection, inspector, table, schema: str, apply: bool) -> None:
    """Emit/execute ALTER TABLE ... ADD COLUMN for model columns absent from the DB."""
    db_columns = {c['name'] for c in inspector.get_columns(table.name, schema=schema)}
    missing_cols = [col for col in table.columns if col.name not in db_columns]

    if not missing_cols:
        print(f"✅ Table {schema}.{table.name} columns are up‑to‑date.")
        return

    print(f"⚠️ Table {schema}.{table.name} missing columns: {[c.name for c in missing_cols]}")
    for col in missing_cols:
        # Compile the type against the live connection's dialect.
        col_type = col.type.compile(dialect=connection.dialect)
        sql = f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
        if col.nullable is False:
            # NOTE(review): adding NOT NULL to a populated table fails unless a
            # server-side DEFAULT is also emitted — confirm against target data.
            sql += " NOT NULL"
        if col.default is not None and not callable(col.default.arg):
            # Only scalar defaults translate to SQL; a Python-side callable
            # default would otherwise render as '<function ...>' (invalid SQL).
            sql += f" DEFAULT {col.default.arg}"
        print(f" SQL: {sql}")
        if apply:
            connection.execute(text(sql))
            print(f"✅ Column {col.name} added.")


def _add_missing_unique_constraints(connection, inspector, table, schema: str, apply: bool) -> None:
    """Create unique constraints defined on the model but missing from the DB (matched by column set)."""
    # Key by sorted column names, since DB constraint names may differ from model names.
    db_unique_map = {
        tuple(sorted(uc['column_names'])): uc['name']
        for uc in inspector.get_unique_constraints(table.name, schema=schema)
    }

    for uc in (c for c in table.constraints if isinstance(c, UniqueConstraint)):
        uc_columns = tuple(sorted(col.name for col in uc.columns))
        if uc_columns in db_unique_map:
            print(f"✅ Unique constraint on {uc_columns} exists.")
            continue
        constraint_name = uc.name or f"uq_{table.name}_{'_'.join(uc_columns)}"
        columns_sql = ', '.join(f'"{col}"' for col in uc_columns)
        sql = f'ALTER TABLE "{schema}"."{table.name}" ADD CONSTRAINT "{constraint_name}" UNIQUE ({columns_sql})'
        print(f"⚠️ Missing unique constraint on {schema}.{table.name} columns {uc_columns}")
        print(f" SQL: {sql}")
        if apply:
            connection.execute(text(sql))
            print(f"✅ Unique constraint {constraint_name} added.")


def _add_missing_indexes(connection, inspector, table, schema: str, apply: bool) -> None:
    """Create indexes defined on the model but missing from the DB (matched by column set)."""
    db_index_map = {}
    for idx in inspector.get_indexes(table.name, schema=schema):
        # Expression indexes report None entries in column_names; filter them
        # out so sorted() does not raise TypeError on None comparison.
        named_cols = [c for c in idx['column_names'] if c is not None]
        db_index_map[tuple(sorted(named_cols))] = idx['name']

    for idx in table.indexes:
        idx_columns = tuple(sorted(col.name for col in idx.columns))
        if idx_columns in db_index_map:
            print(f"✅ Index on {idx_columns} exists.")
            continue
        index_name = idx.name or f"idx_{table.name}_{'_'.join(idx_columns)}"
        columns_sql = ', '.join(f'"{col}"' for col in idx_columns)
        unique_sql = "UNIQUE " if idx.unique else ""
        sql = f'CREATE {unique_sql}INDEX "{index_name}" ON "{schema}"."{table.name}" ({columns_sql})'
        print(f"⚠️ Missing index on {schema}.{table.name} columns {idx_columns}")
        print(f" SQL: {sql}")
        if apply:
            connection.execute(text(sql))
            print(f"✅ Index {index_name} added.")


async def compare_and_repair(apply: bool = False):
    """
    Compare SQLAlchemy metadata with the live database and create missing
    tables, columns, unique constraints, and indexes.

    Never drops anything. If ``apply`` is False (dry-run), only prints the
    planned SQL statements without executing them.
    """
    print("🔗 Connecting to database...")
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def get_diff_and_repair(connection):
        # Runs on the sync side of the async connection (via run_sync).
        inspector = inspect(connection)

        # All schemas referenced by the models.
        expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
        print(f"📋 Expected schemas: {expected_schemas}")

        if 'marketplace' in expected_schemas:
            _ensure_marketplace_enums(connection, apply)

        for schema in expected_schemas:
            print(f"\n--- 🔍 Checking schema '{schema}' ---")

            if schema not in inspector.get_schema_names():
                print(f"❌ Schema '{schema}' missing. Creating...")
                if apply:
                    connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
                    print(f"✅ Schema '{schema}' created.")
                else:
                    print(f" SQL: CREATE SCHEMA IF NOT EXISTS \"{schema}\"")

            model_tables = [t for t in Base.metadata.sorted_tables if t.schema == schema]
            db_tables = inspector.get_table_names(schema=schema)

            for table in model_tables:
                if table.name not in db_tables:
                    print(f"❌ Missing table: {schema}.{table.name}")
                    create_stmt = CreateTable(table)
                    # Fix: compile against the connection's dialect — the
                    # AsyncEngine is not a valid bind for compile().
                    sql_str = str(create_stmt.compile(dialect=connection.dialect))
                    print(f" SQL: {sql_str}")
                    if apply:
                        connection.execute(create_stmt)
                        print(f"✅ Table {schema}.{table.name} created.")
                    # A freshly created table already carries its constraints/indexes.
                    continue

                _add_missing_columns(connection, inspector, table, schema, apply)
                _add_missing_unique_constraints(connection, inspector, table, schema, apply)
                _add_missing_indexes(connection, inspector, table, schema, apply)

        print("\n--- ✅ Schema synchronization complete. ---")

    # Single transaction: everything commits or rolls back together.
    async with engine.begin() as conn:
        await conn.run_sync(get_diff_and_repair)

    await engine.dispose()
|
||||
|
||||
async def main():
    """CLI entry point: parse flags, import all models, then synchronize the schema."""
    import argparse
    parser = argparse.ArgumentParser(description="Unified Database Synchronizer")
    parser.add_argument('--apply', action='store_true', help='Apply changes to database (otherwise dry‑run)')
    args = parser.parse_args()

    print("🚀 Unified Database Synchronizer")
    print("=" * 50)

    # Step 1: Dynamic import
    print("\n📥 Step 1: Dynamically importing all models...")
    dynamic_import_models()

    # Step 2: Compare and repair
    print("\n🔧 Step 2: Comparing with database and repairing...")
    await compare_and_repair(apply=args.apply)

    # Step 3: Final verification
    print("\n📊 Step 3: Final verification...")
    try:
        from app.tests_internal.diagnostics.compare_schema import compare
    except ImportError:
        print("⚠️ compare_schema module not found, skipping verification.")
    else:
        # Fix: run outside the try so an ImportError raised *inside* compare()
        # is not silently misreported as "module not found".
        await compare()

    print("\n✨ Synchronization finished successfully!")
|
||||
|
||||
# Script entry point: run the async main() under a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
|
||||
Reference in New Issue
Block a user