refaktorálás javításai
This commit is contained in:
@@ -1,38 +0,0 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
from sqlalchemy import text
|
||||
from app.db.session import engine
|
||||
from datetime import datetime
|
||||
|
||||
async def log_discovery(conn, category, brand, model, action):
    """Record one discovery event in data.bot_discovery_logs.

    All values are passed as bind parameters, so the statement is safe
    against SQL injection.
    """
    stmt = text("""
INSERT INTO data.bot_discovery_logs (category, brand_name, model_name, action_taken)
VALUES (:c, :b, :m, :a)
""")
    params = {"c": category, "b": brand, "m": model, "a": action}
    await conn.execute(stmt, params)
|
||||
|
||||
async def run_discovery():
    """Discover vehicle brands from the free NHTSA vPIC API and store new ones.

    Opens a single transaction via ``engine.begin()`` (committed automatically
    on successful exit), fetches the list of all makes, inserts any brand not
    yet present in ``data.vehicle_brands``, and logs each insertion through
    :func:`log_discovery`.
    """
    async with engine.begin() as conn:
        print(f"🚀 Jármű felfedezés indul: {datetime.now()}")

        # Currently we focus on the CAR category via a free external API
        # (NHTSA). This is an example of how the system grows dynamically.
        async with httpx.AsyncClient() as client:
            # Fetch all car makes.
            response = await client.get("https://vpic.nhtsa.dot.gov/api/vehicles/getallmakes?format=json")
            if response.status_code == 200:
                makes = response.json().get('Results', [])[:100]  # First 100 as a test batch

                for make in makes:
                    brand_name = make['Make_Name'].strip()
                    # Check whether the brand already exists.
                    res = await conn.execute(text("SELECT id FROM data.vehicle_brands WHERE name = :n"), {"n": brand_name})
                    if not res.scalar():
                        await conn.execute(text("INSERT INTO data.vehicle_brands (category_id, name) VALUES (1, :n)"), {"n": brand_name})
                        await log_discovery(conn, "CAR", brand_name, "ALL", "NEW_BRAND")
                        print(f"✨ Új márka találva: {brand_name}")

        # FIX: removed the explicit `await conn.commit()` here —
        # `engine.begin()` commits the transaction on successful exit, so an
        # explicit commit inside the block is redundant and can raise on
        # SQLAlchemy 2.x when the context manager tries to finalize it.
        print("✅ Bot futása befejeződött.")


if __name__ == "__main__":
    asyncio.run(run_discovery())
|
||||
24
backend/app/scripts/pre_start.sh
Normal file
24
backend/app/scripts/pre_start.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
set -e

echo "🚀 Service Finder Pre‑Start Schema Synchronization"
echo "=================================================="

# Ensure we are in the correct directory (should be /app inside container)
cd /app

# Run the unified database synchronizer with --apply flag.
# FIX: with `set -e`, an unguarded failing command aborts the script before
# any `$?` check can run, so the old `if [ $? -eq 0 ]` branch was dead code.
# Using the command itself as the `if` condition both performs the check and
# suppresses `set -e` for that command.
echo "📦 Running unified_db_sync.py --apply..."
if python -m app.scripts.unified_db_sync --apply; then
    echo "✅ Schema synchronization completed successfully."
else
    echo "❌ Schema synchronization failed. Exiting."
    exit 1
fi

# Start the FastAPI application. `exec` replaces the shell so uvicorn
# receives container signals (e.g. SIGTERM) directly.
echo "🌐 Starting FastAPI server..."
exec uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
@@ -298,6 +298,34 @@ async def seed_params():
|
||||
"description": "Robot 1 - Számla OCR prompt",
|
||||
"scope_level": "global"
|
||||
},
|
||||
|
||||
# --- 13. SOCIAL & VERIFIED REVIEWS (Epic 4.1 - #66) ---
|
||||
{
|
||||
"key": "REVIEW_WINDOW_DAYS",
|
||||
"value": 30,
|
||||
"category": "social",
|
||||
"description": "Értékelési időablak napokban a tranzakció után",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "TRUST_SCORE_INFLUENCE_FACTOR",
|
||||
"value": 1.0,
|
||||
"category": "social",
|
||||
"description": "Trust‑score súlyozási tényező a szerviz értékeléseknél",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "REVIEW_RATING_WEIGHTS",
|
||||
"value": {
|
||||
"price": 0.25,
|
||||
"quality": 0.35,
|
||||
"time": 0.20,
|
||||
"communication": 0.20
|
||||
},
|
||||
"category": "social",
|
||||
"description": "Értékelési dimenziók súlyai az összpontszám számításához",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "ai_prompt_gold_data",
|
||||
"value": "Készíts technikai adatlapot a(z) {make} {model} típushoz a megadott adatok alapján: {context}. Csak hiteles JSON-t adj!",
|
||||
|
||||
169
backend/app/scripts/sync_engine.py
Normal file
169
backend/app/scripts/sync_engine.py
Normal file
@@ -0,0 +1,169 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Universal Schema Synchronizer
|
||||
|
||||
Dynamically imports all SQLAlchemy models from app.models, compares them with the live database,
|
||||
and creates missing tables/columns without dropping anything.
|
||||
|
||||
Safety First:
|
||||
- NEVER drops tables or columns.
|
||||
- Prints planned SQL before execution.
|
||||
- Requires confirmation for destructive operations (none in this script).
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.schema import CreateTable, AddConstraint
|
||||
from sqlalchemy.sql.ddl import CreateColumn
|
||||
|
||||
# Add backend to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """Import every module under app/models so Base.metadata is populated.

    Returns:
        list[str]: dotted names of the modules that imported successfully.
    """
    package_dir = Path(__file__).parent.parent / "models"
    loaded = []

    for candidate in package_dir.glob("*.py"):
        if candidate.name == "__init__.py":
            continue
        dotted = f"app.models.{candidate.stem}"
        try:
            importlib.import_module(dotted)
        except Exception as exc:
            print(f"⚠️ Could not import {dotted}: {exc}")
        else:
            loaded.append(dotted)
            print(f"✅ Imported {dotted}")

    # The package __init__ imports many models explicitly; load it as well.
    import app.models

    print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
    return loaded
|
||||
|
||||
async def compare_and_repair():
    """
    Compare SQLAlchemy metadata with live database and create missing tables/columns.

    Additive only: creates missing schemas, enum types, tables and columns;
    never drops or alters existing objects. The inspection/DDL work runs on
    the sync side via ``conn.run_sync`` because SQLAlchemy inspectors are
    synchronous APIs.
    """
    print("🔗 Connecting to database...")
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def get_diff_and_repair(connection):
        # `connection` is the sync Connection handed over by run_sync().
        inspector = inspect(connection)

        # Get all schemas from models. Tables without an explicit schema are
        # not listed here — they live in the default search path.
        expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
        print(f"📋 Expected schemas: {expected_schemas}")

        # Ensure enum types exist in marketplace schema before any CREATE
        # TABLE that references them. The DO $$ blocks are idempotent.
        if 'marketplace' in expected_schemas:
            print("\n🔧 Ensuring enum types in marketplace schema...")
            # moderation_status enum
            connection.execute(text("""
                DO $$
                BEGIN
                    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'moderation_status' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                        CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected');
                    END IF;
                END $$;
            """))
            # source_type enum
            connection.execute(text("""
                DO $$
                BEGIN
                    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'source_type' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                        CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import');
                    END IF;
                END $$;
            """))
            print("✅ Enum types ensured.")

        for schema in expected_schemas:
            print(f"\n--- 🔍 Checking schema '{schema}' ---")

            # Check if schema exists; create it when missing.
            db_schemas = inspector.get_schema_names()
            if schema not in db_schemas:
                print(f"❌ Schema '{schema}' missing. Creating...")
                connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
                print(f"✅ Schema '{schema}' created.")

            # Get tables in this schema from models vs. tables in the DB.
            model_tables = [t for t in Base.metadata.sorted_tables if t.schema == schema]
            db_tables = inspector.get_table_names(schema=schema)

            for table in model_tables:
                if table.name not in db_tables:
                    print(f"❌ Missing table: {schema}.{table.name}")
                    # Generate CREATE TABLE statement.
                    create_stmt = CreateTable(table)
                    # Print SQL for debugging. Compiling against the async
                    # engine only uses its dialect, not a live connection.
                    sql_str = str(create_stmt.compile(bind=engine))
                    print(f" SQL: {sql_str}")
                    connection.execute(create_stmt)
                    print(f"✅ Table {schema}.{table.name} created.")
                else:
                    # Table exists — check for columns present in the model
                    # but absent from the database.
                    db_columns = {c['name']: c for c in inspector.get_columns(table.name, schema=schema)}
                    model_columns = table.columns

                    missing_cols = []
                    for col in model_columns:
                        if col.name not in db_columns:
                            missing_cols.append(col)

                    if missing_cols:
                        print(f"⚠️ Table {schema}.{table.name} missing columns: {[c.name for c in missing_cols]}")
                        for col in missing_cols:
                            # Generate ADD COLUMN statement.
                            col_type = col.type.compile(dialect=engine.dialect)
                            sql = f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
                            if col.nullable is False:
                                sql += " NOT NULL"
                            if col.default is not None:
                                # Handle default values (simplistic)
                                # NOTE(review): col.default.arg may be a callable
                                # (e.g. uuid4 / func.now()); interpolating it raw
                                # would produce invalid SQL in that case — confirm
                                # the models only use plain scalar defaults.
                                sql += f" DEFAULT {col.default.arg}"
                            print(f" SQL: {sql}")
                            connection.execute(text(sql))
                            print(f"✅ Column {col.name} added.")
                    else:
                        print(f"✅ Table {schema}.{table.name} is up‑to‑date.")

        print("\n--- ✅ Schema synchronization complete. ---")

    # engine.begin() commits the whole repair transaction on success.
    async with engine.begin() as conn:
        await conn.run_sync(get_diff_and_repair)

    await engine.dispose()
|
||||
|
||||
async def main():
    """Entry point: import all models, repair the schema, then verify.

    Runs three sequential steps and prints progress for each.
    """
    print("🚀 Universal Schema Synchronizer")
    print("=" * 50)

    # Step 1: Dynamic import — populate Base.metadata.
    print("\n📥 Step 1: Dynamically importing all models...")
    dynamic_import_models()

    # Step 2: Compare and repair the live database.
    print("\n🔧 Step 2: Comparing with database and repairing...")
    await compare_and_repair()

    # Step 3: Final verification.
    print("\n📊 Step 3: Final verification...")
    # Run compare_schema.py logic to confirm everything is green.
    # FIX: guard the import — the diagnostics module is optional in some
    # deployments (the 1.0 sync script already guards it the same way).
    try:
        from app.tests_internal.diagnostics.compare_schema import compare
        await compare()
    except ImportError:
        print("⚠️ compare_schema module not found, skipping verification.")

    print("\n✨ Synchronization finished successfully!")


if __name__ == "__main__":
    asyncio.run(main())
|
||||
133
backend/app/scripts/unified_db_audit.py
Normal file
133
backend/app/scripts/unified_db_audit.py
Normal file
@@ -0,0 +1,133 @@
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy.dialects.postgresql import JSONB, ENUM, NUMERIC
|
||||
|
||||
# Elérési utak beállítása
|
||||
BASE_DIR = Path(__file__).resolve().parents[2]
|
||||
sys.path.append(str(BASE_DIR))
|
||||
|
||||
try:
|
||||
from app.database import Base, engine
|
||||
from app.core.config import settings
|
||||
except ImportError as e:
|
||||
print(f"❌ Hiba az alapvető importoknál: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
def dynamic_import_models(models_dir: Path):
    """Walk the app/models directory recursively and import every .py file
    so that Base.metadata.tables gets populated."""
    print(f"🔍 Modellek dinamikus felderítése itt: {models_dir}...")
    loaded = 0
    for dirpath, _, filenames in os.walk(models_dir):
        candidates = [f for f in filenames if f.endswith(".py") and f != "__init__.py"]
        for filename in candidates:
            source = Path(dirpath) / filename
            # Build the dotted module name (e.g. app.models.identity.user).
            dotted = str(source.relative_to(BASE_DIR)).replace(os.sep, ".").replace(".py", "")
            try:
                spec = importlib.util.spec_from_file_location(dotted, source)
                mod = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(mod)
            except Exception as e:
                print(f" ⚠️ Nem sikerült importálni: {dotted} -> {e}")
            else:
                loaded += 1
    print(f"✅ {loaded} modell fájl sikeresen betöltve a memóriába.\n")
|
||||
|
||||
async def run_unified_audit():
    """Audit the live database against the SQLAlchemy model metadata.

    Read-only: reports missing schemas/tables/columns and deep type
    mismatches (JSONB, NUMERIC precision/scale, ENUM existence), then prints
    copy-paste SQL suggestions. No DDL is ever executed.
    """
    # 1. Load the models so Base.metadata is populated.
    models_path = BASE_DIR / "app" / "models"
    dynamic_import_models(models_path)

    print(f"🔗 Kapcsolódás az adatbázishoz: {settings.POSTGRES_DB}")

    async with engine.connect() as conn:
        # SQLAlchemy inspectors are sync-only, so build/use them via run_sync.
        inspector = await conn.run_sync(inspect)
        all_db_schemas = await conn.run_sync(lambda c: inspector.get_schema_names())

        # Schemas explicitly declared in the models.
        expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})

        mismatches = 0      # total number of discrepancies found
        suggestions = []    # copy-paste SQL hints printed at the end

        for sc in expected_schemas:
            print(f"\n--- 🛰️ DOMAIN AUDIT: '{sc}' ---")
            if sc not in all_db_schemas:
                print(f"❌ KRITIKUS: A(z) '{sc}' séma hiányzik!")
                mismatches += 1
                continue

            db_tables = await conn.run_sync(lambda c: inspector.get_table_names(schema=sc))
            model_tables = [t for t in Base.metadata.sorted_tables if t.schema == sc]

            for table in model_tables:
                t_name = table.name
                if t_name not in db_tables:
                    print(f"❌ HIÁNYZÓ TÁBLA: {sc}.{t_name}")
                    mismatches += 1
                    suggestions.append(f"-- Hozd létre a táblát: {sc}.{t_name}")
                    continue

                # Columns as seen by the database.
                db_cols = {c['name']: c for c in await conn.run_sync(
                    lambda c: inspector.get_columns(t_name, schema=sc)
                )}

                # Columns as declared by the model.
                for col in table.columns:
                    if col.name not in db_cols:
                        print(f"⚠️ HIÁNYZÓ OSZLOP: {sc}.{t_name}.{col.name}")
                        mismatches += 1
                        suggestions.append(f"ALTER TABLE {sc}.{t_name} ADD COLUMN {col.name} {col.type};")
                    else:
                        # DEEP TYPE CHECKS
                        db_col = db_cols[col.name]
                        db_type_str = str(db_col['type']).upper()

                        # 1. JSONB check — model says JSONB, DB says otherwise.
                        if isinstance(col.type, JSONB) and "JSONB" not in db_type_str:
                            print(f"🔬 TÍPUS ELTÉRÉS [JSONB]: {sc}.{t_name}.{col.name} (DB: {db_type_str})")
                            mismatches += 1

                        # 2. NUMERIC precision/scale comparison.
                        elif isinstance(col.type, NUMERIC):
                            m_prec, m_scale = col.type.precision, col.type.scale
                            d_prec, d_scale = db_col['type'].precision, db_col['type'].scale
                            if m_prec != d_prec or m_scale != d_scale:
                                print(f"🔬 TÍPUS ELTÉRÉS [NUMERIC]: {sc}.{t_name}.{col.name} (Kód: {m_prec},{m_scale} vs DB: {d_prec},{d_scale})")
                                mismatches += 1

                        # 3. ENUM existence check.
                        # NOTE(review): matches by typname only, across all
                        # namespaces — a same-named enum in another schema
                        # would mask a missing one here; confirm intent.
                        elif isinstance(col.type, ENUM):
                            enum_name = col.type.name
                            res = await conn.execute(text(
                                "SELECT EXISTS (SELECT 1 FROM pg_type WHERE typname = :name)"),
                                {"name": enum_name}
                            )
                            if not res.scalar():
                                print(f"🔬 HIÁNYZÓ ENUM TÍPUS: {enum_name} ({sc}.{t_name}.{col.name})")
                                mismatches += 1

                # Printed for every existing table, even when it had findings.
                print(f"✅ {sc}.{t_name:30} | Átvizsgálva.")

    # Summary report.
    print("\n" + "="*50)
    if mismatches == 0:
        print("✨ GRATULÁLOK! A fájlrendszer és az adatbázis szinkronban van. ✨")
    else:
        print(f"⚠️ ÖSSZESEN {mismatches} ELTÉRÉS TALÁLHATÓ!")
        print("\nJAVÍTÁSI JAVASLATOK (Copy-Paste SQL):")
        for s in suggestions:
            print(f" {s}")
    print("="*50 + "\n")


if __name__ == "__main__":
    asyncio.run(run_unified_audit())
|
||||
135
backend/app/scripts/unified_db_sync.py
Normal file
135
backend/app/scripts/unified_db_sync.py
Normal file
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env python3
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
import uuid
|
||||
import enum
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text, UniqueConstraint, Index, Enum as SQLEnum
|
||||
from sqlalchemy.schema import CreateTable
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
# Backend hozzáadása a path-hoz
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """Import every module in app/models (plus the package itself) so that
    Base.metadata.tables is fully populated before syncing."""
    package_dir = Path(__file__).parent.parent / "models"
    for entry in package_dir.glob("*.py"):
        if entry.name == "__init__.py":
            continue
        dotted = f"app.models.{entry.stem}"
        try:
            importlib.import_module(dotted)
        except Exception as e:
            print(f"⚠️ Could not import {dotted}: {e}")
        else:
            print(f"✅ Imported {dotted}")

    # The package __init__ also imports models explicitly.
    import app.models
    print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
|
||||
|
||||
async def compare_and_repair(apply: bool = False):
    """Diff Base.metadata against the live database and (optionally) repair it.

    apply=False -> report differences only (no table/column DDL is executed;
                   the session-level SET search_path still runs).
    apply=True  -> create missing extensions, schemas, enum types, tables and
                   columns. Nothing is ever dropped.
    """
    print(f"🔗 Connecting to database... (Apply mode: {apply})")
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def get_diff_and_repair(connection):
        # Sync-side worker executed through conn.run_sync().
        inspector = inspect(connection)

        # 1. EXTENSIONS AND SCHEMAS
        schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
        db_schemas = inspector.get_schema_names()

        if apply:
            print("🔧 Ensuring extensions and schemas...")
            connection.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"'))
            # PostGIS is optional — ignore failure if it is not installed.
            try: connection.execute(text('CREATE EXTENSION IF NOT EXISTS "postgis"'))
            except Exception: pass

            for schema in schemas:
                if schema not in db_schemas:
                    connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))

        # Set the search path so custom (enum) types resolve without
        # schema qualification during the checks below.
        search_path = ", ".join([f'"{s}"' for s in schemas]) + ", public"
        connection.execute(text(f"SET search_path TO {search_path}"))

        # 2. SMART ENUM CREATION (with case-insensitive support)
        print("🔧 Checking custom Enum types...")
        for table in Base.metadata.sorted_tables:
            for col in table.columns:
                if isinstance(col.type, SQLEnum):
                    enum_name = col.type.name
                    schema = table.schema or 'public'
                    # Does the type already exist in this schema?
                    check_enum = connection.execute(text(
                        "SELECT 1 FROM pg_type t JOIN pg_namespace n ON n.oid = t.typnamespace "
                        "WHERE t.typname = :name AND n.nspname = :schema"
                    ), {"name": enum_name, "schema": schema}).fetchone()

                    if not check_enum and apply:
                        # TRICK: also add lower- and upper-case variants of every
                        # label so column DEFAULTs do not fail on case mismatch.
                        all_variants = set()
                        for val in col.type.enums:
                            all_variants.add(val)
                            all_variants.add(val.lower())
                            all_variants.add(val.upper())

                        labels = ", ".join([f"'{l}'" for l in sorted(list(all_variants))])
                        print(f"➕ Creating Enum {schema}.{enum_name} with variants...")
                        connection.execute(text(f'CREATE TYPE "{schema}"."{enum_name}" AS ENUM ({labels})'))

        # 3. TABLE AND COLUMN SYNCHRONIZATION
        # Cache live table names per schema (None = default search path).
        db_tables_cache = {s: inspector.get_table_names(schema=s) for s in schemas}
        db_tables_cache[None] = inspector.get_table_names()

        for table in Base.metadata.sorted_tables:
            if table.name not in db_tables_cache.get(table.schema, []):
                print(f"❌ Missing table: {table.schema}.{table.name}")
                if apply:
                    try:
                        connection.execute(CreateTable(table))
                        print(f"✅ Table {table.schema}.{table.name} created.")
                    except Exception as e:
                        print(f"🔥 Error creating {table.name}: {e}")
                # Nothing further to diff for a missing/new table.
                continue

            # Column synchronization
            db_cols = {c['name']: c for c in inspector.get_columns(table.name, schema=table.schema)}
            for col in table.columns:
                if col.name not in db_cols:
                    col_type = col.type.compile(dialect=connection.dialect)
                    sql = f'ALTER TABLE "{table.schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
                    if not col.nullable: sql += " NOT NULL"

                    if col.default is not None:
                        arg = col.default.arg
                        if callable(arg):
                            # Heuristic mapping of Python-side callables to SQL
                            # defaults; columns matching neither rule get no
                            # DEFAULT clause.
                            if "uuid" in col.name.lower(): sql += " DEFAULT gen_random_uuid()"
                            elif "now" in str(arg).lower(): sql += " DEFAULT NOW()"
                        elif isinstance(arg, enum.Enum):
                            sql += f" DEFAULT '{arg.value}'"
                        else:
                            val = f"'{arg}'" if isinstance(arg, str) else arg
                            sql += f" DEFAULT {val}"

                    print(f"⚠️ Adding column: {table.schema}.{table.name}.{col.name}")
                    if apply: connection.execute(text(sql))

        print("\n--- ✅ Synchronization cycle complete. ---")

    # engine.begin() commits everything on success, rolls back on error.
    async with engine.begin() as conn:
        await conn.run_sync(get_diff_and_repair)
    await engine.dispose()
|
||||
|
||||
async def main():
    """CLI entry point: parse --apply, load the models, then run the sync."""
    import argparse

    cli = argparse.ArgumentParser()
    cli.add_argument('--apply', action='store_true')
    options = cli.parse_args()

    dynamic_import_models()
    await compare_and_repair(apply=options.apply)
    print("\n✨ Minden tábla és típus szinkronizálva!")


if __name__ == "__main__":
    asyncio.run(main())
|
||||
232
backend/app/scripts/unified_db_sync_1.0.py
Normal file
232
backend/app/scripts/unified_db_sync_1.0.py
Normal file
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Unified Database Synchronizer with Deep Constraint & Index Support
|
||||
|
||||
Dynamically imports all SQLAlchemy models, compares metadata with live database,
|
||||
and creates missing tables, columns, unique constraints, and indexes.
|
||||
|
||||
Safety First:
|
||||
- NEVER drops tables, columns, constraints, or indexes.
|
||||
- Prints planned SQL before execution.
|
||||
- Requires confirmation for destructive operations (none in this script).
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text, UniqueConstraint, Index
|
||||
from sqlalchemy.schema import CreateTable, AddConstraint, CreateIndex
|
||||
from sqlalchemy.sql.ddl import CreateColumn
|
||||
|
||||
# Add backend to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """Import every .py module under app/models so Base.metadata is populated.

    Returns:
        list[str]: dotted names of the modules that imported successfully.
    """
    models_dir = Path(__file__).parent.parent / "models"
    imported = []

    for entry in models_dir.glob("*.py"):
        if entry.name == "__init__.py":
            continue
        dotted_name = f"app.models.{entry.stem}"
        try:
            importlib.import_module(dotted_name)
        except Exception as exc:
            print(f"⚠️ Could not import {dotted_name}: {exc}")
            continue
        imported.append(dotted_name)
        print(f"✅ Imported {dotted_name}")

    # The package __init__ imports several models explicitly as well.
    import app.models

    print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
    return imported
|
||||
|
||||
async def compare_and_repair(apply: bool = False):
    """
    Compare SQLAlchemy metadata with live database and create missing
    tables, columns, unique constraints, and indexes.

    If apply is False, only prints SQL statements without executing.
    Additive only: nothing is ever dropped.
    """
    print("🔗 Connecting to database...")
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def get_diff_and_repair(connection):
        # Sync-side worker executed through conn.run_sync() — SQLAlchemy
        # inspectors and DDL execution are synchronous APIs.
        inspector = inspect(connection)

        # Schemas explicitly declared by the models.
        expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
        print(f"📋 Expected schemas: {expected_schemas}")

        # Ensure enum types exist in the marketplace schema before any CREATE
        # TABLE that references them. The DO $$ blocks are idempotent.
        # FIX: these statements previously executed even when apply=False,
        # breaking the documented dry-run contract; they are now guarded.
        if 'marketplace' in expected_schemas:
            print("\n🔧 Ensuring enum types in marketplace schema...")
            if apply:
                # moderation_status enum
                connection.execute(text("""
                    DO $$
                    BEGIN
                        IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'moderation_status' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                            CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected');
                        END IF;
                    END $$;
                """))
                # source_type enum
                connection.execute(text("""
                    DO $$
                    BEGIN
                        IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'source_type' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                            CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import');
                        END IF;
                    END $$;
                """))
                print("✅ Enum types ensured.")
            else:
                print(" SQL: CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected')")
                print(" SQL: CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import')")

        for schema in expected_schemas:
            print(f"\n--- 🔍 Checking schema '{schema}' ---")

            # Create the schema itself when missing.
            db_schemas = inspector.get_schema_names()
            if schema not in db_schemas:
                print(f"❌ Schema '{schema}' missing. Creating...")
                if apply:
                    connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
                    print(f"✅ Schema '{schema}' created.")
                else:
                    print(f" SQL: CREATE SCHEMA IF NOT EXISTS \"{schema}\"")

            # Tables declared by the models for this schema vs. live tables.
            model_tables = [t for t in Base.metadata.sorted_tables if t.schema == schema]
            db_tables = inspector.get_table_names(schema=schema)

            for table in model_tables:
                if table.name not in db_tables:
                    print(f"❌ Missing table: {schema}.{table.name}")
                    # Generate (and optionally execute) the CREATE TABLE.
                    # Compiling against the async engine only uses its dialect.
                    create_stmt = CreateTable(table)
                    sql_str = str(create_stmt.compile(bind=engine))
                    print(f" SQL: {sql_str}")
                    if apply:
                        connection.execute(create_stmt)
                        print(f"✅ Table {schema}.{table.name} created.")
                    # A missing/new table needs no column/constraint/index diff.
                    continue

                # --- Columns ---
                db_columns = {c['name']: c for c in inspector.get_columns(table.name, schema=schema)}
                model_columns = table.columns

                missing_cols = [col for col in model_columns if col.name not in db_columns]

                if missing_cols:
                    print(f"⚠️ Table {schema}.{table.name} missing columns: {[c.name for c in missing_cols]}")
                    for col in missing_cols:
                        col_type = col.type.compile(dialect=engine.dialect)
                        sql = f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
                        if col.nullable is False:
                            sql += " NOT NULL"
                        if col.default is not None:
                            # NOTE(review): simplistic — col.default.arg may be a
                            # callable (uuid4, func.now()), which would interpolate
                            # as its repr and yield invalid SQL; confirm the models
                            # only use plain scalar defaults here.
                            sql += f" DEFAULT {col.default.arg}"
                        print(f" SQL: {sql}")
                        if apply:
                            connection.execute(text(sql))
                            print(f"✅ Column {col.name} added.")
                else:
                    print(f"✅ Table {schema}.{table.name} columns are up‑to‑date.")

                # --- Unique constraints ---
                # Match by column set rather than by name, since the live
                # constraint may have been auto-named differently.
                db_unique_constraints = inspector.get_unique_constraints(table.name, schema=schema)
                db_unique_map = {}
                for uc in db_unique_constraints:
                    key = tuple(sorted(uc['column_names']))
                    db_unique_map[key] = uc['name']

                model_unique_constraints = [c for c in table.constraints if isinstance(c, UniqueConstraint)]
                for uc in model_unique_constraints:
                    uc_columns = tuple(sorted([col.name for col in uc.columns]))
                    if uc_columns not in db_unique_map:
                        constraint_name = uc.name or f"uq_{table.name}_{'_'.join(uc_columns)}"
                        columns_sql = ', '.join([f'"{col}"' for col in uc_columns])
                        sql = f'ALTER TABLE "{schema}"."{table.name}" ADD CONSTRAINT "{constraint_name}" UNIQUE ({columns_sql})'
                        print(f"⚠️ Missing unique constraint on {schema}.{table.name} columns {uc_columns}")
                        print(f" SQL: {sql}")
                        if apply:
                            connection.execute(text(sql))
                            print(f"✅ Unique constraint {constraint_name} added.")
                    else:
                        print(f"✅ Unique constraint on {uc_columns} exists.")

                # --- Indexes ---
                # Same column-set matching strategy as for unique constraints.
                db_indexes = inspector.get_indexes(table.name, schema=schema)
                db_index_map = {}
                for idx in db_indexes:
                    key = tuple(sorted(idx['column_names']))
                    db_index_map[key] = idx['name']

                model_indexes = [idx for idx in table.indexes]
                for idx in model_indexes:
                    idx_columns = tuple(sorted([col.name for col in idx.columns]))
                    if idx_columns not in db_index_map:
                        index_name = idx.name or f"idx_{table.name}_{'_'.join(idx_columns)}"
                        columns_sql = ', '.join([f'"{col}"' for col in idx_columns])
                        unique_sql = "UNIQUE " if idx.unique else ""
                        sql = f'CREATE {unique_sql}INDEX "{index_name}" ON "{schema}"."{table.name}" ({columns_sql})'
                        print(f"⚠️ Missing index on {schema}.{table.name} columns {idx_columns}")
                        print(f" SQL: {sql}")
                        if apply:
                            connection.execute(text(sql))
                            print(f"✅ Index {index_name} added.")
                    else:
                        print(f"✅ Index on {idx_columns} exists.")

        print("\n--- ✅ Schema synchronization complete. ---")

    # engine.begin() commits everything on success, rolls back on error.
    async with engine.begin() as conn:
        await conn.run_sync(get_diff_and_repair)

    await engine.dispose()
|
||||
|
||||
async def main():
    """CLI entry point: parse flags, import models, synchronize, then verify."""
    import argparse

    cli = argparse.ArgumentParser(description="Unified Database Synchronizer")
    cli.add_argument('--apply', action='store_true', help='Apply changes to database (otherwise dry‑run)')
    options = cli.parse_args()

    print("🚀 Unified Database Synchronizer")
    print("=" * 50)

    # Step 1: load every model module so Base.metadata is complete.
    print("\n📥 Step 1: Dynamically importing all models...")
    dynamic_import_models()

    # Step 2: diff metadata against the live database and repair.
    print("\n🔧 Step 2: Comparing with database and repairing...")
    await compare_and_repair(apply=options.apply)

    # Step 3: optional verification via the diagnostics comparer.
    print("\n📊 Step 3: Final verification...")
    try:
        from app.tests_internal.diagnostics.compare_schema import compare
        await compare()
    except ImportError:
        print("⚠️ compare_schema module not found, skipping verification.")

    print("\n✨ Synchronization finished successfully!")


if __name__ == "__main__":
    asyncio.run(main())
|
||||
Reference in New Issue
Block a user