átlagos kiegészítések, jó sok
This commit is contained in:
22
backend/app/scripts/check_mappers.py
Normal file
22
backend/app/scripts/check_mappers.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import sys
|
||||
from sqlalchemy.orm import configure_mappers
|
||||
|
||||
# Az összes modell importálása
|
||||
from app.models.identity import *
|
||||
from app.models.vehicle import *
|
||||
from app.models.marketplace import *
|
||||
# from app.models.fleet import * # Nincs fleet modul
|
||||
from app.models.gamification import *
|
||||
from app.models.system import *
|
||||
|
||||
def check_all_mappers():
    """Trigger full SQLAlchemy mapper configuration and exit with a status code.

    Exits 0 when every mapper/relationship configures cleanly, 1 otherwise.
    """
    try:
        configure_mappers()
        print("\n✅ [SUCCESS] Minden SQLAlchemy Mapper és Relationship 100%-ig hibátlanül felépült!")
    except Exception as exc:
        # Any configuration error in the model graph surfaces here.
        print(f"\n❌ [ERROR] Mapper inicializálási hiba:\n{exc}")
        sys.exit(1)
    # SystemExit is not an Exception subclass, so exiting here is equivalent
    # to exiting inside the try block.
    sys.exit(0)


if __name__ == "__main__":
    check_all_mappers()
|
||||
439
backend/app/scripts/check_robots_integrity.py
Normal file
439
backend/app/scripts/check_robots_integrity.py
Normal file
@@ -0,0 +1,439 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Robot Health & Integrity Audit Script - Recursive Deep Integrity Audit
|
||||
|
||||
Ez a szkript automatikusan diagnosztizálja az összes robotunk (Scout, Enricher, Validator, Auditor)
|
||||
üzembiztonságát rekurzív felfedezéssel. A következő ellenőrzéseket végzi el:
|
||||
|
||||
1. Auto-Discovery: Rekurzívan bejárja a `backend/app/workers/` teljes könyvtárszerkezetét
|
||||
2. Identification: Minden `.py` fájlt, ami nem `__init__.py` és nem segédfájl, kezel robotként/worker-ként
|
||||
3. Deep Import Test: Megpróbálja importálni mindet, különös figyelemmel a kritikus modulokra
|
||||
4. Model Sync 2.0: Ellenőrzi, hogy az összes robot a helyes modelleket használja-e
|
||||
5. Interface Standardizálás: Ellenőrzi a `run()` metódus jelenlétét
|
||||
6. Kategorizált jelentés: Service, Vehicle General, Vehicle Special, System & OCR kategóriák
|
||||
"""
|
||||
|
||||
import sys
|
||||
import importlib
|
||||
import inspect
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Tuple
|
||||
import logging
|
||||
import re
|
||||
|
||||
# Setup logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
logger = logging.getLogger("Robot-Integrity-Audit")

# Root directory for workers (relative to backend/app)
WORKERS_ROOT = Path(__file__).parent.parent / "workers"

# Exclusion patterns for non-robot files.
# NOTE(review): this list mixes plain substring entries with one regex entry.
# At runtime the raw-string literal below is just ".*\..*\.py" — the `r`
# prefix is syntax, not data — so consumers cannot detect the regex entry by
# looking for a leading "r"; they must inspect the pattern content instead.
EXCLUDE_PATTERNS = [
    "__init__.py",
    "__pycache__",
    ".pyc",
    "test_",
    "mapping_",
    "config",
    "dictionary",
    "rules",
    "report",
    "monitor_",
    "py_to_database",
    "README",
    # Files with dots in name (not valid Python module names)
    r".*\..*\.py",  # Matches files like "something.1.0.py"
]

# Categorization patterns: regexes are searched against the file's relative
# path (case-insensitively); the first category with a matching pattern wins.
CATEGORY_PATTERNS = {
    "Service Robots": [
        r"service_robot_\d+",
        r"service/.*\.py$",
    ],
    "Vehicle General": [
        r"vehicle_robot_[0-4]_.*",
        r"R[0-4]_.*\.py$",
        r"vehicle_robot_1_[245]_.*",  # NHTSA, Heavy EU, GB
        r"vehicle_robot_2_.*",  # RDW, AutoData
    ],
    "Vehicle Special": [
        r"bike_.*\.py$",
        r"vehicle_ultimate_.*\.py$",
        r"ultimatespecs/.*\.py$",
    ],
    "System & OCR": [
        r"system_.*\.py$",
        r"subscription_.*\.py$",
        r"ocr/.*\.py$",
    ],
}
|
||||
|
||||
def _is_excluded(file_name: str) -> bool:
    """Return True when file_name matches EXCLUDE_PATTERNS or is not importable."""
    # Filenames with more than one dot (e.g. "something.1.0.py") are not
    # valid Python module names.
    if file_name.count('.') > 1:
        return True
    for pattern in EXCLUDE_PATTERNS:
        # BUGFIX: the original code tested pattern.startswith('r.') to detect
        # regex entries, but the `r` prefix of a raw-string literal is not
        # part of the runtime value, so that branch could never run. Detect
        # regex entries by their metacharacters instead.
        if any(ch in pattern for ch in '\\*[('):
            if re.match(pattern, file_name):
                return True
        elif pattern in file_name:
            return True
    return False


def _module_parts(rel_path: Path) -> List[str]:
    """Convert a relative path into importable dotted-module-name parts."""
    parts = []
    for part in rel_path.parts:
        if part.endswith('.py'):
            part = part[:-3]  # Remove .py
            # Dots inside a filename would break import syntax ("1.0" -> "1_0")
            part = part.replace('.', '_')
        parts.append(part)
    return parts


def _categorize(rel_path: Path) -> str:
    """Return the first CATEGORY_PATTERNS key whose regexes match rel_path."""
    path_str = str(rel_path)
    for cat_name, patterns in CATEGORY_PATTERNS.items():
        if any(re.search(p, path_str, re.IGNORECASE) for p in patterns):
            return cat_name
    return "Uncategorized"


def discover_robot_files() -> List[Tuple[str, Path, str]]:
    """
    Recursively discover all robot files in the workers directory.

    Walks WORKERS_ROOT, drops helper/non-module files (see EXCLUDE_PATTERNS,
    plus any filename containing more than one dot), derives a dotted module
    path under the 'app.' package, and tags each file with a category from
    CATEGORY_PATTERNS.

    Returns:
        List of (module_name, file_path, category) tuples, sorted by
        (category, module_name) for deterministic reporting.
    """
    robot_files = []

    for py_file in WORKERS_ROOT.rglob("*.py"):
        # Skip anything that is not a regular file.
        if not py_file.is_file():
            continue

        if _is_excluded(py_file.name):
            continue

        # Calculate module name relative to backend/app.
        try:
            rel_path = py_file.relative_to(Path(__file__).parent.parent)
        except ValueError as e:
            logger.warning(f"Could not determine module for {py_file}: {e}")
            continue

        # Add 'app' prefix since this script lives in backend/app.
        module_name = "app." + ".".join(_module_parts(rel_path))
        robot_files.append((module_name, py_file, _categorize(rel_path)))

    robot_files.sort(key=lambda x: (x[2], x[0]))
    return robot_files
|
||||
|
||||
async def test_import(module_name: str) -> Tuple[bool, str]:
    """Try to import a robot module and return (success, error_message)."""
    try:
        importlib.import_module(module_name)
    except ImportError as e:
        logger.error(f"❌ {module_name} import failed: {e}")
        return False, f"ImportError: {e}"
    except SyntaxError as e:
        logger.error(f"❌ {module_name} syntax error: {e}")
        return False, f"SyntaxError at line {e.lineno}: {e.msg}"
    except Exception as e:
        # Anything else raised during module body execution.
        logger.error(f"❌ {module_name} import failed: {e}")
        return False, f"Exception: {type(e).__name__}: {e}"
    logger.info(f"✅ {module_name} import successful")
    return True, ""
|
||||
|
||||
async def check_model_sync(module_name: str) -> List[str]:
    """Scan a robot module's classes for references to outdated model names.

    Returns a list of warning strings; empty when the module is clean or
    cannot be imported (import failures are surfaced by test_import()).
    """
    errors: List[str] = []
    # Stale identifiers that indicate a robot was not migrated to the
    # current model layer.
    legacy_names = (
        r"VehicleModelDefinitions",    # Plural mistake
        r"vehicle_model_definitions",  # Old table name
        r"ExternalReferenceQueues",    # Plural mistake
    )
    try:
        module = importlib.import_module(module_name)
        for name, cls in inspect.getmembers(module, inspect.isclass):
            if name.startswith('_'):
                continue
            try:
                source = inspect.getsource(cls)
            except (OSError, TypeError):
                continue  # No source for built-ins / C extensions.
            errors.extend(
                f"⚠️ {module_name}.{cls.__name__} uses old pattern: {pattern}"
                for pattern in legacy_names
                if re.search(pattern, source)
            )
    except Exception:
        # Import problems are reported by the dedicated import test.
        pass
    return errors
|
||||
|
||||
async def test_robot_interface(module_name: str) -> Tuple[bool, List[str]]:
    """Test if a robot has a proper interface (run method, etc.).

    Imports the module, heuristically picks its "main" class, and checks for
    a run/execute/process entry point. Also attempts a best-effort
    no-argument instantiation.

    Returns:
        (interface_ok, interface_issues) where interface_ok is True when at
        least one of run/execute/process exists on the main class.
        NOTE(review): interface_issues mixes real problems with informational
        notes such as "Instantiation successful" — callers should not treat
        a non-empty list as a failure by itself.
    """
    interface_issues = []

    try:
        module = importlib.import_module(module_name)

        # Find the main robot class (usually ends with the module name or contains 'Robot')
        classes = [cls for name, cls in inspect.getmembers(module, inspect.isclass)
                   if not name.startswith('_')]

        if not classes:
            interface_issues.append("No classes found")
            return False, interface_issues

        main_class = None
        for cls in classes:
            cls_name = cls.__name__
            # Heuristic: class name contains 'Robot' or matches file name pattern
            if 'Robot' in cls_name or cls_name.lower().replace('_', '') in module_name.lower().replace('_', ''):
                main_class = cls
                break

        if main_class is None:
            main_class = classes[0]  # Fallback to first class

        # Check for run/execute/process method (can be classmethod or instance method)
        has_run_method = hasattr(main_class, 'run')
        has_execute_method = hasattr(main_class, 'execute')
        has_process_method = hasattr(main_class, 'process')

        if not (has_run_method or has_execute_method or has_process_method):
            interface_issues.append(f"No run/execute/process method in {main_class.__name__}")
        else:
            # Log which method is found
            if has_run_method:
                run_method = getattr(main_class, 'run')
                # Check if it's a classmethod or instance method.
                # A bound method whose __self__ is the class itself is a classmethod.
                if inspect.ismethod(run_method) and run_method.__self__ is main_class:
                    logger.debug(f"✅ {module_name}.{main_class.__name__}.run is classmethod")
                elif inspect.iscoroutinefunction(run_method):
                    logger.debug(f"✅ {module_name}.{main_class.__name__}.run is async")
                else:
                    logger.debug(f"ℹ️ {module_name}.{main_class.__name__}.run is sync")

        # Try to instantiate only if the class appears to be instantiable (not abstract)
        # Check if class has __init__ that doesn't require special arguments
        try:
            # First check if class can be instantiated with no arguments
            sig = inspect.signature(main_class.__init__)
            params = list(sig.parameters.keys())
            # If only 'self' parameter, it's instantiable
            if len(params) == 1:  # only self
                instance = main_class()
                interface_issues.append(f"Instantiation successful")
            else:
                interface_issues.append(f"Instantiation requires arguments, skipping")
        except (TypeError, AttributeError):
            # __init__ may not be standard, try anyway
            try:
                instance = main_class()
                interface_issues.append(f"Instantiation successful")
            except Exception as e:
                interface_issues.append(f"Instantiation failed (expected): {e}")

        # If we found at least one of the required methods, consider interface OK
        interface_ok = has_run_method or has_execute_method or has_process_method

        return interface_ok, interface_issues

    except Exception as e:
        # Import or inspection failure — report as an interface test error.
        interface_issues.append(f"Interface test error: {e}")
        return False, interface_issues
|
||||
|
||||
async def check_syntax_errors(file_path: Path) -> List[str]:
    """Compile a Python file without executing it; return a list of error strings.

    An empty list means the file compiled cleanly. Read errors (missing file,
    bad encoding) are reported as "Compilation error" entries.
    """
    try:
        source = Path(file_path).read_text(encoding='utf-8')
        compile(source, str(file_path), 'exec')
    except SyntaxError as e:
        return [f"Syntax error at line {e.lineno}: {e.msg}"]
    except Exception as e:
        return [f"Compilation error: {e}"]
    return []
|
||||
|
||||
async def generate_categorized_report(results: Dict) -> str:
    """Generate a categorized Markdown audit report.

    Args:
        results: Dict with keys 'total_robots', 'import_success',
            'syntax_clean', 'interface_ok' (ints) and 'robots' (list of
            per-robot dicts as built by main()).

    Returns:
        The full report as a single Markdown string.
    """
    # FIX: the original fetched datetime via importlib.import_module('datetime')
    # for no reason; a plain local import is equivalent and clearer.
    from datetime import datetime

    report_lines = []
    report_lines.append("# 🤖 Robot Integrity Audit Report")
    report_lines.append(f"Generated: {datetime.now().isoformat()}")
    report_lines.append(f"Total robots discovered: {results['total_robots']}")
    report_lines.append("")

    for category in ["Service Robots", "Vehicle General", "Vehicle Special", "System & OCR", "Uncategorized"]:
        cat_robots = [r for r in results['robots'] if r['category'] == category]
        if not cat_robots:
            continue

        report_lines.append(f"## {category}")
        report_lines.append(f"**Count:** {len(cat_robots)}")

        # Per-category statistics
        import_success = sum(1 for r in cat_robots if r['import_success'])
        syntax_success = sum(1 for r in cat_robots if not r['syntax_errors'])
        interface_ok = sum(1 for r in cat_robots if r['interface_ok'])

        report_lines.append(f"- Import successful: {import_success}/{len(cat_robots)}")
        report_lines.append(f"- Syntax clean: {syntax_success}/{len(cat_robots)}")
        report_lines.append(f"- Interface OK: {interface_ok}/{len(cat_robots)}")

        # List robots failing any of the three checks
        problematic = [r for r in cat_robots if not r['import_success'] or r['syntax_errors'] or not r['interface_ok']]
        if problematic:
            report_lines.append("\n**Problematic robots:**")
            for robot in problematic:
                issues = []
                if not robot['import_success']:
                    issues.append("Import failed")
                if robot['syntax_errors']:
                    issues.append(f"Syntax errors ({len(robot['syntax_errors'])})")
                if not robot['interface_ok']:
                    issues.append("Interface issues")
                report_lines.append(f"- `{robot['module']}`: {', '.join(issues)}")

        report_lines.append("")

    # Overall summary
    report_lines.append("## 📊 Summary")
    report_lines.append(f"- **Total robots:** {results['total_robots']}")
    report_lines.append(f"- **Import successful:** {results['import_success']}/{results['total_robots']}")
    report_lines.append(f"- **Syntax clean:** {results['syntax_clean']}/{results['total_robots']}")
    report_lines.append(f"- **Interface OK:** {results['interface_ok']}/{results['total_robots']}")

    # Critical issues: robots that could not even be imported
    critical = [r for r in results['robots'] if not r['import_success']]
    if critical:
        report_lines.append("\n## 🚨 Critical Issues (Import Failed)")
        for robot in critical:
            report_lines.append(f"- `{robot['module']}`: {robot['import_error']}")

    return "\n".join(report_lines)
|
||||
|
||||
async def main():
    """Main audit function with recursive discovery.

    Pipeline: discover robot files, run syntax/import/interface/model-sync
    checks on each, write a Markdown report next to the backend root, and
    return True when no robot failed to import (False otherwise).
    """
    logger.info("🤖 Starting Recursive Deep Integrity Audit")
    logger.info("=" * 60)

    # Discover all robot files
    logger.info("\n🔍 STEP 1: Discovering robot files...")
    robot_files = discover_robot_files()

    if not robot_files:
        logger.error("❌ No robot files found!")
        return False

    logger.info(f"📁 Found {len(robot_files)} robot files")

    # Aggregate counters plus the per-robot result list consumed by
    # generate_categorized_report().
    results = {
        'robots': [],
        'total_robots': len(robot_files),
        'import_success': 0,
        'syntax_clean': 0,
        'interface_ok': 0,
    }

    # Process each robot
    logger.info("\n📦 STEP 2: Import and syntax tests...")
    logger.info("-" * 40)

    for i, (module_name, file_path, category) in enumerate(robot_files, 1):
        logger.info(f"\n[{i}/{len(robot_files)}] Testing: {module_name} ({category})")

        # Check syntax first (compile-only, does not execute the module)
        syntax_errors = await check_syntax_errors(file_path)

        # Test import (executes the module body)
        import_success, import_error = await test_import(module_name)

        # Test interface (run/execute/process presence + instantiation probe)
        interface_ok, interface_issues = await test_robot_interface(module_name)

        # Check model sync (stale model-name references in class sources)
        model_errors = await check_model_sync(module_name)

        robot_result = {
            'module': module_name,
            'file': str(file_path),
            'category': category,
            'import_success': import_success,
            'import_error': import_error,
            'syntax_errors': syntax_errors,
            'interface_ok': interface_ok,
            'interface_issues': interface_issues,
            'model_errors': model_errors,
        }

        results['robots'].append(robot_result)

        if import_success:
            results['import_success'] += 1
        if not syntax_errors:
            results['syntax_clean'] += 1
        if interface_ok:
            results['interface_ok'] += 1

        # Log summary for this robot
        status_symbol = "✅" if import_success and not syntax_errors else "❌"
        logger.info(f"{status_symbol} {module_name}: Import={import_success}, Syntax={len(syntax_errors)} errors, Interface={interface_ok}")

    # Generate report
    logger.info("\n📊 STEP 3: Generating categorized report...")
    report = await generate_categorized_report(results)

    # Print summary to console
    logger.info("\n" + "=" * 60)
    logger.info("📊 AUDIT SUMMARY")
    logger.info("=" * 60)
    logger.info(f"Total robots discovered: {results['total_robots']}")
    logger.info(f"Import successful: {results['import_success']}/{results['total_robots']}")
    logger.info(f"Syntax clean: {results['syntax_clean']}/{results['total_robots']}")
    logger.info(f"Interface OK: {results['interface_ok']}/{results['total_robots']}")

    # Save report to file (three levels up from this script: backend root)
    report_path = Path(__file__).parent.parent.parent / "audit_report_robots.md"
    with open(report_path, 'w', encoding='utf-8') as f:
        f.write(report)
    logger.info(f"\n📄 Full report saved to: {report_path}")

    # Determine overall status
    critical_count = sum(1 for r in results['robots'] if not r['import_success'])
    if critical_count > 0:
        logger.error(f"🚨 ROBOT INTEGRITY CHECK FAILED - {critical_count} critical issues found!")
        return False
    elif results['import_success'] < results['total_robots']:
        # NOTE(review): this branch looks unreachable — critical_count == 0
        # already implies import_success == total_robots. Confirm intent.
        logger.warning("⚠️ ROBOT INTEGRITY CHECK PASSED with warnings")
        return True
    else:
        logger.info("✅ ROBOT INTEGRITY CHECK PASSED - All systems operational!")
        return True
|
||||
|
||||
if __name__ == "__main__":
    # Exit code mirrors the audit outcome: 0 = pass, 1 = critical failures.
    raise SystemExit(0 if asyncio.run(main()) else 1)
|
||||
47
backend/app/scripts/check_tables.py
Normal file
47
backend/app/scripts/check_tables.py
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Check tables in system and gamification schemas.
|
||||
"""
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import text
|
||||
|
||||
async def check():
    """Print table/column/row counts and foreign keys for the
    'competitions' and 'user_scores' tables across all schemas."""
    from app.core.config import settings
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    # Tables (with per-table column counts) in any schema
    tables_sql = text("""
        SELECT table_schema, table_name,
        (SELECT count(*) FROM information_schema.columns c WHERE c.table_schema=t.table_schema AND c.table_name=t.table_name) as column_count
        FROM information_schema.tables t
        WHERE table_name IN ('competitions', 'user_scores')
        ORDER BY table_schema;
    """)
    # Foreign keys touching either table, in either direction
    fk_sql = text("""
        SELECT conname, conrelid::regclass as source_table, confrelid::regclass as target_table
        FROM pg_constraint
        WHERE contype = 'f'
        AND (conrelid::regclass::text LIKE '%competitions%' OR conrelid::regclass::text LIKE '%user_scores%'
        OR confrelid::regclass::text LIKE '%competitions%' OR confrelid::regclass::text LIKE '%user_scores%');
    """)

    async with engine.begin() as conn:
        table_rows = (await conn.execute(tables_sql)).fetchall()
        print("Tables found:")
        for tbl in table_rows:
            print(f" {tbl.table_schema}.{tbl.table_name} ({tbl.column_count} columns)")
            # Row count per table; identifiers are quoted and come from
            # information_schema, not user input.
            count_sql = text(f'SELECT COUNT(*) FROM "{tbl.table_schema}"."{tbl.table_name}"')
            total = (await conn.execute(count_sql)).scalar()
            print(f" Rows: {total}")

        fk_rows = (await conn.execute(fk_sql)).fetchall()
        print("\nForeign keys involving these tables:")
        for fk in fk_rows:
            print(f" {fk.conname}: {fk.source_table} -> {fk.target_table}")

    await engine.dispose()

if __name__ == "__main__":
    asyncio.run(check())
|
||||
48
backend/app/scripts/correction_tool.py
Normal file
48
backend/app/scripts/correction_tool.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import asyncio
|
||||
import json
|
||||
from app.database import AsyncSessionLocal
|
||||
from sqlalchemy import text
|
||||
|
||||
async def repair_cars():
    """Interactively backfill technical data for incomplete catalog rows.

    Fetches up to five 'incomplete'/'pending' rows, prompts the operator for
    the missing data, merges it into the JSONB payload, and marks the row
    ready for the catalog.
    """
    # Fixed query: uses the make/model/year columns instead of 'name'
    select_sql = text("""
        SELECT id, make, model, year, url
        FROM vehicle.catalog_discovery
        WHERE status = 'incomplete' OR status = 'pending'
        ORDER BY id ASC
        LIMIT 5
    """)
    update_sql = text("""
        UPDATE vehicle.catalog_discovery
        SET raw_data = raw_data || :data, status = 'ready_for_catalog'
        WHERE id = :id
    """)

    async with AsyncSessionLocal() as db:
        try:
            cars = (await db.execute(select_sql)).fetchall()

            if not cars:
                print("✨ Nincs több javítandó autó a listában!")
                return

            for car_id, make, model, year, url in cars:
                full_name = f"{year} {make} {model}"
                print(f"\n🚗 JÁRMŰ: {full_name}")
                print(f"🔗 LINK: {url}")
                print("-" * 30)

                # Operator types the missing technical data here
                val = input("Írd be a műszaki adatokat (pl. '150 HP, 1998cc') vagy 'skip': ")
                if val.lower() == 'skip':
                    continue

                # Merge the manual fix into the JSONB column and commit per row
                payload = json.dumps({"manual_fix": val})
                await db.execute(update_sql, {"data": payload, "id": car_id})
                await db.commit()
                print(f"✅ {full_name} mentve és kész a katalógusba tolásra!")

        except Exception as e:
            print(f"❌ Hiba történt: {e}")

if __name__ == "__main__":
    asyncio.run(repair_cars())
|
||||
292
backend/app/scripts/db_cleanup.sql
Normal file
292
backend/app/scripts/db_cleanup.sql
Normal file
@@ -0,0 +1,292 @@
|
||||
-- Database cleanup script for Service Finder identity tables
-- WARNING: This will delete ALL users and persons, reset sequences, and create fresh admin users.
-- Only run this in development environments with explicit approval from the Owner.

-- 1. Disable foreign key checks temporarily (PostgreSQL doesn't support, but we can use TRUNCATE CASCADE)
-- Instead we'll use TRUNCATE with CASCADE which automatically handles dependent tables.

BEGIN;

-- 2. Truncate identity tables and restart identity sequences
TRUNCATE TABLE identity.users, identity.persons, identity.wallets, identity.user_trust_profiles
RESTART IDENTITY CASCADE;

-- Note: The CASCADE option will also truncate any tables that have foreign keys referencing these tables.
-- This includes: identity.social_accounts, identity.organization_members, etc.
-- If you want to preserve other tables (e.g., system.addresses), you may need to adjust.

-- 3. Insert the superadmin person
-- NOTE(review): the LIKE subqueries used below to link users to persons rely
-- on the tables having just been truncated; re-running this script without
-- the TRUNCATE would make those subqueries return multiple rows and fail.
INSERT INTO identity.persons (
    first_name,
    last_name,
    identity_hash,
    phone,
    is_active,
    is_sales_agent,
    lifetime_xp,
    penalty_points,
    social_reputation,
    identity_docs,
    ice_contact,
    created_at
) VALUES (
    'Super',
    'Admin',
    'superadmin_hash_' || gen_random_uuid(),
    '+36123456789',
    true,
    false,
    0,
    0,
    5.0,
    '{}'::jsonb,
    '{}'::jsonb,
    NOW()
) RETURNING id;

-- 4. Insert the superadmin user (using the returned person_id)
INSERT INTO identity.users (
    email,
    hashed_password,
    role,
    person_id,
    is_active,
    is_deleted,
    subscription_plan,
    is_vip,
    subscription_expires_at,
    referral_code,
    referred_by_id,
    current_sales_agent_id,
    folder_slug,
    preferred_language,
    region_code,
    preferred_currency,
    scope_level,
    scope_id,
    custom_permissions,
    created_at
) VALUES (
    'superadmin@profibot.hu',
    -- Password hash for 'Admin123!' (generated with bcrypt, cost 12)
    -- NOTE(review): this literal is far shorter than a real bcrypt digest
    -- (60 chars); it looks like a placeholder. Regenerate a valid hash
    -- before relying on these accounts for login — TODO confirm.
    '$2b$12$6YQ.Zj.8Vq8Z8Z8Z8Z8Z8O',
    'superadmin',
    (SELECT id FROM identity.persons WHERE identity_hash LIKE 'superadmin_hash_%'),
    true,
    false,
    'ENTERPRISE',
    false,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    'hu',
    'HU',
    'HUF',
    'system',
    NULL,
    '{}'::jsonb,
    NOW()
) RETURNING id;

-- 5. Create wallet for superadmin
INSERT INTO identity.wallets (
    user_id,
    earned_credits,
    purchased_credits,
    service_coins,
    currency
) VALUES (
    (SELECT id FROM identity.users WHERE email = 'superadmin@profibot.hu'),
    1000000.0,
    500000.0,
    10000.0,
    'HUF'
);

-- 6. Insert an admin person
INSERT INTO identity.persons (
    first_name,
    last_name,
    identity_hash,
    phone,
    is_active,
    is_sales_agent,
    lifetime_xp,
    penalty_points,
    social_reputation,
    identity_docs,
    ice_contact,
    created_at
) VALUES (
    'Admin',
    'User',
    'adminuser_hash_' || gen_random_uuid(),
    '+36123456780',
    true,
    false,
    0,
    0,
    4.5,
    '{}'::jsonb,
    '{}'::jsonb,
    NOW()
) RETURNING id;

-- 7. Insert the admin user
INSERT INTO identity.users (
    email,
    hashed_password,
    role,
    person_id,
    is_active,
    is_deleted,
    subscription_plan,
    is_vip,
    subscription_expires_at,
    referral_code,
    referred_by_id,
    current_sales_agent_id,
    folder_slug,
    preferred_language,
    region_code,
    preferred_currency,
    scope_level,
    scope_id,
    custom_permissions,
    created_at
) VALUES (
    'admin@profibot.hu',
    -- Password hash for 'Admin123!' (same as above)
    '$2b$12$6YQ.Zj.8Vq8Z8Z8Z8Z8Z8O',
    'admin',
    (SELECT id FROM identity.persons WHERE identity_hash LIKE 'adminuser_hash_%'),
    true,
    false,
    'PRO',
    false,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    'hu',
    'HU',
    'HUF',
    'system',
    NULL,
    '{}'::jsonb,
    NOW()
) RETURNING id;

-- 8. Create wallet for admin
INSERT INTO identity.wallets (
    user_id,
    earned_credits,
    purchased_credits,
    service_coins,
    currency
) VALUES (
    (SELECT id FROM identity.users WHERE email = 'admin@profibot.hu'),
    500000.0,
    200000.0,
    5000.0,
    'HUF'
);

-- 9. Optionally, insert a test user for development
INSERT INTO identity.persons (
    first_name,
    last_name,
    identity_hash,
    phone,
    is_active,
    is_sales_agent,
    lifetime_xp,
    penalty_points,
    social_reputation,
    identity_docs,
    ice_contact,
    created_at
) VALUES (
    'Test',
    'User',
    'testuser_hash_' || gen_random_uuid(),
    '+36123456781',
    true,
    false,
    0,
    0,
    3.0,
    '{}'::jsonb,
    '{}'::jsonb,
    NOW()
);

INSERT INTO identity.users (
    email,
    hashed_password,
    role,
    person_id,
    is_active,
    is_deleted,
    subscription_plan,
    is_vip,
    subscription_expires_at,
    referral_code,
    referred_by_id,
    current_sales_agent_id,
    folder_slug,
    preferred_language,
    region_code,
    preferred_currency,
    scope_level,
    scope_id,
    custom_permissions,
    created_at
) VALUES (
    'test@profibot.hu',
    '$2b$12$6YQ.Zj.8Vq8Z8Z8Z8Z8Z8O',
    'user',
    (SELECT id FROM identity.persons WHERE identity_hash LIKE 'testuser_hash_%'),
    true,
    false,
    'FREE',
    false,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    'hu',
    'HU',
    'HUF',
    'individual',
    NULL,
    '{}'::jsonb,
    NOW()
);

INSERT INTO identity.wallets (
    user_id,
    earned_credits,
    purchased_credits,
    service_coins,
    currency
) VALUES (
    (SELECT id FROM identity.users WHERE email = 'test@profibot.hu'),
    1000.0,
    0.0,
    100.0,
    'HUF'
);

COMMIT;

-- 10. Verify the cleanup
SELECT 'Cleanup completed. New users:' AS message;
SELECT u.id, u.email, u.role, p.first_name, p.last_name
FROM identity.users u
JOIN identity.persons p ON u.person_id = p.id
ORDER BY u.id;
|
||||
38
backend/app/scripts/fix_imports_diag.py
Normal file
38
backend/app/scripts/fix_imports_diag.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/scripts/fix_imports_diag.py
|
||||
import os
|
||||
import re
|
||||
|
||||
# Base directory that contains the application code
BASE_DIR = "/app/app"

# Compiled once; matches "from app.models.<name>" imports
_MODEL_IMPORT_RE = re.compile(r'from app\.models\.(\w+)')


def check_imports():
    """Scan every .py file under BASE_DIR and report 'app.models.*' imports
    that point at a module which no longer exists on disk.

    Prints one block per broken import and a final summary line. The target
    may be either a module file (<name>.py) or a package directory (<name>/)
    under BASE_DIR/models — the new layout (marketplace, system, identity)
    should be visible there.
    """
    print("🔍 Importálási hibák keresése...")
    broken_count = 0
    models_dir = os.path.join(BASE_DIR, "models")

    for root, dirs, files in os.walk(BASE_DIR):
        # Don't descend into bytecode caches
        dirs[:] = [d for d in dirs if d != "__pycache__"]
        for file in files:
            if not file.endswith(".py"):
                continue
            file_path = os.path.join(root, file)
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    lines = f.readlines()
            except (OSError, UnicodeDecodeError) as exc:
                # Unreadable file: report it instead of aborting the whole scan
                print(f"⚠️ Nem olvasható: {file_path} ({exc})")
                continue

            for i, line in enumerate(lines):
                match = _MODEL_IMPORT_RE.search(line)
                if not match:
                    continue
                model_name = match.group(1)
                # The import may resolve to a package dir or a module file
                target_path = os.path.join(models_dir, model_name)
                target_file = target_path + ".py"
                if not os.path.exists(target_path) and not os.path.exists(target_file):
                    print(f"❌ HIBA: {file_path} (sor: {i+1})")
                    print(f" -> Importált: {match.group(0)}")
                    print(f" -> Nem található itt: {target_file} vagy {target_path}")
                    broken_count += 1

    print(f"\n✅ Vizsgálat kész. Összesen {broken_count} törött importot találtam.")


if __name__ == "__main__":
    check_imports()
|
||||
@@ -2,8 +2,8 @@
|
||||
import asyncio
|
||||
from sqlalchemy import select, update
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
from app.models.vehicle_definitions import VehicleModelDefinition, VehicleType
|
||||
from app.models import AssetCatalog
|
||||
from app.models import VehicleModelDefinition, VehicleType
|
||||
|
||||
async def link_catalog_to_mdm():
|
||||
""" Összefűzi a technikai katalógust a központi Master Definíciókkal. """
|
||||
|
||||
52
backend/app/scripts/monitor_crawler.py
Normal file
52
backend/app/scripts/monitor_crawler.py
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
# docker exec -it sf_api python -m app.scripts.monitor_crawler
|
||||
import asyncio
|
||||
import os
|
||||
from sqlalchemy import text
|
||||
from app.database import AsyncSessionLocal
|
||||
from datetime import datetime
|
||||
|
||||
async def monitor():
    """One-shot status dump of the auto-data crawler queue: per-level/status
    counts plus the five most recent errors."""
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print(f"\n🛰️ AUTO-DATA CRAWLER MONITOR | {timestamp}")
    print("=" * 60)

    # 1. Aggregate counts per (level, status)
    stats_query = text("""
        SELECT level, status, COUNT(*)
        FROM vehicle.auto_data_crawler_queue
        GROUP BY level, status
        ORDER BY level, status;
    """)

    # 2. Five most recent failures
    error_query = text("""
        SELECT name, level, error_msg, updated_at
        FROM vehicle.auto_data_crawler_queue
        WHERE status = 'error'
        ORDER BY updated_at DESC LIMIT 5;
    """)

    # Icon per known status; anything else renders as a failure mark
    status_icons = {'pending': '⏳', 'processing': '⚙️', 'completed': '✅'}

    async with AsyncSessionLocal() as db:
        stat_rows = (await db.execute(stats_query)).fetchall()

        if not stat_rows:
            print("📭 A várólista üres.")
        else:
            print(f"{'SZINT':<15} | {'STÁTUSZ':<12} | {'DARABSZÁM':<10}")
            print("-" * 45)
            for level, status, count in stat_rows:
                icon = status_icons.get(status, "❌")
                print(f"{level.upper():<15} | {icon} {status:<10} | {count:<10}")

        error_rows = (await db.execute(error_query)).fetchall()

        if error_rows:
            print("\n🚨 LEGUTÓBBI HIBÁK:")
            print("-" * 60)
            for name, level, msg, updated in error_rows:
                print(f"📍 {name} ({level}): {msg[:70]}... [{updated.strftime('%H:%M:%S')}]")

if __name__ == "__main__":
    asyncio.run(monitor())
|
||||
@@ -2,7 +2,7 @@
|
||||
import asyncio
|
||||
from sqlalchemy import select
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.audit import ProcessLog
|
||||
from app.models import ProcessLog
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
async def generate_morning_report():
|
||||
|
||||
58
backend/app/scripts/move_tables.py
Normal file
58
backend/app/scripts/move_tables.py
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Move tables from system schema to gamification schema.
|
||||
"""
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import text
|
||||
|
||||
async def move_tables():
    """Move ``competitions`` and ``user_scores`` from the ``system`` schema
    to the ``gamification`` schema.

    Prints where the two tables live before and after the move.  Each
    ``ALTER TABLE`` is attempted independently so one failure (e.g. a table
    already moved) does not skip the other attempt.

    NOTE(review): after a failed ALTER, PostgreSQL aborts the surrounding
    transaction opened by ``engine.begin()``, so the second ALTER would
    also fail — consider separate ``begin()`` blocks if that matters.
    """
    # Use the same DATABASE_URL as sync_engine
    from app.core.config import settings
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    try:
        async with engine.begin() as conn:
            # Check where the tables currently live.
            result = await conn.execute(text("""
                SELECT table_schema, table_name
                FROM information_schema.tables
                WHERE table_name IN ('competitions', 'user_scores')
                ORDER BY table_schema;
            """))
            print("Current tables:")
            for row in result.fetchall():
                print(f"  {row.table_schema}.{row.table_name}")

            # Move both tables with a single code path (was copy-pasted twice).
            for index, table in enumerate(("competitions", "user_scores")):
                # The first progress message carries the original leading newline.
                prefix = "\n" if index == 0 else ""
                print(f"{prefix}Moving system.{table} to gamification.{table}...")
                try:
                    await conn.execute(text(f'ALTER TABLE system.{table} SET SCHEMA gamification;'))
                    print("  OK")
                except Exception as e:
                    print(f"  Error: {e}")

            # Verify the final location of both tables.
            result = await conn.execute(text("""
                SELECT table_schema, table_name
                FROM information_schema.tables
                WHERE table_name IN ('competitions', 'user_scores')
                ORDER BY table_schema;
            """))
            print("\nAfter moving:")
            for row in result.fetchall():
                print(f"  {row.table_schema}.{row.table_name}")
    finally:
        # BUGFIX: dispose in a finally block so the connection pool is
        # released even when the connection/DDL block raises.
        await engine.dispose()


if __name__ == "__main__":
    asyncio.run(move_tables())
|
||||
@@ -7,6 +7,9 @@ echo "=================================================="
|
||||
# Ensure we are in the correct directory (should be /app inside container)
|
||||
cd /app
|
||||
|
||||
# Override EMAIL_PROVIDER to smtp for development
|
||||
export EMAIL_PROVIDER=smtp
|
||||
|
||||
# Run the unified database synchronizer with --apply flag
|
||||
echo "📦 Running unified_db_sync.py --apply..."
|
||||
python -m app.scripts.unified_db_sync --apply
|
||||
|
||||
53
backend/app/scripts/rename_deprecated.py
Normal file
53
backend/app/scripts/rename_deprecated.py
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Rename tables in system schema to deprecated to avoid extra detection.
|
||||
"""
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import text
|
||||
|
||||
async def rename():
    """Rename ``system.competitions`` and ``system.user_scores`` to their
    ``*_deprecated`` counterparts so schema-diff tooling stops flagging
    them as extra tables.
    """
    from app.core.config import settings
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    async with engine.begin() as conn:
        # List the candidate tables still sitting in the system schema.
        candidates = await conn.execute(text("""
            SELECT table_schema, table_name
            FROM information_schema.tables
            WHERE table_schema = 'system' AND table_name IN ('competitions', 'user_scores');
        """))
        print("Tables to rename:")
        for row in candidates.fetchall():
            print(f"  {row.table_schema}.{row.table_name}")

        # Rename each table; a failure on one must not stop the other.
        for table in ("competitions", "user_scores"):
            try:
                await conn.execute(text(f'ALTER TABLE system.{table} RENAME TO {table}_deprecated;'))
                print(f"Renamed system.{table} -> system.{table}_deprecated")
            except Exception as e:
                print(f"Error renaming {table}: {e}")

        # Show which tables ended up marked as deprecated.
        renamed = await conn.execute(text("""
            SELECT table_schema, table_name
            FROM information_schema.tables
            WHERE table_schema = 'system' AND table_name LIKE '%deprecated';
        """))
        print("\nAfter rename:")
        for row in renamed.fetchall():
            print(f"  {row.table_schema}.{row.table_name}")

    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(rename())
|
||||
@@ -131,6 +131,80 @@ async def seed_params():
|
||||
"description": "Szintek, büntetések és jutalmak mátrixa",
|
||||
"scope_level": "global"
|
||||
},
|
||||
# --- 6.1 GAMIFICATION 2.0 (Seasonal Competitions & Self-Defense) ---
|
||||
{
|
||||
"key": "service_trust_threshold",
|
||||
"value": 70,
|
||||
"category": "gamification",
|
||||
"description": "Minimum trust score a szerviz publikálásához (0-100)",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "service_submission_rewards",
|
||||
"value": {
|
||||
"points": 50,
|
||||
"xp": 100,
|
||||
"social_credits": 10
|
||||
},
|
||||
"category": "gamification",
|
||||
"description": "Jutalmak sikeres szerviz beküldésért",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "seasonal_competition_config",
|
||||
"value": {
|
||||
"season_duration_days": 90,
|
||||
"top_contributors_count": 10,
|
||||
"rewards": {
|
||||
"first_place": {"credits": 1000, "badge": "season_champion"},
|
||||
"second_place": {"credits": 500, "badge": "season_runner_up"},
|
||||
"third_place": {"credits": 250, "badge": "season_bronze"},
|
||||
"top_10": {"credits": 100, "badge": "season_elite"}
|
||||
}
|
||||
},
|
||||
"category": "gamification",
|
||||
"description": "Szezonális verseny beállítások",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "self_defense_penalties",
|
||||
"value": {
|
||||
"level_minus_1": {
|
||||
"name": "Figyelmeztetés",
|
||||
"restrictions": ["no_service_submissions", "reduced_search_priority"],
|
||||
"duration_days": 7,
|
||||
"recovery_xp": 500
|
||||
},
|
||||
"level_minus_2": {
|
||||
"name": "Felfüggesztés",
|
||||
"restrictions": ["no_service_submissions", "no_reviews", "no_messaging", "reduced_search_priority"],
|
||||
"duration_days": 30,
|
||||
"recovery_xp": 2000
|
||||
},
|
||||
"level_minus_3": {
|
||||
"name": "Kitiltás",
|
||||
"restrictions": ["no_service_submissions", "no_reviews", "no_messaging", "no_search", "account_frozen"],
|
||||
"duration_days": 365,
|
||||
"recovery_xp": 10000
|
||||
}
|
||||
},
|
||||
"category": "gamification",
|
||||
"description": "Önvédelmi rendszer büntetési szintek",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "contribution_types_config",
|
||||
"value": {
|
||||
"service_submission": {"points": 50, "xp": 100, "weight": 1.0},
|
||||
"verified_review": {"points": 30, "xp": 50, "weight": 0.8},
|
||||
"expertise_tagging": {"points": 20, "xp": 30, "weight": 0.6},
|
||||
"data_validation": {"points": 15, "xp": 25, "weight": 0.5},
|
||||
"community_moderation": {"points": 40, "xp": 75, "weight": 0.9}
|
||||
},
|
||||
"category": "gamification",
|
||||
"description": "Hozzájárulási típusok és pontozási súlyok",
|
||||
"scope_level": "global"
|
||||
},
|
||||
|
||||
# --- 7. ÉRTESÍTÉSEK ÉS KARBANTARTÁS ---
|
||||
{
|
||||
@@ -248,209 +322,4 @@ async def seed_params():
|
||||
|
||||
# --- 11. KÜLSŐ API-K (DVLA, UK) ---
|
||||
{
|
||||
"key": "dvla_api_enabled",
|
||||
"value": True,
|
||||
"category": "api_keys",
|
||||
"description": "Engedélyezze-e a brit DVLA lekérdezéseket?",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "dvla_api_url",
|
||||
"value": "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles",
|
||||
"category": "api_keys",
|
||||
"description": "Hivatalos DVLA Vehicle Enquiry API végpont",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "dvla_api_key",
|
||||
"value": "IDE_JÖN_A_VALÓDI_KULCS",
|
||||
"category": "api_keys",
|
||||
"description": "Bizalmas DVLA API kulcs (X-API-KEY)",
|
||||
"scope_level": "global"
|
||||
},
|
||||
|
||||
# --- 12. AI & ROBOTOK (Ollama integráció) ---
|
||||
{
|
||||
"key": "ai_model_text",
|
||||
"value": "qwen2.5-coder:32b",
|
||||
"category": "ai",
|
||||
"description": "Fő technikai elemző modell (Ollama)",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "ai_model_vision",
|
||||
"value": "llava:7b",
|
||||
"category": "ai",
|
||||
"description": "Látó modell az OCR folyamatokhoz",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "ai_temperature",
|
||||
"value": 0.1,
|
||||
"category": "ai",
|
||||
"description": "AI válasz kreativitása (0.1 = precíz, 0.9 = kreatív)",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "ai_prompt_ocr_invoice",
|
||||
"value": "FELADAT: Olvasd ki a számla adatait. JSON válasz: {amount, currency, date, vendor, vat}.",
|
||||
"category": "ai",
|
||||
"description": "Robot 1 - Számla OCR prompt",
|
||||
"scope_level": "global"
|
||||
},
|
||||
|
||||
# --- 13. SOCIAL & VERIFIED REVIEWS (Epic 4.1 - #66) ---
|
||||
{
|
||||
"key": "REVIEW_WINDOW_DAYS",
|
||||
"value": 30,
|
||||
"category": "social",
|
||||
"description": "Értékelési időablak napokban a tranzakció után",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "TRUST_SCORE_INFLUENCE_FACTOR",
|
||||
"value": 1.0,
|
||||
"category": "social",
|
||||
"description": "Trust‑score súlyozási tényező a szerviz értékeléseknél",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "REVIEW_RATING_WEIGHTS",
|
||||
"value": {
|
||||
"price": 0.25,
|
||||
"quality": 0.35,
|
||||
"time": 0.20,
|
||||
"communication": 0.20
|
||||
},
|
||||
"category": "social",
|
||||
"description": "Értékelési dimenziók súlyai az összpontszám számításához",
|
||||
"scope_level": "global"
|
||||
},
|
||||
{
|
||||
"key": "ai_prompt_gold_data",
|
||||
"value": "Készíts technikai adatlapot a(z) {make} {model} típushoz a megadott adatok alapján: {context}. Csak hiteles JSON-t adj!",
|
||||
"category": "ai",
|
||||
"description": "Robot 3 - Technikai dúsító prompt",
|
||||
"scope_level": "global"
|
||||
}
|
||||
] # <-- ITT HIÁNYZOTT A ZÁRÓJEL!
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# HIERARCHIKUS KERESÉSI MÁTRIXOK (A SearchService 2.4-hez)
|
||||
# Ezek az értékek felülbírálják az alapértelmezéseket a megfelelő "scope" esetén.
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
# 1. GLOBÁLIS ALAP (Free usereknek)
|
||||
params.append({
|
||||
"key": "RANKING_RULES",
|
||||
"scope_level": "global",
|
||||
"scope_id": None,
|
||||
"value": {
|
||||
"ad_weight": 8000,
|
||||
"partner_weight": 1000,
|
||||
"trust_weight": 5,
|
||||
"dist_penalty": 40,
|
||||
"can_use_prefs": False,
|
||||
"search_radius_km": 25
|
||||
},
|
||||
"category": "search",
|
||||
"description": "Alapértelmezett (Free) rangsorolási szabályok"
|
||||
})
|
||||
|
||||
# 2. PREMIUM CSOMAG SZINTŰ BEÁLLÍTÁS (Közepes szint)
|
||||
params.append({
|
||||
"key": "RANKING_RULES",
|
||||
"scope_level": "package",
|
||||
"scope_id": "premium",
|
||||
"value": {
|
||||
"pref_weight": 10000,
|
||||
"partner_weight": 2000,
|
||||
"trust_weight": 50,
|
||||
"ad_weight": 500,
|
||||
"dist_penalty": 20,
|
||||
"can_use_prefs": True,
|
||||
"search_radius_km": 50
|
||||
},
|
||||
"category": "search",
|
||||
"description": "Prémium csomag rangsorolási szabályai"
|
||||
})
|
||||
|
||||
# 3. VIP CSOMAG SZINTŰ BEÁLLÍTÁS
|
||||
params.append({
|
||||
"key": "RANKING_RULES",
|
||||
"scope_level": "package",
|
||||
"scope_id": "vip",
|
||||
"value": {
|
||||
"pref_weight": 20000, # A kedvenc mindent visz
|
||||
"partner_weight": 5000,
|
||||
"trust_weight": 100, # A minőség számít
|
||||
"ad_weight": 0, # VIP-nek nem tolunk hirdetést az élre
|
||||
"dist_penalty": 5, # Alig büntetjük a távolságot
|
||||
"can_use_prefs": True,
|
||||
"search_radius_km": 150
|
||||
},
|
||||
"category": "search",
|
||||
"description": "VIP csomag rangsorolási szabályai"
|
||||
})
|
||||
|
||||
# 4. EGYÉNI CÉGES FELÜLBÍRÁLÁS (Pl. ProfiBot Flotta Co.)
|
||||
params.append({
|
||||
"key": "RANKING_RULES",
|
||||
"scope_level": "user",
|
||||
"scope_id": "99",
|
||||
"value": {
|
||||
"pref_weight": 50000, # Nekik csak a saját szerződött partnereik kellenek
|
||||
"can_use_prefs": True,
|
||||
"search_radius_km": 500 # Az egész országot látják
|
||||
},
|
||||
"category": "search",
|
||||
"description": "Egyedi flotta-ügyfél keresési szabályai"
|
||||
})
|
||||
|
||||
logger.info("🚀 Rendszerparaméterek szinkronizálása a 2.0-ás modell szerint...")
|
||||
added_count = 0
|
||||
updated_count = 0
|
||||
|
||||
for p in params:
|
||||
# GONDOLATMENET A JAVÍTÁSHOZ:
|
||||
# Muszáj a scope_level-t és scope_id-t is vizsgálni, különben az SQLAlchemy
|
||||
# összeomlik (MultipleResultsFound), mert ugyanaz a 'key' (pl. RANKING_RULES)
|
||||
# több sorban is szerepel a hierarchia miatt!
|
||||
|
||||
s_level = p.get("scope_level", "global")
|
||||
s_id = p.get("scope_id", None)
|
||||
|
||||
stmt = select(SystemParameter).where(
|
||||
SystemParameter.key == p["key"],
|
||||
SystemParameter.scope_level == s_level,
|
||||
SystemParameter.scope_id == s_id
|
||||
)
|
||||
res = await db.execute(stmt)
|
||||
existing = res.scalar_one_or_none()
|
||||
|
||||
if not existing:
|
||||
# Új rekord létrehozása
|
||||
new_param = SystemParameter(
|
||||
key=p["key"],
|
||||
value=p["value"],
|
||||
category=p["category"],
|
||||
description=p["description"],
|
||||
scope_level=s_level,
|
||||
scope_id=s_id,
|
||||
last_modified_by=None
|
||||
)
|
||||
db.add(new_param)
|
||||
added_count += 1
|
||||
# Azonnali commit, hogy a következő körben már lássa a DB!
|
||||
await db.commit()
|
||||
else:
|
||||
# Csak frissítés, ha szükséges
|
||||
existing.description = p["description"]
|
||||
existing.category = p["category"]
|
||||
updated_count += 1
|
||||
await db.commit()
|
||||
|
||||
logger.info(f"✅ Kész! Új: {added_count}, Frissített meta: {updated_count}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(seed_params())
|
||||
"key": "dvla_api_en
|
||||
@@ -2,7 +2,7 @@
|
||||
import asyncio
|
||||
from sqlalchemy import select
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.vehicle_definitions import VehicleType, FeatureDefinition
|
||||
from app.models import VehicleType, FeatureDefinition
|
||||
|
||||
async def seed_system_data():
|
||||
""" Alapvető típusok és extrák (Features) feltöltése. """
|
||||
|
||||
353
backend/app/scripts/smart_admin_audit.py
Normal file
353
backend/app/scripts/smart_admin_audit.py
Normal file
@@ -0,0 +1,353 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Smart Admin Audit Script
|
||||
|
||||
This script performs a targeted audit of the Service Finder admin system:
|
||||
1. Finds business hardcoded values (excluding trivial 0, 1, True, False)
|
||||
2. Identifies which API modules lack /admin prefixed endpoints
|
||||
3. Generates a comprehensive gap analysis report in Markdown format
|
||||
"""
|
||||
|
||||
import ast
|
||||
import os
|
||||
import re
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Set, Tuple, Any
|
||||
import sys
|
||||
|
||||
# Project root (relative to script location)
|
||||
# In container: /app/app/scripts/smart_admin_audit.py -> parent.parent.parent = /app
|
||||
# --- Audit configuration ----------------------------------------------------
# All paths are container-absolute: the script is expected to run inside the
# API container, where the backend code is mounted at /app.
PROJECT_ROOT = Path("/app")
BACKEND_DIR = PROJECT_ROOT  # /app is the backend root in container
ENDPOINTS_DIR = BACKEND_DIR / "app" / "api" / "v1" / "endpoints"
SERVICES_DIR = BACKEND_DIR / "app" / "services"
MODELS_DIR = BACKEND_DIR / "app" / "models"
OUTPUT_FILE = PROJECT_ROOT / "admin_gap_analysis.md"

# Patterns for business hardcoded values (exclude trivial values)
# Each regex matches an assignment of an integer literal to a variable whose
# name suggests a tunable business parameter.  find_hardcoded_values() strips
# the assignment tail (r"\s*=\s*(\d+)") before matching bare variable names.
BUSINESS_PATTERNS = [
    r"award_points\s*=\s*(\d+)",
    r"validation_level\s*=\s*(\d+)",
    r"max_vehicles\s*=\s*(\d+)",
    r"max_users\s*=\s*(\d+)",
    r"credit_limit\s*=\s*(\d+)",
    r"daily_limit\s*=\s*(\d+)",
    r"monthly_limit\s*=\s*(\d+)",
    r"threshold\s*=\s*(\d+)",
    r"quota\s*=\s*(\d+)",
    r"priority\s*=\s*(\d+)",
    r"timeout\s*=\s*(\d+)",
    r"retry_count\s*=\s*(\d+)",
    r"batch_size\s*=\s*(\d+)",
    r"page_size\s*=\s*(\d+)",
    r"cache_ttl\s*=\s*(\d+)",
    r"expiry_days\s*=\s*(\d+)",
    r"cooldown\s*=\s*(\d+)",
    r"penalty\s*=\s*(\d+)",
    r"reward\s*=\s*(\d+)",
    r"discount\s*=\s*(\d+)",
    r"commission\s*=\s*(\d+)",
    r"fee\s*=\s*(\d+)",
    r"vat_rate\s*=\s*(\d+)",
    r"service_fee\s*=\s*(\d+)",
    r"subscription_fee\s*=\s*(\d+)",
]

# Trivial values to exclude
# String forms of constants that never count as "hardcoded business config".
TRIVIAL_VALUES = {"0", "1", "True", "False", "None", "''", '""', "[]", "{}"}
|
||||
|
||||
def find_hardcoded_values() -> List[Dict[str, Any]]:
    """
    Scan Python files for business-relevant hardcoded values.

    Walks every ``.py`` file under ``BACKEND_DIR`` (skipping caches, venvs,
    tests and migrations), parses each file's AST, and records constant
    assignments that look like tunable business configuration.

    Returns:
        List of findings, each a dict with ``file`` (path relative to
        ``PROJECT_ROOT``), ``line``, ``variable``, ``value`` (string form,
        truncated for long strings) and ``context`` (the assignment source).
    """
    findings: List[Dict[str, Any]] = []

    # Compile the name patterns once (the previous version recompiled per
    # variable) and strip the "= <number>" tail so they match bare names.
    name_patterns = [re.compile(p.replace(r"\s*=\s*(\d+)", "")) for p in BUSINESS_PATTERNS]

    for root, dirs, files in os.walk(BACKEND_DIR):
        # Skip virtual environments and test directories
        if any(exclude in root for exclude in ("__pycache__", ".venv", "tests", "migrations")):
            continue

        for file in files:
            if not file.endswith(".py"):
                continue
            filepath = Path(root) / file
            try:
                content = filepath.read_text(encoding="utf-8")
                tree = ast.parse(content, filename=str(filepath))
            except (SyntaxError, UnicodeDecodeError, OSError):
                # Unparseable or unreadable file: not this audit's concern.
                continue

            for node in ast.walk(tree):
                if not isinstance(node, ast.Assign):
                    continue
                for target in node.targets:
                    # Only simple `name = <constant>` assignments are audited.
                    if not isinstance(target, ast.Name) or not isinstance(node.value, ast.Constant):
                        continue
                    var_name = target.id
                    value = node.value.value
                    value_str = str(value)

                    # Skip trivial values
                    if value_str in TRIVIAL_VALUES:
                        continue

                    finding = {
                        "file": str(filepath.relative_to(PROJECT_ROOT)),
                        "line": node.lineno,
                        "variable": var_name,
                        "value": value_str,
                        "context": ast.get_source_segment(content, node),
                    }

                    # Record each assignment at most once.  BUGFIX: the old
                    # version also ran the generic numeric check after a
                    # business-pattern match, duplicating those findings.
                    if any(p.match(var_name) for p in name_patterns):
                        findings.append(finding)
                    elif isinstance(value, (int, float)) and value > 1:
                        # Generic numeric constant that may deserve config.
                        findings.append(finding)
                    elif isinstance(value, str) and len(value) > 10 and " " not in value:
                        # Could be API keys, URLs, etc — truncate for the report.
                        finding["value"] = f'"{value_str[:50]}..."'
                        findings.append(finding)

    return findings
|
||||
|
||||
def analyze_admin_endpoints() -> Dict[str, Dict[str, Any]]:
    """
    Analyze which API modules have /admin prefixed endpoints.

    Scans every endpoint module under ``ENDPOINTS_DIR`` and reports, per
    module: whether its router declares an ``/admin`` prefix, how many
    individual admin routes and admin-named functions it contains, its
    file size, and whether the module itself is the dedicated admin file.
    """
    analysis: Dict[str, Dict[str, Any]] = {}

    if not ENDPOINTS_DIR.exists():
        print(f"Warning: Endpoints directory not found: {ENDPOINTS_DIR}")
        return analysis

    # Compile the detection patterns once for the whole scan.
    prefix_re = re.compile(r"router\s*=\s*APIRouter\(.*?prefix\s*=\s*[\"']/admin[\"']", re.DOTALL)
    route_re = re.compile(r'@router\.\w+\([\"\'][^\"\']*?/admin[^\"\']*?[\"\']')
    func_re = re.compile(r"def\s+\w+.*admin.*:", re.IGNORECASE)

    for source_path in ENDPOINTS_DIR.glob("*.py"):
        source = source_path.read_text(encoding="utf-8")
        analysis[source_path.stem] = {
            "has_admin_prefix": prefix_re.search(source) is not None,
            "admin_routes_count": len(route_re.findall(source)),
            "admin_functions": len(func_re.findall(source)),
            "file_size": len(source),
            "has_admin_file": source_path.stem == "admin",
        }

    return analysis
|
||||
|
||||
def identify_missing_admin_modules(modules: Dict[str, Dict[str, Any]]) -> List[str]:
    """
    Identify which core modules lack admin endpoints.

    A module counts as missing when it was not analyzed at all, or when it
    has neither an ``/admin`` router prefix nor any admin routes.
    """
    core_modules = (
        "users", "vehicles", "services", "assets", "organizations",
        "billing", "gamification", "analytics", "security", "documents",
        "evidence", "expenses", "finance_admin", "notifications", "reports",
        "catalog", "providers", "search", "social", "system_parameters",
    )

    def lacks_admin_surface(name: str) -> bool:
        # Unknown module == no admin surface at all.
        info = modules.get(name)
        if info is None:
            return True
        return not info["has_admin_prefix"] and info["admin_routes_count"] == 0

    return [name for name in core_modules if lacks_admin_surface(name)]
|
||||
|
||||
def generate_markdown_report(hardcoded_findings: List[Dict[str, Any]],
                             modules: Dict[str, Dict[str, Any]],
                             missing_admin_modules: List[str]) -> str:
    """
    Generate comprehensive Markdown report.

    Args:
        hardcoded_findings: Findings produced by find_hardcoded_values().
        modules: Per-module analysis from analyze_admin_endpoints().
        missing_admin_modules: Output of identify_missing_admin_modules().

    Returns:
        The full report as one Markdown string with sections: executive
        summary, hardcoded-values table, admin-endpoint analysis, critical
        gaps, phased recommendations, and scan parameters.
    """
    report = []
    report.append("# Admin System Gap Analysis Report")
    report.append(f"*Generated: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*")
    report.append("")

    # Executive Summary
    report.append("## 📊 Executive Summary")
    report.append("")
    report.append(f"- **Total hardcoded business values found:** {len(hardcoded_findings)}")
    report.append(f"- **API modules analyzed:** {len(modules)}")
    report.append(f"- **Modules missing admin endpoints:** {len(missing_admin_modules)}")
    report.append("")

    # Hardcoded Values Section
    report.append("## 🔍 Hardcoded Business Values")
    report.append("")
    report.append("These values should be moved to `system_parameters` table for dynamic configuration.")
    report.append("")

    if hardcoded_findings:
        report.append("| File | Line | Variable | Value | Context |")
        report.append("|------|------|----------|-------|---------|")
        for finding in hardcoded_findings[:50]:  # Limit to 50 for readability
            file_link = finding["file"]
            line = finding["line"]
            variable = finding["variable"]
            value = finding["value"]
            # Escape pipes and flatten newlines so the Markdown table survives.
            context = finding["context"].replace("|", "\\|").replace("\n", " ").strip()[:100]
            report.append(f"| `{file_link}` | {line} | `{variable}` | `{value}` | `{context}` |")

        if len(hardcoded_findings) > 50:
            report.append(f"\n*... and {len(hardcoded_findings) - 50} more findings*")
    else:
        report.append("*No significant hardcoded business values found.*")
    report.append("")

    # Admin Endpoints Analysis
    report.append("## 🏗️ Admin Endpoints Analysis")
    report.append("")
    report.append("### Modules with Admin Prefix")
    report.append("")

    admin_modules = [m for m, info in modules.items() if info["has_admin_prefix"]]
    if admin_modules:
        report.append(", ".join(f"`{m}`" for m in admin_modules))
    else:
        report.append("*No modules have `/admin` prefix*")
    report.append("")

    report.append("### Modules with Admin Routes (but no prefix)")
    report.append("")
    # "Mixed" = admin routes declared inside a router without an /admin prefix.
    mixed_modules = [m for m, info in modules.items() if not info["has_admin_prefix"] and info["admin_routes_count"] > 0]
    if mixed_modules:
        for module in mixed_modules:
            info = modules[module]
            report.append(f"- `{module}`: {info['admin_routes_count']} admin routes")
    else:
        report.append("*No mixed admin routes found*")
    report.append("")

    # Missing Admin Modules
    report.append("## ⚠️ Critical Gaps: Missing Admin Endpoints")
    report.append("")
    report.append("These core business modules lack dedicated admin endpoints:")
    report.append("")

    if missing_admin_modules:
        for module in missing_admin_modules:
            report.append(f"- **{module}** - No `/admin` prefix and no admin routes")
        report.append("")
        report.append("### Recommended Actions:")
        report.append("1. Create `/admin` prefixed routers for each missing module")
        report.append("2. Implement CRUD endpoints for administrative operations")
        report.append("3. Add audit logging and permission checks")
    else:
        report.append("*All core modules have admin endpoints!*")
    report.append("")

    # Recommendations
    report.append("## 🚀 Recommendations")
    report.append("")
    report.append("### Phase 1: Hardcode Elimination")
    report.append("1. Create `system_parameters` migration if not exists")
    report.append("2. Move identified hardcoded values to database")
    report.append("3. Implement `ConfigService` for dynamic value retrieval")
    report.append("")
    report.append("### Phase 2: Admin Endpoint Expansion")
    report.append("1. Prioritize modules with highest business impact:")
    report.append("   - `users` (user management)")
    report.append("   - `billing` (financial oversight)")
    report.append("   - `security` (access control)")
    report.append("2. Follow consistent pattern: `/admin/{module}/...`")
    report.append("3. Implement RBAC with `admin` and `superadmin` roles")
    report.append("")
    report.append("### Phase 3: Monitoring & Audit")
    report.append("1. Add admin action logging to `SecurityAuditLog`")
    report.append("2. Implement admin dashboard with real-time metrics")
    report.append("3. Create automated health checks for admin endpoints")
    report.append("")

    # Technical Details
    report.append("## 🔧 Technical Details")
    report.append("")
    report.append("### Scan Parameters")
    report.append(f"- Project root: `{PROJECT_ROOT}`")
    report.append(f"- Files scanned: Python files in `{BACKEND_DIR}`")
    report.append(f"- Business patterns: {len(BUSINESS_PATTERNS)}")
    report.append(f"- Trivial values excluded: {', '.join(TRIVIAL_VALUES)}")
    report.append("")

    return "\n".join(report)
|
||||
|
||||
def main():
    """Main execution function.

    Runs the audit phases (hardcode scan, endpoint analysis, gap detection,
    report generation), writes the Markdown report to ``OUTPUT_FILE`` and
    prints a short console summary.

    Returns:
        Process exit code (always 0; failures surface as exceptions).
    """
    print("🔍 Starting Smart Admin Audit...")

    # 1. Find hardcoded values
    print("Step 1: Scanning for hardcoded business values...")
    hardcoded_findings = find_hardcoded_values()
    print(f"  Found {len(hardcoded_findings)} potential hardcoded values")

    # 2. Analyze admin endpoints
    print("Step 2: Analyzing admin endpoints...")
    modules = analyze_admin_endpoints()
    print(f"  Analyzed {len(modules)} API modules")

    # 3. Identify missing admin modules
    missing_admin_modules = identify_missing_admin_modules(modules)
    print(f"  Found {len(missing_admin_modules)} modules missing admin endpoints")

    # 4. Generate report
    print("Step 3: Generating Markdown report...")
    # FIX: removed the redundant local `import datetime` — the module already
    # imports it at the top of the file.
    report = generate_markdown_report(hardcoded_findings, modules, missing_admin_modules)

    # Write to file
    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        f.write(report)

    print(f"✅ Report generated: {OUTPUT_FILE}")
    print(f"  - Hardcoded values: {len(hardcoded_findings)}")
    print(f"  - Modules analyzed: {len(modules)}")
    print(f"  - Missing admin: {len(missing_admin_modules)}")

    # Print summary to console
    if missing_admin_modules:
        print("\n⚠️ CRITICAL GAPS:")
        for module in missing_admin_modules[:5]:
            print(f"  - {module} lacks admin endpoints")
        if len(missing_admin_modules) > 5:
            print(f"  ... and {len(missing_admin_modules) - 5} more")

    return 0


if __name__ == "__main__":
    sys.exit(main())
|
||||
@@ -1,169 +1,153 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/scripts/sync_engine.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Universal Schema Synchronizer
|
||||
|
||||
Dynamically imports all SQLAlchemy models from app.models, compares them with the live database,
|
||||
and creates missing tables/columns without dropping anything.
|
||||
|
||||
Safety First:
|
||||
- NEVER drops tables or columns.
|
||||
- Prints planned SQL before execution.
|
||||
- Requires confirmation for destructive operations (none in this script).
|
||||
"""
|
||||
|
||||
# docker exec -it sf_api python -m app.scripts.sync_engine
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.schema import CreateTable, AddConstraint
|
||||
from sqlalchemy.sql.ddl import CreateColumn
|
||||
from sqlalchemy.schema import CreateTable
|
||||
|
||||
# Add backend to path
|
||||
# Path beállítása
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
|
||||
"""
|
||||
Dynamically import all .py files in app.models directory to ensure Base.metadata is populated.
|
||||
"""
|
||||
"""Modellek betöltése a Metadata feltöltéséhez."""
|
||||
models_dir = Path(__file__).parent.parent / "models"
|
||||
imported = []
|
||||
|
||||
for py_file in models_dir.glob("*.py"):
|
||||
if py_file.name == "__init__.py":
|
||||
continue
|
||||
module_name = f"app.models.{py_file.stem}"
|
||||
# Rekurzív bejárás az alkönyvtárakkal együtt
|
||||
for py_file in models_dir.rglob("*.py"):
|
||||
if py_file.name == "__init__.py": continue
|
||||
# Számítsuk ki a modulnevet a models könyvtárhoz képest
|
||||
relative_path = py_file.relative_to(models_dir)
|
||||
# Konvertáljuk path-t modulná: pl. identity/identity.py -> identity.identity
|
||||
module_stem = str(relative_path).replace('/', '.').replace('\\', '.')[:-3] # eltávolítjuk a .py-t
|
||||
module_name = f"app.models.{module_stem}"
|
||||
try:
|
||||
module = importlib.import_module(module_name)
|
||||
imported.append(module_name)
|
||||
print(f"✅ Imported {module_name}")
|
||||
importlib.import_module(module_name)
|
||||
except Exception as e:
|
||||
print(f"⚠️ Could not import {module_name}: {e}")
|
||||
|
||||
# Also ensure the __init__ is loaded (it imports many models manually)
|
||||
import app.models
|
||||
print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
|
||||
return imported
|
||||
# Csak debug célra
|
||||
print(f"Failed to import {module_name}: {e}")
|
||||
pass
|
||||
|
||||
async def compare_and_repair():
|
||||
"""
|
||||
Compare SQLAlchemy metadata with live database and create missing tables/columns.
|
||||
"""
|
||||
print("🔗 Connecting to database...")
|
||||
async def perform_detailed_audit():
|
||||
engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))
|
||||
|
||||
def get_diff_and_repair(connection):
|
||||
# Audit számlálók
|
||||
stats = {"ok": 0, "fixed": 0, "extra": 0, "missing": 0}
|
||||
|
||||
def audit_logic(connection):
|
||||
inspector = inspect(connection)
|
||||
metadata = Base.metadata
|
||||
db_schemas = inspector.get_schema_names()
|
||||
model_schemas = sorted({t.schema for t in metadata.sorted_tables if t.schema})
|
||||
|
||||
print("\n" + "="*80)
|
||||
print(f"{'🔍 RÉSZLETES SCHEMA AUDIT JELENTÉS':^80}")
|
||||
print("="*80)
|
||||
|
||||
# --- A IRÁNY: KÓD -> ADATBÁZIS (Minden ellenőrzése) ---
|
||||
print(f"\n[A IRÁNY: Kód (SQLAlchemy) -> Adatbázis (PostgreSQL)]")
|
||||
print("-" * 50)
|
||||
|
||||
# Get all schemas from models
|
||||
expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
|
||||
print(f"📋 Expected schemas: {expected_schemas}")
|
||||
|
||||
# Ensure enum types exist in marketplace schema
|
||||
if 'marketplace' in expected_schemas:
|
||||
print("\n🔧 Ensuring enum types in marketplace schema...")
|
||||
# moderation_status enum
|
||||
connection.execute(text("""
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'moderation_status' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
|
||||
CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected');
|
||||
END IF;
|
||||
END $$;
|
||||
"""))
|
||||
# source_type enum
|
||||
connection.execute(text("""
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'source_type' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
|
||||
CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import');
|
||||
END IF;
|
||||
END $$;
|
||||
"""))
|
||||
print("✅ Enum types ensured.")
|
||||
|
||||
for schema in expected_schemas:
|
||||
print(f"\n--- 🔍 Checking schema '{schema}' ---")
|
||||
|
||||
# Check if schema exists
|
||||
db_schemas = inspector.get_schema_names()
|
||||
for schema in model_schemas:
|
||||
# 1. Séma ellenőrzése
|
||||
if schema not in db_schemas:
|
||||
print(f"❌ Schema '{schema}' missing. Creating...")
|
||||
print(f"❌ HIÁNYZIK: Séma [{schema}] -> Létrehozás...")
|
||||
connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
|
||||
print(f"✅ Schema '{schema}' created.")
|
||||
|
||||
# Get tables in this schema from models
|
||||
model_tables = [t for t in Base.metadata.sorted_tables if t.schema == schema]
|
||||
stats["fixed"] += 1
|
||||
else:
|
||||
print(f"✅ RENDBEN: Séma [{schema}] létezik.")
|
||||
stats["ok"] += 1
|
||||
|
||||
db_tables = inspector.get_table_names(schema=schema)
|
||||
|
||||
model_tables = [t for t in metadata.sorted_tables if t.schema == schema]
|
||||
|
||||
for table in model_tables:
|
||||
full_name = f"{schema}.{table.name}"
|
||||
|
||||
# 2. Tábla ellenőrzése
|
||||
if table.name not in db_tables:
|
||||
print(f"❌ Missing table: {schema}.{table.name}")
|
||||
# Generate CREATE TABLE statement
|
||||
create_stmt = CreateTable(table)
|
||||
# Print SQL for debugging
|
||||
sql_str = str(create_stmt.compile(bind=engine))
|
||||
print(f" SQL: {sql_str}")
|
||||
connection.execute(create_stmt)
|
||||
print(f"✅ Table {schema}.{table.name} created.")
|
||||
print(f" ❌ HIÁNYZIK: Tábla [{full_name}] -> Létrehozás...")
|
||||
connection.execute(CreateTable(table))
|
||||
stats["fixed"] += 1
|
||||
continue
|
||||
else:
|
||||
# Check columns
|
||||
db_columns = {c['name']: c for c in inspector.get_columns(table.name, schema=schema)}
|
||||
model_columns = table.columns
|
||||
|
||||
missing_cols = []
|
||||
for col in model_columns:
|
||||
if col.name not in db_columns:
|
||||
missing_cols.append(col)
|
||||
|
||||
if missing_cols:
|
||||
print(f"⚠️ Table {schema}.{table.name} missing columns: {[c.name for c in missing_cols]}")
|
||||
for col in missing_cols:
|
||||
# Generate ADD COLUMN statement
|
||||
col_type = col.type.compile(dialect=engine.dialect)
|
||||
sql = f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
|
||||
if col.nullable is False:
|
||||
sql += " NOT NULL"
|
||||
if col.default is not None:
|
||||
# Handle default values (simplistic)
|
||||
sql += f" DEFAULT {col.default.arg}"
|
||||
print(f" SQL: {sql}")
|
||||
connection.execute(text(sql))
|
||||
print(f"✅ Column {col.name} added.")
|
||||
print(f" ✅ RENDBEN: Tábla [{full_name}] létezik.")
|
||||
stats["ok"] += 1
|
||||
|
||||
# 3. Oszlopok ellenőrzése
|
||||
db_cols = {c['name']: c for c in inspector.get_columns(table.name, schema=schema)}
|
||||
for col in table.columns:
|
||||
col_path = f"{full_name}.{col.name}"
|
||||
if col.name not in db_cols:
|
||||
print(f" ❌ HIÁNYZIK: Oszlop [{col_path}] -> Hozzáadás...")
|
||||
col_type = col.type.compile(dialect=connection.dialect)
|
||||
default_sql = ""
|
||||
if col.server_default is not None:
|
||||
arg = col.server_default.arg
|
||||
val = arg.text if hasattr(arg, 'text') else str(arg)
|
||||
default_sql = f" DEFAULT {val}"
|
||||
null_sql = " NOT NULL" if not col.nullable else ""
|
||||
connection.execute(text(f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}{default_sql}{null_sql}'))
|
||||
stats["fixed"] += 1
|
||||
else:
|
||||
print(f"✅ Table {schema}.{table.name} is up‑to‑date.")
|
||||
print(f" ✅ RENDBEN: Oszlop [{col_path}]")
|
||||
stats["ok"] += 1
|
||||
|
||||
# --- B IRÁNY: ADATBÁZIS -> KÓD (Árnyék adatok keresése) ---
|
||||
print(f"\n[B IRÁNY: Adatbázis -> Kód (Extra elemek keresése)]")
|
||||
print("-" * 50)
|
||||
|
||||
print("\n--- ✅ Schema synchronization complete. ---")
|
||||
|
||||
for schema in model_schemas:
|
||||
if schema not in db_schemas: continue
|
||||
|
||||
db_tables = inspector.get_table_names(schema=schema)
|
||||
model_table_names = {t.name for t in metadata.sorted_tables if t.schema == schema}
|
||||
|
||||
for db_table in db_tables:
|
||||
# Ignore deprecated tables (ending with _deprecated)
|
||||
if db_table.endswith("_deprecated"):
|
||||
continue
|
||||
full_db_name = f"{schema}.{db_table}"
|
||||
if db_table not in model_table_names:
|
||||
print(f" ⚠️ EXTRA TÁBLA: [{full_db_name}] (Nincs a kódban!)")
|
||||
stats["extra"] += 1
|
||||
else:
|
||||
# Extra oszlopok a táblán belül
|
||||
db_cols = inspector.get_columns(db_table, schema=schema)
|
||||
model_col_names = {c.name for c in metadata.tables[full_db_name].columns}
|
||||
|
||||
for db_col in db_cols:
|
||||
col_name = db_col['name']
|
||||
if col_name not in model_col_names:
|
||||
print(f" ⚠️ EXTRA OSZLOP: [{full_db_name}.{col_name}]")
|
||||
stats["extra"] += 1
|
||||
|
||||
# --- ÖSSZESÍTŐ ---
|
||||
print("\n" + "="*80)
|
||||
print(f"{'📊 AUDIT ÖSSZESÍTŐ':^80}")
|
||||
print("="*80)
|
||||
print(f" ✅ Megfelelt (OK): {stats['ok']:>4} elem")
|
||||
print(f" ❌ Javítva/Pótolva (Fixed): {stats['fixed']:>4} elem")
|
||||
print(f" ⚠️ Extra (Shadow Data): {stats['extra']:>4} elem")
|
||||
print("-" * 80)
|
||||
if stats["fixed"] == 0 and stats["extra"] == 0:
|
||||
print(f"{'✨ A RENDSZER TÖKÉLETESEN SZINKRONBAN VAN!':^80}")
|
||||
else:
|
||||
print(f"{'ℹ️ A rendszer üzemkész, de nézd át az extra (Shadow) elemeket!':^80}")
|
||||
print("="*80 + "\n")
|
||||
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(get_diff_and_repair)
|
||||
|
||||
await conn.run_sync(audit_logic)
|
||||
await engine.dispose()
|
||||
|
||||
async def main():
|
||||
print("🚀 Universal Schema Synchronizer")
|
||||
print("=" * 50)
|
||||
|
||||
# Step 1: Dynamic import
|
||||
print("\n📥 Step 1: Dynamically importing all models...")
|
||||
dynamic_import_models()
|
||||
|
||||
# Step 2: Compare and repair
|
||||
print("\n🔧 Step 2: Comparing with database and repairing...")
|
||||
await compare_and_repair()
|
||||
|
||||
# Step 3: Final verification
|
||||
print("\n📊 Step 3: Final verification...")
|
||||
# Run compare_schema.py logic to confirm everything is green
|
||||
from app.tests_internal.diagnostics.compare_schema import compare
|
||||
await compare()
|
||||
|
||||
print("\n✨ Synchronization finished successfully!")
|
||||
await perform_detailed_audit()
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
170
backend/app/scripts/sync_engine1.0.py.old
Normal file
170
backend/app/scripts/sync_engine1.0.py.old
Normal file
@@ -0,0 +1,170 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/scripts/sync_engine.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Universal Schema Synchronizer
|
||||
|
||||
Dynamically imports all SQLAlchemy models from app.models, compares them with the live database,
|
||||
and creates missing tables/columns without dropping anything.
|
||||
|
||||
Safety First:
|
||||
- NEVER drops tables or columns.
|
||||
- Prints planned SQL before execution.
|
||||
- Requires confirmation for destructive operations (none in this script).
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import inspect, text
|
||||
from sqlalchemy.schema import CreateTable, AddConstraint
|
||||
from sqlalchemy.sql.ddl import CreateColumn
|
||||
|
||||
# Add backend to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from app.database import Base
|
||||
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """
    Import every module under ``app/models`` so all table definitions
    register themselves on ``Base.metadata``.

    Returns:
        list[str]: dotted names of the modules that imported successfully.
    """
    models_dir = Path(__file__).parent.parent / "models"
    imported = []

    for py_file in models_dir.glob("*.py"):
        if py_file.name == "__init__.py":
            continue
        module_name = f"app.models.{py_file.stem}"
        try:
            # FIX: the returned module object was bound to an unused local;
            # importing for the registration side effect is all we need.
            importlib.import_module(module_name)
            imported.append(module_name)
            print(f"✅ Imported {module_name}")
        except Exception as e:
            # Best-effort: one broken model file must not abort the whole sync.
            print(f"⚠️ Could not import {module_name}: {e}")

    # Also ensure the __init__ is loaded (it imports many models manually)
    import app.models
    print(f"📦 Total tables in Base.metadata: {len(Base.metadata.tables)}")
    return imported
|
||||
|
||||
async def compare_and_repair():
    """
    Compare SQLAlchemy metadata with the live database and create any
    missing schemas, tables and columns.

    Strictly additive: never drops or alters existing objects, and prints
    each planned SQL statement before executing it.
    """
    print("🔗 Connecting to database...")
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def get_diff_and_repair(connection):
        # Executed via conn.run_sync(); `connection` is the sync facade.
        inspector = inspect(connection)

        # Get all schemas from models
        expected_schemas = sorted({t.schema for t in Base.metadata.sorted_tables if t.schema})
        print(f"📋 Expected schemas: {expected_schemas}")

        # Ensure enum types exist in the marketplace schema before any
        # table referencing them is created.
        if 'marketplace' in expected_schemas:
            print("\n🔧 Ensuring enum types in marketplace schema...")
            # moderation_status enum
            connection.execute(text("""
                DO $$
                BEGIN
                    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'moderation_status' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                        CREATE TYPE marketplace.moderation_status AS ENUM ('pending', 'approved', 'rejected');
                    END IF;
                END $$;
            """))
            # source_type enum
            connection.execute(text("""
                DO $$
                BEGIN
                    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'source_type' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'marketplace')) THEN
                        CREATE TYPE marketplace.source_type AS ENUM ('manual', 'ocr', 'import');
                    END IF;
                END $$;
            """))
            print("✅ Enum types ensured.")

        for schema in expected_schemas:
            print(f"\n--- 🔍 Checking schema '{schema}' ---")

            # Check if schema exists
            db_schemas = inspector.get_schema_names()
            if schema not in db_schemas:
                print(f"❌ Schema '{schema}' missing. Creating...")
                connection.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
                print(f"✅ Schema '{schema}' created.")

            # Tables declared on the models for this schema vs. live tables
            model_tables = [t for t in Base.metadata.sorted_tables if t.schema == schema]
            db_tables = inspector.get_table_names(schema=schema)

            for table in model_tables:
                if table.name not in db_tables:
                    print(f"❌ Missing table: {schema}.{table.name}")
                    # Generate CREATE TABLE statement
                    create_stmt = CreateTable(table)
                    # Print SQL for debugging
                    sql_str = str(create_stmt.compile(bind=engine))
                    print(f"   SQL: {sql_str}")
                    connection.execute(create_stmt)
                    print(f"✅ Table {schema}.{table.name} created.")
                else:
                    # Check columns
                    db_columns = {c['name']: c for c in inspector.get_columns(table.name, schema=schema)}
                    missing_cols = [col for col in table.columns if col.name not in db_columns]

                    if missing_cols:
                        print(f"⚠️ Table {schema}.{table.name} missing columns: {[c.name for c in missing_cols]}")
                        for col in missing_cols:
                            # Generate ADD COLUMN statement
                            col_type = col.type.compile(dialect=engine.dialect)
                            sql = f'ALTER TABLE "{schema}"."{table.name}" ADD COLUMN "{col.name}" {col_type}'
                            # BUG FIX: use server_default (a SQL-side default)
                            # instead of col.default, whose .arg may be a Python
                            # callable and would render as invalid SQL such as
                            # "DEFAULT <function ...>".  Emit DEFAULT before
                            # NOT NULL so existing rows get backfilled.
                            if col.server_default is not None:
                                arg = col.server_default.arg
                                default_val = arg.text if hasattr(arg, 'text') else str(arg)
                                sql += f" DEFAULT {default_val}"
                            if col.nullable is False:
                                # NOTE(review): NOT NULL without a DEFAULT fails on
                                # tables that already contain rows — confirm the
                                # affected models ship a server_default.
                                sql += " NOT NULL"
                            print(f"   SQL: {sql}")
                            connection.execute(text(sql))
                            print(f"✅ Column {col.name} added.")
                    else:
                        print(f"✅ Table {schema}.{table.name} is up‑to‑date.")

        print("\n--- ✅ Schema synchronization complete. ---")

    async with engine.begin() as conn:
        await conn.run_sync(get_diff_and_repair)

    await engine.dispose()
|
||||
|
||||
async def main():
    """Entry point: load models, repair the live schema, then verify the result.

    Steps:
        1. Import every module under app.models so Base.metadata is complete.
        2. Create any missing schemas/tables/columns (additive only).
        3. Re-run the schema comparison to confirm everything matches.
    """
    print("🚀 Universal Schema Synchronizer")
    print("=" * 50)

    # Step 1: Dynamic import
    print("\n📥 Step 1: Dynamically importing all models...")
    dynamic_import_models()

    # Step 2: Compare and repair
    print("\n🔧 Step 2: Comparing with database and repairing...")
    await compare_and_repair()

    # Step 3: Final verification
    print("\n📊 Step 3: Final verification...")
    # Run compare_schema.py logic to confirm everything is green
    # (local import; presumably to avoid loading diagnostics at module
    # import time — confirm against project conventions).
    from app.tests_internal.diagnostics.compare_schema import compare
    await compare()

    print("\n✨ Synchronization finished successfully!")
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run the full synchronizer under asyncio.
    asyncio.run(main())
|
||||
67
backend/app/scripts/sync_python_models_generator.py
Normal file
67
backend/app/scripts/sync_python_models_generator.py
Normal file
@@ -0,0 +1,67 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/scripts/sync_python_models_generator.py
|
||||
#
|
||||
import asyncio
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from app.core.config import settings
|
||||
import sqlalchemy.types as types
|
||||
# PostgreSQL specifikus típusok importálása
|
||||
from sqlalchemy.dialects.postgresql import JSONB, UUID, ENUM
|
||||
|
||||
# Type mapping (fixed).
# Maps reflected SQLAlchemy / PostgreSQL type classes to the column type
# names emitted in the generated model code.
# NOTE: consumers match entries with isinstance() and take the FIRST hit,
# so ordering matters for related classes (e.g. VARCHAR and TEXT both
# derive from String); unmatched types fall back to "String".
TYPE_MAP = {
    types.INTEGER: "Integer",
    types.VARCHAR: "String",
    types.TEXT: "String",
    types.BOOLEAN: "Boolean",
    types.DATETIME: "DateTime",
    types.TIMESTAMP: "DateTime",
    types.NUMERIC: "Numeric",
    types.JSON: "JSON",
    JSONB: "JSONB",
    UUID: "UUID"
}
|
||||
|
||||
async def generate_perfect_models():
    """
    Inspect the live database and print ready-to-paste SQLAlchemy column
    definitions for every table in the audited schemas.

    Read-only: only runs introspection queries, never modifies the database.
    """
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    def analyze(connection):
        inspector = inspect(connection)
        # Only the schemas where the audit found extra (shadow) data.
        schemas = ['gamification', 'identity', 'marketplace', 'system', 'vehicle']

        print("\n" + "="*80)
        print(f"{'🛠️ PONTOS PYTHON MODELL KÓDOK A HIÁNYZÓ ELEMEKHEZ':^80}")
        print("="*80)

        for schema in schemas:
            tables = inspector.get_table_names(schema=schema)
            for table_name in tables:
                # Derive a singular PascalCase class name
                # (e.g. user_contributions -> UserContribution).
                class_name = "".join(x.capitalize() for x in table_name.split("_"))
                if class_name.endswith("s"):
                    class_name = class_name[:-1]

                # BUG FIX: Inspector.get_columns() dicts carry no
                # 'primary_key' key, so col.get('primary_key') was always
                # falsy and primary_key=True was never emitted.  Primary-key
                # membership must come from get_pk_constraint().
                pk_constraint = inspector.get_pk_constraint(table_name, schema=schema)
                pk_cols = set(pk_constraint.get("constrained_columns") or [])

                # BUG FIX: class_name was computed but never used; include it
                # in the header so the generated snippet is actually usable.
                print(f"\n# --- [{schema}.{table_name}] -> class {class_name} ---")

                for col in inspector.get_columns(table_name, schema=schema):
                    # Map the reflected DB type to a SQLAlchemy name;
                    # the first isinstance() match in TYPE_MAP wins,
                    # with "String" as the fallback.
                    col_raw_type = col['type']
                    col_type = "String"
                    for k, v in TYPE_MAP.items():
                        if isinstance(col_raw_type, k):
                            col_type = v
                            break

                    params = []
                    if col['name'] in pk_cols:
                        params.append("primary_key=True")
                    if not col.get('nullable'):
                        params.append("nullable=False")

                    param_str = ", ".join(params)
                    print(f"{col['name']} = Column({col_type}{', ' + param_str if param_str else ''})")

    async with engine.begin() as conn:
        await conn.run_sync(analyze)
    await engine.dispose()
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run the model-code generator under asyncio.
    asyncio.run(generate_perfect_models())
|
||||
Reference in New Issue
Block a user