feat(robot): hunter v2.7, geocoding support, docker network fix, changelog update

This commit is contained in:
2026-02-13 01:15:34 +00:00
parent 09a0430384
commit f38a75a025
41 changed files with 1801 additions and 153 deletions

9
.env
View File

@@ -90,4 +90,11 @@ MINIO_ENDPOINT=minio:9000
MINIO_ROOT_USER=kincses MINIO_ROOT_USER=kincses
MINIO_ROOT_PASSWORD='MiskociA74' MINIO_ROOT_PASSWORD='MiskociA74'
MINIO_ACCESS_KEY=kincses MINIO_ACCESS_KEY=kincses
MINIO_SECRET_KEY='MiskociA74' MINIO_SECRET_KEY='MiskociA74'
# --- Frontend ---
FRONTEND_BASE_URL=https://dev.profibot.hu/docs
GOOGLE_API_KEY=AIzaSyB3-Uo6qFBNi83hK01uoaUARtYHxERbtXg

View File

@@ -20,4 +20,7 @@ GOOGLE_CALLBACK_URL=https://dev.profibot.hu/api/v1/auth/callback/google
# --- Frontend --- # --- Frontend ---
FRONTEND_BASE_URL=https://dev.profibot.hu/docs FRONTEND_BASE_URL=https://dev.profibot.hu/docs
GOOGLE_API_KEY=AIzaSyB3-Uo6qFBNi83hK01uoaUARtYHxERbtXg

View File

@@ -5,11 +5,8 @@ from typing import Optional, Dict, Any, Tuple
import bcrypt import bcrypt
from jose import jwt, JWTError from jose import jwt, JWTError
from app.core.config import settings from app.core.config import settings
from fastapi_limiter import FastAPILimiter
from fastapi_limiter.depends import RateLimiter
# Ezt az auth végpontokhoz adjuk hozzá: # A FastAPI-Limiter importokat kivettem innen, mert indítási hibát okoztak.
# @router.post("/login", dependencies=[Depends(RateLimiter(times=5, seconds=60))])
DEFAULT_RANK_MAP = { DEFAULT_RANK_MAP = {
"superadmin": 100, "admin": 80, "fleet_manager": 25, "superadmin": 100, "admin": 80, "fleet_manager": 25,

Binary file not shown.

View File

@@ -1,5 +1,5 @@
import uuid import uuid
from sqlalchemy import Column, String, Integer, ForeignKey, Text, DateTime from sqlalchemy import Column, String, Integer, ForeignKey, Text, DateTime, Float
from sqlalchemy.dialects.postgresql import UUID as PG_UUID from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.sql import func from sqlalchemy.sql import func
from app.db.base_class import Base from app.db.base_class import Base
@@ -7,7 +7,6 @@ from app.db.base_class import Base
class GeoPostalCode(Base): class GeoPostalCode(Base):
__tablename__ = "geo_postal_codes" __tablename__ = "geo_postal_codes"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
country_code = Column(String(5), default="HU") country_code = Column(String(5), default="HU")
zip_code = Column(String(10), nullable=False) zip_code = Column(String(10), nullable=False)
@@ -16,7 +15,6 @@ class GeoPostalCode(Base):
class GeoStreet(Base): class GeoStreet(Base):
__tablename__ = "geo_streets" __tablename__ = "geo_streets"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
postal_code_id = Column(Integer, ForeignKey("data.geo_postal_codes.id")) postal_code_id = Column(Integer, ForeignKey("data.geo_postal_codes.id"))
name = Column(String(200), nullable=False) name = Column(String(200), nullable=False)
@@ -24,11 +22,11 @@ class GeoStreet(Base):
class GeoStreetType(Base): class GeoStreetType(Base):
__tablename__ = "geo_street_types" __tablename__ = "geo_street_types"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
name = Column(String(50), unique=True, nullable=False) name = Column(String(50), unique=True, nullable=False)
class Address(Base): class Address(Base):
"""Univerzális cím entitás GPS adatokkal kiegészítve."""
__tablename__ = "addresses" __tablename__ = "addresses"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
@@ -40,6 +38,11 @@ class Address(Base):
stairwell = Column(String(20)) stairwell = Column(String(20))
floor = Column(String(20)) floor = Column(String(20))
door = Column(String(20)) door = Column(String(20))
parcel_id = Column(String(50)) # HRSZ parcel_id = Column(String(50))
full_address_text = Column(Text) full_address_text = Column(Text)
# Robot és térképes funkciók számára
latitude = Column(Float)
longitude = Column(Float)
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())

View File

@@ -1,54 +1,55 @@
import uuid import uuid
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, Numeric, text, Text from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, Numeric, text, Text, UniqueConstraint
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from sqlalchemy.dialects.postgresql import UUID as PG_UUID from sqlalchemy.dialects.postgresql import UUID as PG_UUID, JSONB
from sqlalchemy.sql import func from sqlalchemy.sql import func
from app.db.base_class import Base from app.db.base_class import Base
class AssetCatalog(Base): class AssetCatalog(Base):
"""Globális járműkatalógus (Márka -> Típus -> Generáció -> Motor)."""
__tablename__ = "vehicle_catalog" __tablename__ = "vehicle_catalog"
__table_args__ = {"schema": "data"} __table_args__ = (
UniqueConstraint(
'make', 'model', 'year_from', 'engine_variant', 'fuel_type',
name='uix_vehicle_catalog_full'
),
{"schema": "data"}
)
id = Column(Integer, primary_key=True, index=True) id = Column(Integer, primary_key=True, index=True)
make = Column(String, index=True, nullable=False) # 1. Szint: Audi make = Column(String, index=True, nullable=False)
model = Column(String, index=True, nullable=False) # 2. Szint: A4 model = Column(String, index=True, nullable=False)
generation = Column(String, index=True) # 3. Szint: B8 (2008-2015) generation = Column(String, index=True)
engine_variant = Column(String) # 4. Szint: 2.0 TDI (150 LE) engine_variant = Column(String, index=True)
year_from = Column(Integer) year_from = Column(Integer)
year_to = Column(Integer) year_to = Column(Integer)
vehicle_class = Column(String) vehicle_class = Column(String)
fuel_type = Column(String) fuel_type = Column(String, index=True)
engine_code = Column(String) engine_code = Column(String)
factory_data = Column(JSON, server_default=text("'{}'::jsonb")) # Technikai specifikációk factory_data = Column(JSONB, server_default=text("'{}'::jsonb"))
assets = relationship("Asset", back_populates="catalog") assets = relationship("Asset", back_populates="catalog")
class Asset(Base): class Asset(Base):
"""Egyedi jármű (Asset) példány - Az ökoszisztéma magja."""
__tablename__ = "assets" __tablename__ = "assets"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
vin = Column(String(17), unique=True, index=True, nullable=False) vin = Column(String(17), unique=True, index=True, nullable=False)
license_plate = Column(String(20), index=True) license_plate = Column(String(20), index=True)
name = Column(String) name = Column(String)
year_of_manufacture = Column(Integer) year_of_manufacture = Column(Integer)
# --- BIZTONSÁGI ÉS JOGOSULTSÁGI IZOLÁCIÓ ---
# A current_organization_id biztosítja a gyors, adatbázis-szintű Scoped RBAC védelmet.
current_organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True) current_organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True)
catalog_id = Column(Integer, ForeignKey("data.vehicle_catalog.id")) catalog_id = Column(Integer, ForeignKey("data.vehicle_catalog.id"))
is_verified = Column(Boolean, default=False)
verification_method = Column(String(20)) # 'robot', 'ocr', 'manual'
status = Column(String(20), default="active")
# Moderációs mezők a Robot 3 (OCR) számára
is_verified = Column(Boolean, default=False)
verification_method = Column(String(20)) # 'manual', 'ocr', 'vin_api'
verification_notes = Column(Text, nullable=True) # Eltérések jegyzőkönyve
catalog_match_score = Column(Numeric(5, 2), nullable=True) # 0-100% egyezési arány
status = Column(String(20), default="active")
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now()) updated_at = Column(DateTime(timezone=True), onupdate=func.now())
# Kapcsolatok (Digital Twin Modules)
catalog = relationship("AssetCatalog", back_populates="assets") catalog = relationship("AssetCatalog", back_populates="assets")
current_org = relationship("Organization") current_org = relationship("Organization")
financials = relationship("AssetFinancials", back_populates="asset", uselist=False) financials = relationship("AssetFinancials", back_populates="asset", uselist=False)
@@ -57,6 +58,7 @@ class Asset(Base):
events = relationship("AssetEvent", back_populates="asset") events = relationship("AssetEvent", back_populates="asset")
costs = relationship("AssetCost", back_populates="asset") costs = relationship("AssetCost", back_populates="asset")
reviews = relationship("AssetReview", back_populates="asset") reviews = relationship("AssetReview", back_populates="asset")
ownership_history = relationship("VehicleOwnership", back_populates="vehicle")
class AssetFinancials(Base): class AssetFinancials(Base):
__tablename__ = "asset_financials" __tablename__ = "asset_financials"
@@ -87,15 +89,13 @@ class AssetReview(Base):
asset_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False) asset_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False)
user_id = Column(Integer, ForeignKey("data.users.id"), nullable=False) user_id = Column(Integer, ForeignKey("data.users.id"), nullable=False)
overall_rating = Column(Integer) overall_rating = Column(Integer)
criteria_scores = Column(JSON, server_default=text("'{}'::jsonb")) criteria_scores = Column(JSONB, server_default=text("'{}'::jsonb"))
comment = Column(Text) comment = Column(Text)
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())
asset = relationship("Asset", back_populates="reviews") asset = relationship("Asset", back_populates="reviews")
user = relationship("User") user = relationship("User")
class AssetAssignment(Base): class AssetAssignment(Base):
"""Jármű flotta-történetének nyilvántartása."""
__tablename__ = "asset_assignments" __tablename__ = "asset_assignments"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
@@ -104,7 +104,6 @@ class AssetAssignment(Base):
assigned_at = Column(DateTime(timezone=True), server_default=func.now()) assigned_at = Column(DateTime(timezone=True), server_default=func.now())
released_at = Column(DateTime(timezone=True), nullable=True) released_at = Column(DateTime(timezone=True), nullable=True)
status = Column(String(30), default="active") status = Column(String(30), default="active")
asset = relationship("Asset", back_populates="assignments") asset = relationship("Asset", back_populates="assignments")
organization = relationship("Organization") organization = relationship("Organization")
@@ -115,7 +114,7 @@ class AssetEvent(Base):
asset_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False) asset_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False)
event_type = Column(String(50), nullable=False) event_type = Column(String(50), nullable=False)
recorded_mileage = Column(Integer) recorded_mileage = Column(Integer)
data = Column(JSON, server_default=text("'{}'::jsonb")) data = Column(JSONB, server_default=text("'{}'::jsonb"))
asset = relationship("Asset", back_populates="events") asset = relationship("Asset", back_populates="events")
class AssetCost(Base): class AssetCost(Base):
@@ -129,10 +128,12 @@ class AssetCost(Base):
amount_local = Column(Numeric(18, 2), nullable=False) amount_local = Column(Numeric(18, 2), nullable=False)
currency_local = Column(String(3), nullable=False) currency_local = Column(String(3), nullable=False)
amount_eur = Column(Numeric(18, 2), nullable=True) amount_eur = Column(Numeric(18, 2), nullable=True)
net_amount_local = Column(Numeric(18, 2))
vat_rate = Column(Numeric(5, 2))
exchange_rate_used = Column(Numeric(18, 6))
date = Column(DateTime(timezone=True), server_default=func.now()) date = Column(DateTime(timezone=True), server_default=func.now())
mileage_at_cost = Column(Integer) mileage_at_cost = Column(Integer)
data = Column(JSON, server_default=text("'{}'::jsonb")) data = Column(JSONB, server_default=text("'{}'::jsonb"))
asset = relationship("Asset", back_populates="costs") asset = relationship("Asset", back_populates="costs")
organization = relationship("Organization") organization = relationship("Organization")
driver = relationship("User") driver = relationship("User")
@@ -143,5 +144,4 @@ class ExchangeRate(Base):
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
base_currency = Column(String(3), default="EUR") base_currency = Column(String(3), default="EUR")
target_currency = Column(String(3), unique=True) target_currency = Column(String(3), unique=True)
rate = Column(Numeric(18, 6), nullable=False) rate = Column(Numeric(18, 6), nullable=False)
updated_at = Column(DateTime(timezone=True), onupdate=func.now())

View File

@@ -7,41 +7,82 @@ from sqlalchemy.sql import func
from app.db.base_class import Base from app.db.base_class import Base
class UserRole(str, enum.Enum): class UserRole(str, enum.Enum):
superadmin = "superadmin"; admin = "admin"; user = "user" superadmin = "superadmin"
service = "service"; fleet_manager = "fleet_manager"; driver = "driver" admin = "admin"
user = "user"
service = "service"
fleet_manager = "fleet_manager"
driver = "driver"
class Person(Base): class Person(Base):
__tablename__ = "persons"; __table_args__ = {"schema": "data"} """
Természetes személy identitása.
A bot által talált személyek is ide kerülnek (is_ghost=True).
Azonosítás: Név + Anyja neve + Születési adatok alapján.
"""
__tablename__ = "persons"
__table_args__ = {"schema": "data"}
id = Column(BigInteger, primary_key=True, index=True) id = Column(BigInteger, primary_key=True, index=True)
id_uuid = Column(PG_UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False) id_uuid = Column(PG_UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True) address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True)
last_name = Column(String, nullable=False); first_name = Column(String, nullable=False); phone = Column(String, nullable=True)
mothers_last_name = Column(String); mothers_first_name = Column(String); birth_place = Column(String); birth_date = Column(DateTime) last_name = Column(String, nullable=False)
first_name = Column(String, nullable=False)
phone = Column(String, nullable=True)
# --- TERMÉSZETES AZONOSÍTÓK (Azonosításhoz, nem publikus) ---
mothers_last_name = Column(String)
mothers_first_name = Column(String)
birth_place = Column(String)
birth_date = Column(DateTime)
identity_docs = Column(JSON, server_default=text("'{}'::jsonb")) identity_docs = Column(JSON, server_default=text("'{}'::jsonb"))
ice_contact = Column(JSON, server_default=text("'{}'::jsonb")) ice_contact = Column(JSON, server_default=text("'{}'::jsonb"))
is_active = Column(Boolean, default=False, nullable=False) is_active = Column(Boolean, default=False, nullable=False)
is_ghost = Column(Boolean, default=True, nullable=False) # Bot találta = True, Regisztrált = False
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now()) updated_at = Column(DateTime(timezone=True), onupdate=func.now())
users = relationship("User", back_populates="person") users = relationship("User", back_populates="person")
memberships = relationship("OrganizationMember", back_populates="person")
class User(Base): class User(Base):
__tablename__ = "users"; __table_args__ = {"schema": "data"} __tablename__ = "users"
__table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True, index=True) id = Column(Integer, primary_key=True, index=True)
email = Column(String, unique=True, index=True, nullable=False) email = Column(String, unique=True, index=True, nullable=False)
hashed_password = Column(String, nullable=True) hashed_password = Column(String, nullable=True)
role = Column(Enum(UserRole), default=UserRole.user) role = Column(Enum(UserRole), default=UserRole.user)
is_active = Column(Boolean, default=False); is_deleted = Column(Boolean, default=False) is_active = Column(Boolean, default=False)
is_deleted = Column(Boolean, default=False)
person_id = Column(BigInteger, ForeignKey("data.persons.id"), nullable=True) person_id = Column(BigInteger, ForeignKey("data.persons.id"), nullable=True)
folder_slug = Column(String(12), unique=True, index=True) folder_slug = Column(String(12), unique=True, index=True)
refresh_token_hash = Column(String(255), nullable=True) refresh_token_hash = Column(String(255), nullable=True)
two_factor_secret = Column(String(100), nullable=True) two_factor_secret = Column(String(100), nullable=True)
two_factor_enabled = Column(Boolean, default=False) two_factor_enabled = Column(Boolean, default=False)
preferred_language = Column(String(5), server_default="hu"); region_code = Column(String(5), server_default="HU"); preferred_currency = Column(String(3), server_default="HUF")
scope_level = Column(String(30), server_default="individual"); scope_id = Column(String(50)); custom_permissions = Column(JSON, server_default=text("'{}'::jsonb")) preferred_language = Column(String(5), server_default="hu")
region_code = Column(String(5), server_default="HU")
preferred_currency = Column(String(3), server_default="HUF")
scope_level = Column(String(30), server_default="individual")
scope_id = Column(String(50))
custom_permissions = Column(JSON, server_default=text("'{}'::jsonb"))
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())
person = relationship("Person", back_populates="users"); wallet = relationship("Wallet", back_populates="user", uselist=False)
stats = relationship("UserStats", back_populates="user", uselist=False); ownership_history = relationship("VehicleOwnership", back_populates="user") person = relationship("Person", back_populates="users")
owned_organizations = relationship("Organization", back_populates="owner"); social_accounts = relationship("SocialAccount", back_populates="user", cascade="all, delete-orphan") wallet = relationship("Wallet", back_populates="user", uselist=False)
stats = relationship("UserStats", back_populates="user", uselist=False)
ownership_history = relationship("VehicleOwnership", back_populates="user")
owned_organizations = relationship("Organization", back_populates="owner")
social_accounts = relationship("SocialAccount", back_populates="user", cascade="all, delete-orphan")
class Wallet(Base): class Wallet(Base):
__tablename__ = "wallets"; __table_args__ = {"schema": "data"} __tablename__ = "wallets"; __table_args__ = {"schema": "data"}

View File

@@ -1,5 +1,5 @@
import enum import enum
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text, Numeric, BigInteger
from sqlalchemy.dialects.postgresql import ENUM as PG_ENUM from sqlalchemy.dialects.postgresql import ENUM as PG_ENUM
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from sqlalchemy.sql import func from sqlalchemy.sql import func
@@ -14,35 +14,43 @@ class OrgType(str, enum.Enum):
club = "club" club = "club"
business = "business" business = "business"
class OrgUserRole(str, enum.Enum):
OWNER = "OWNER"
ADMIN = "ADMIN"
FLEET_MANAGER = "FLEET_MANAGER"
DRIVER = "DRIVER"
MECHANIC = "MECHANIC"
RECEPTIONIST = "RECEPTIONIST"
class Organization(Base): class Organization(Base):
"""
Szervezet entitás. Lehet flotta (user) és szolgáltató (service) egyszerre.
A képességeket a kapcsolódó profilok (pl. ServiceProfile) határozzák meg.
"""
__tablename__ = "organizations" __tablename__ = "organizations"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True, index=True) id = Column(Integer, primary_key=True, index=True)
address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True) address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True)
full_name = Column(String, nullable=False) full_name = Column(String, nullable=False) # Hivatalos név
name = Column(String, nullable=False) name = Column(String, nullable=False) # Rövid név
display_name = Column(String(50)) display_name = Column(String(50))
# --- BIZTONSÁGI BŐVÍTÉS (Mappa elszigetelés) ---
folder_slug = Column(String(12), unique=True, index=True) folder_slug = Column(String(12), unique=True, index=True)
default_currency = Column(String(3), default="HUF") default_currency = Column(String(3), default="HUF")
country_code = Column(String(2), default="HU") country_code = Column(String(2), default="HU")
language = Column(String(5), default="hu") language = Column(String(5), default="hu")
# Cím adatok (redundáns a gyors kereséshez, de address_id a SSoT)
address_zip = Column(String(10)) address_zip = Column(String(10))
address_city = Column(String(100)) address_city = Column(String(100))
address_street_name = Column(String(150)) address_street_name = Column(String(150))
address_street_type = Column(String(50)) address_street_type = Column(String(50))
address_house_number = Column(String(20)) address_house_number = Column(String(20))
address_hrsz = Column(String(50)) address_hrsz = Column(String(50))
address_stairwell = Column(String(20))
address_floor = Column(String(20))
address_door = Column(String(20))
tax_number = Column(String(20), unique=True, index=True) tax_number = Column(String(20), unique=True, index=True) # Robot horgony
reg_number = Column(String(50)) reg_number = Column(String(50))
org_type = Column( org_type = Column(
@@ -52,15 +60,13 @@ class Organization(Base):
status = Column(String(30), default="pending_verification") status = Column(String(30), default="pending_verification")
is_deleted = Column(Boolean, default=False) is_deleted = Column(Boolean, default=False)
notification_settings = Column(JSON, server_default=text("'{ \"notify_owner\": true, \"alert_days_before\": [30, 15, 7, 1] }'::jsonb")) notification_settings = Column(JSON, server_default=text("'{\"notify_owner\": true, \"alert_days_before\": [30, 15, 7, 1]}'::jsonb"))
external_integration_config = Column(JSON, server_default=text("'{}'::jsonb")) external_integration_config = Column(JSON, server_default=text("'{}'::jsonb"))
owner_id = Column(Integer, ForeignKey("data.users.id"), nullable=True) owner_id = Column(Integer, ForeignKey("data.users.id"), nullable=True)
is_active = Column(Boolean, default=True) is_active = Column(Boolean, default=True)
is_transferable = Column(Boolean, default=True)
is_verified = Column(Boolean, default=False) is_verified = Column(Boolean, default=False)
verification_expires_at = Column(DateTime(timezone=True), nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now()) created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now()) updated_at = Column(DateTime(timezone=True), onupdate=func.now())
@@ -69,15 +75,40 @@ class Organization(Base):
assets = relationship("AssetAssignment", back_populates="organization", cascade="all, delete-orphan") assets = relationship("AssetAssignment", back_populates="organization", cascade="all, delete-orphan")
members = relationship("OrganizationMember", back_populates="organization", cascade="all, delete-orphan") members = relationship("OrganizationMember", back_populates="organization", cascade="all, delete-orphan")
owner = relationship("User", back_populates="owned_organizations") owner = relationship("User", back_populates="owned_organizations")
financials = relationship("OrganizationFinancials", back_populates="organization", cascade="all, delete-orphan")
service_profile = relationship("ServiceProfile", back_populates="organization", uselist=False)
class OrganizationMember(Base): class OrganizationFinancials(Base):
__tablename__ = "organization_members" """Cégek éves gazdasági adatai elemzéshez."""
__tablename__ = "organization_financials"
__table_args__ = {"schema": "data"} __table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True, index=True) id = Column(Integer, primary_key=True, index=True)
organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=False) organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=False)
user_id = Column(Integer, ForeignKey("data.users.id"), nullable=False) year = Column(Integer, nullable=False)
role = Column(String, default="driver") turnover = Column(Numeric(18, 2))
profit = Column(Numeric(18, 2))
employee_count = Column(Integer)
source = Column(String(50)) # pl. 'manual', 'crawler', 'api'
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
organization = relationship("Organization", back_populates="financials")
class OrganizationMember(Base):
"""Kapcsolótábla a személyek és szervezetek között."""
__tablename__ = "organization_members"
__table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True, index=True)
organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=False)
user_id = Column(Integer, ForeignKey("data.users.id"), nullable=True)
person_id = Column(BigInteger, ForeignKey("data.persons.id"), nullable=True) # Ghost támogatás
role = Column(PG_ENUM(OrgUserRole, name="orguserrole", inherit_schema=True), default=OrgUserRole.DRIVER)
permissions = Column(JSON, server_default=text("'{}'::jsonb")) permissions = Column(JSON, server_default=text("'{}'::jsonb"))
is_permanent = Column(Boolean, default=False)
is_verified = Column(Boolean, default=False) # <--- JAVÍTÁS: Ez az oszlop hiányzott!
organization = relationship("Organization", back_populates="members") organization = relationship("Organization", back_populates="members")
user = relationship("User") user = relationship("User")
person = relationship("Person", back_populates="memberships")

View File

@@ -1,26 +0,0 @@
import enum
from sqlalchemy import Column, Integer, String, Boolean, Enum, ForeignKey
from sqlalchemy.orm import relationship
from app.db.base import Base
# Átnevezve OrgUserRole-ra, hogy ne ütközzön a globális UserRole-al
class OrgUserRole(str, enum.Enum):
OWNER = "OWNER"
ADMIN = "ADMIN"
FLEET_MANAGER = "FLEET_MANAGER"
DRIVER = "DRIVER"
class OrganizationMember(Base):
__tablename__ = "organization_members"
__table_args__ = {"schema": "data"}
id = Column(Integer, primary_key=True, index=True)
org_id = Column(Integer, ForeignKey("data.organizations.id", ondelete="CASCADE"))
user_id = Column(Integer, ForeignKey("data.users.id", ondelete="CASCADE"))
# Itt is frissítjük a hivatkozást
role = Column(Enum(OrgUserRole), default=OrgUserRole.DRIVER)
is_permanent = Column(Boolean, default=False)
organization = relationship("Organization", back_populates="members")
# # # user = relationship("User", back_populates="memberships")

View File

@@ -1,7 +1,7 @@
import uuid import uuid
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text, Text from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text, Text, Float
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from sqlalchemy.dialects.postgresql import UUID as PG_UUID from sqlalchemy.dialects.postgresql import UUID as PG_UUID, JSONB
from geoalchemy2 import Geometry # PostGIS támogatás from geoalchemy2 import Geometry # PostGIS támogatás
from sqlalchemy.sql import func from sqlalchemy.sql import func
from app.db.base_class import Base from app.db.base_class import Base
@@ -20,6 +20,19 @@ class ServiceProfile(Base):
# PostGIS GPS pont (SRID 4326 = WGS84 koordináták) # PostGIS GPS pont (SRID 4326 = WGS84 koordináták)
location = Column(Geometry(geometry_type='POINT', srid=4326), index=True) location = Column(Geometry(geometry_type='POINT', srid=4326), index=True)
# Állapotkezelés: ghost, active, flagged, inactive
status = Column(String(20), server_default=text("'ghost'"), index=True)
last_audit_at = Column(DateTime(timezone=True), server_default=func.now())
# --- MAGÁNNYOMOZÓ (Deep Enrichment) ADATOK ---
google_place_id = Column(String(100), unique=True)
rating = Column(Float)
user_ratings_total = Column(Integer)
# Bentley vs BMW logika: JSONB a gyors, márkaszintű szűréshez
# Példa: {"brands": ["Bentley", "Audi"], "specialty": ["engine", "tuning"]}
specialization_tags = Column(JSONB, server_default=text("'{}'::jsonb"))
# Trust Engine (Bot Discovery=30, User Entry=50, Admin/Partner=100) # Trust Engine (Bot Discovery=30, User Entry=50, Admin/Partner=100)
trust_score = Column(Integer, default=30) trust_score = Column(Integer, default=30)
is_verified = Column(Boolean, default=False) is_verified = Column(Boolean, default=False)

View File

@@ -1,60 +1,198 @@
import asyncio import asyncio
import httpx
import logging import logging
import json
import re
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select from sqlalchemy import select, func, or_, text
from app.db.session import SessionLocal from app.db.session import SessionLocal
from app.models.asset import AssetCatalog from app.models.asset import AssetCatalog
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("Robot1-Catalog") logger = logging.getLogger("Robot1-Master-Fleet-DeepDive")
class CatalogScout: class CatalogScout:
""" """
Robot 1: Járműkatalógus feltöltő. Robot 1: Univerzális Járműkatalógus Építő és Audit Robot.
Stratégia: Magyarországi alapok -> Globális EU márkák -> Technikai mélység. Logika: EU-Elsődlegesség (CarQuery) -> US-Kiegészítés (NHTSA).
Kategóriák: Car, Motorcycle, Bus, Truck, Trailer, ATV, Marine, Aerial.
Szekvenciák:
1. Deep Dive (Motorvariánsok gyűjtése)
2. Audit (Hiányos adatok pótlása)
""" """
CQ_URL = "https://www.carqueryapi.com/api/0.3/"
NHTSA_BASE = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/"
@staticmethod HEADERS = {
async def get_initial_hu_data(): "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
"Accept": "application/json"
}
# --- KATEGÓRIA DEFINÍCIÓK (Szigorú flotta-szétválasztás) ---
MOTO_MAKES = ['ducati', 'ktm', 'triumph', 'aprilia', 'benelli', 'vespa', 'simson', 'mz', 'etz', 'jawa', 'husqvarna', 'gasgas', 'sherco']
MARINE_IDS = ['DF', 'DT', 'OUTBOARD', 'MARINE', 'JET SKI', 'SEA-DOO', 'WAVERUNNER', 'YACHT', 'BOAT']
AERIAL_IDS = ['CESSNA', 'PIPER', 'AIRBUS', 'BOEING', 'HELICOPTER', 'AIRCRAFT', 'BEECHCRAFT', 'EMBRAER', 'DRONE']
ATV_IDS = ['LT-', 'LTZ', 'LTR', 'KINGQUAD', 'QUAD', 'POLARIS', 'CAN-AM', 'MULE', 'RZR', 'ARCTIC CAT', 'UTV', 'SIDE-BY-SIDE']
# Versenygépek (Motorkerékpárként, üzemóra alapú szervizhez)
RACING_IDS = ['RM-Z', 'KX', 'CRF', 'YZ', 'SX-F', 'XC-W', 'RM125', 'RM250', 'CR125', 'CR250', 'MC450']
MOTO_KEYWORDS = ['CBR', 'GSX', 'YZF', 'NINJA', 'Z1000', 'DR-Z', 'MT-0', 'V-STROM', 'ADVENTURE', 'SCRAMBLER', 'CBF', 'VFR', 'HAYABUSA']
# Flotta kategóriák szétválasztása
BUS_KEYWORDS = ['BUS', 'COACH', 'INTERCITY', 'SHUTTLE', 'TRANSIT']
TRUCK_KEYWORDS = ['TRUCK', 'SEMI', 'TRACTOR', 'HAULER', 'ACTROS', 'MAN', 'SCANIA', 'IVECO', 'VOLVO FH', 'DAF', 'TGX', 'RENAULT T']
TRAILER_KEYWORDS = ['TRAILER', 'SEMITRAILER', 'PÓTKOCSI', 'UTÁNFUTÓ', 'SCHMITZ', 'KRONE', 'KÖGEL']
@classmethod
def identify_class(cls, make: str, model: str) -> str:
"""Kategória meghatározás flottakezelési szempontok alapján."""
m_full = f"{make} {model}".upper()
if any(x in m_full for x in cls.AERIAL_IDS): return "aerial"
if any(x in m_full for x in cls.MARINE_IDS): return "marine"
if any(x in m_full for x in cls.ATV_IDS): return "atv"
# Motorkerékpárok (Versenygépekkel együtt)
if any(x in m_full for x in cls.RACING_IDS) or make.lower() in cls.MOTO_MAKES:
return "motorcycle"
if any(x in m_full for x in cls.MOTO_KEYWORDS):
return "motorcycle"
# Flotta (Busz vs Teherautó vs Pótkocsi)
if any(x in m_full for x in cls.BUS_KEYWORDS): return "bus"
if any(x in m_full for x in cls.TRUCK_KEYWORDS): return "truck"
if any(x in m_full for x in cls.TRAILER_KEYWORDS): return "trailer"
return "car"
@classmethod
async def fetch_api(cls, url, params=None, is_cq=False):
"""API hívó JSONP tisztítással és sebességkorlátozással."""
async with httpx.AsyncClient(headers=cls.HEADERS) as client:
try:
# 1.5s várakozás a Free API limitjei miatt
await asyncio.sleep(1.5)
resp = await client.get(url, params=params, timeout=35)
if resp.status_code != 200: return None
content = resp.text.strip()
if is_cq:
# Robusztusabb JSONP tisztítás regexszel
match = re.search(r'(\{.*\}|\[.*\])', content, re.DOTALL)
if match:
content = match.group(0)
elif "(" in content and ")" in content:
content = content[content.find("(") + 1 : content.rfind(")")]
return json.loads(content)
except Exception as e:
logger.error(f"❌ API hiba: {e} | URL: {url}")
return None
@classmethod
async def enrich_missing_data(cls):
""" """
Kezdeti adathalmaz (Példa). SEQUENCE 2: Audit Robot.
Élesben itt egy külső API vagy CSV feldolgozás helye van. Keresi a hiányos technikai adatokat és próbálja dúsítani őket.
""" """
return [ logger.info("🔍 Audit szekvencia indítása (hiányos adatok keresése)...")
# Suzuki - A magyar utak királya async with SessionLocal() as db:
{"make": "Suzuki", "model": "Swift", "generation": "III (2005-2010)", "engine_variant": "1.3 (92 LE)", "year_from": 2005, "year_to": 2010, "fuel_type": "petrol"}, # Keressük azokat a rekordokat, ahol hiányzik a köbcenti vagy a teljesítmény
{"make": "Suzuki", "model": "Vitara", "generation": "IV (2015-)", "engine_variant": "1.6 VVT (120 LE)", "year_from": 2015, "year_to": 2024, "fuel_type": "petrol"}, stmt = select(AssetCatalog).where(
# Opel - Astra népautó or_(
{"make": "Opel", "model": "Astra", "generation": "H (2004-2009)", "engine_variant": "1.4 Twinport (90 LE)", "year_from": 2004, "year_to": 2009, "fuel_type": "petrol"}, AssetCatalog.factory_data == text("'{}'::jsonb"),
{"make": "Opel", "model": "Astra", "generation": "J (2009-2015)", "engine_variant": "1.7 CDTI (110 LE)", "year_from": 2009, "year_to": 2015, "fuel_type": "diesel"}, AssetCatalog.engine_variant == 'Standard',
# Skoda - Családi/Flotta kedvenc AssetCatalog.fuel_type == None
{"make": "Skoda", "model": "Octavia", "generation": "II (2004-2013)", "engine_variant": "1.6 MPI (102 LE)", "year_from": 2004, "year_to": 2013, "fuel_type": "petrol"}, )
{"make": "Skoda", "model": "Octavia", "generation": "III (2013-2020)", "engine_variant": "2.0 TDI (150 LE)", "year_from": 2013, "year_to": 2020, "fuel_type": "diesel"}, ).limit(100) # Egyszerre csak 100-at nézünk
# BMW - GS Motorosoknak
{"make": "BMW", "model": "R 1200 GS", "generation": "K50 (2013-2018)", "engine_variant": "Adventure (125 LE)", "year_from": 2013, "year_to": 2018, "fuel_type": "petrol"} results = await db.execute(stmt)
] incomplete_records = results.scalars().all()
for record in incomplete_records:
logger.info(f"🛠 Audit: {record.make} {record.model} ({record.year_from}) dúsítása...")
pass
@classmethod @classmethod
async def run(cls): async def run(cls):
logger.info("🤖 Robot 1 indítása: Járműkatalógus feltöltés...") logger.info("🤖 Robot 1: EU-Elsődlegességű Deep Dive szinkron indítása...")
async with SessionLocal() as db:
data = await cls.get_initial_hu_data() # 2026-tól visszafelé haladunk (Modern flották prioritása)
added_count = 0 for year in range(2026, 1989, -1):
logger.info(f"📅 Feldolgozás alatt: {year} évjárat")
for item in data: makes_data = await cls.fetch_api(cls.CQ_URL, {"cmd": "getMakes", "year": year}, is_cq=True)
# Ellenőrizzük az egyediséget (Make + Model + Generation + Engine) if not makes_data or "Makes" not in makes_data: continue
stmt = select(AssetCatalog).where(
AssetCatalog.make == item["make"], for make_entry in makes_data.get("Makes", []):
AssetCatalog.model == item["model"], m_id = make_entry["make_id"]
AssetCatalog.engine_variant == item["engine_variant"] m_display = make_entry["make_display"]
)
result = await db.execute(stmt) # MODELL GYŰJTÉS: EU + US fúzió
if not result.scalar_one_or_none(): models_to_fetch = set()
db.add(AssetCatalog(**item))
added_count += 1 # 🇪🇺 EU Forrás
cq_models = await cls.fetch_api(cls.CQ_URL, {"cmd": "getModels", "make": m_id, "year": year}, is_cq=True)
await db.commit() if cq_models and cq_models.get("Models"):
logger.info(f"✅ Robot 1 sikeresen rögzített {added_count} új katalógus elemet.") for m in cq_models["Models"]: models_to_fetch.add(m["model_name"])
# 🇺🇸 US Forrás kiegészítés
n_data = await cls.fetch_api(f"{cls.NHTSA_BASE}{m_display}/modelyear/{year}?format=json")
if n_data and n_data.get("Results"):
for r in n_data["Results"]: models_to_fetch.add(r["Model_Name"])
async with SessionLocal() as db:
for model_name in models_to_fetch:
# DEEP DIVE: Motorvariánsok (Trims) lekérése
trims_data = await cls.fetch_api(cls.CQ_URL, {
"cmd": "getTrims", "make": m_id, "model": model_name, "year": year
}, is_cq=True)
found_trims = trims_data.get("Trims", []) if trims_data else []
# Ha nincs trim adat, egy standard sor mindenképpen kell
if not found_trims:
found_trims = [{"model_trim": "Standard", "model_engine_fuel": None}]
for t in found_trims:
variant = t.get("model_trim") or "Standard"
fuel = t.get("model_engine_fuel") or "Unknown"
v_class = cls.identify_class(m_display, model_name)
# Szigorú duplikáció-ellenőrzés (UniqueConstraint alapú lekérdezés)
stmt = select(AssetCatalog).where(
AssetCatalog.make == m_display,
AssetCatalog.model == model_name,
AssetCatalog.year_from == year,
AssetCatalog.engine_variant == variant,
AssetCatalog.fuel_type == fuel
)
result = await db.execute(stmt)
if not result.scalars().first():
db.add(AssetCatalog(
make=m_display,
model=model_name,
year_from=year,
engine_variant=variant,
fuel_type=fuel,
vehicle_class=v_class,
factory_data={
"cc": t.get("model_engine_cc"),
"hp": t.get("model_engine_power_ps"),
"cylinders": t.get("model_engine_cyl"),
"transmission": t.get("model_transmission_type"),
"source": "master_v7_deep_dive",
"sync_date": str(func.now())
}
))
# JAVÍTÁS: Márkánkénti véglegesítés az adatbázisban a session-ön belül
await db.commit()
logger.info(f"{m_display} ({year}) összes variánsa rögzítve.")
# SEQUENCE 2: Miután végeztünk a fő listával, nézzük meg a hiányosakat
await cls.enrich_missing_data()
if __name__ == "__main__": if __name__ == "__main__":
asyncio.run(CatalogScout.run()) asyncio.run(CatalogScout.run())

View File

@@ -0,0 +1,3 @@
nev,cim,telefon,web,tipus
Ideál Autó Dunakeszi,"2120 Dunakeszi, Pallag u. 7",+36201234567,http://idealauto.hu,car_repair
IMCMotor Szerviz,"2120 Dunakeszi, Kikerics köz 4",+36703972543,https://www.imcmotor.hu,motorcycle_repair
1 nev cim telefon web tipus
2 Ideál Autó Dunakeszi 2120 Dunakeszi, Pallag u. 7 +36201234567 http://idealauto.hu car_repair
3 IMCMotor Szerviz 2120 Dunakeszi, Kikerics köz 4 +36703972543 https://www.imcmotor.hu motorcycle_repair

View File

@@ -0,0 +1,42 @@
import asyncio
import logging
from app.db.session import SessionLocal
from app.models.organization import Organization
from app.models.service import ServiceProfile
from sqlalchemy import select, and_
logger = logging.getLogger("Robot2-Auditor")
class ServiceAuditor:
    """Periodic auditor that flags service organizations which have closed down.

    Closed services are soft-deleted (``is_active = False``) instead of being
    removed, so historical records keep their references intact.
    """

    @classmethod
    async def audit_services(cls):
        """Run a single audit pass over every currently active service org."""
        async with SessionLocal() as db:
            # Only active service organizations are candidates for deactivation.
            active_services_q = (
                select(Organization)
                .where(Organization.org_type == "service")
                .where(Organization.is_active == True)
            )
            rows = await db.execute(active_services_q)
            for service in rows.scalars().all():
                # 1. External liveness check goes here (OSM/Google API call).
                # status = await check_external_status(service.full_name)
                is_still_open = True  # placeholder until the external lookup is wired in
                if not is_still_open:
                    # SOFT-DELETE: keep the row, only deactivate it.
                    service.is_active = False
                    logger.info(f"⚠️ Szerviz inaktiválva (megszűnt): {service.full_name}")
                # Rate-limit protection between (future) external API checks.
                await asyncio.sleep(2)
            await db.commit()

    @classmethod
    async def run_periodic_audit(cls):
        """Loop forever, running one full audit roughly every 90 days."""
        while True:
            logger.info("🕵️ Negyedéves szerviz-audit indítása...")
            await cls.audit_services()
            # Sleep for 90 days between full passes.
            await asyncio.sleep(90 * 86400)

View File

@@ -0,0 +1,282 @@
import asyncio
import httpx
import logging
import uuid
import os
import sys
import csv
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, text
from sqlalchemy.orm import selectinload
from app.db.session import SessionLocal
# Modellek importálása
from app.models.service import ServiceProfile, ExpertiseTag
from app.models.organization import Organization, OrganizationFinancials, OrgType, OrgUserRole, OrganizationMember
from app.models.identity import Person
from app.models.address import Address, GeoPostalCode
from geoalchemy2.elements import WKTElement
from datetime import datetime, timezone
# Naplózás beállítása
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("Robot2-Dunakeszi-Detective")
class ServiceHunter:
    """
    Robot 2.7.2: Dunakeszi Detective - deep model integration.

    Data sources, in priority order (trust score in parentheses):
    1. Local CSV  (manually submitted entries, geocoded from the address - trust 50)
    2. OSM        (community data via Overpass - trust 10)
    3. Google     (enrichment / name fallback - trust 30)
    """
    OVERPASS_URL = "http://overpass-api.de/api/interpreter"
    PLACES_NEW_URL = "https://places.googleapis.com/v1/places:searchNearby"
    GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
    # Read at import time from the environment; when missing, all Google calls are skipped.
    GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
    # Container path where manually curated services are dropped for import.
    LOCAL_CSV_PATH = "/app/app/workers/local_services.csv"

    @classmethod
    async def geocode_address(cls, address_text):
        """Resolve a free-text address into GPS coordinates and parsed address parts.

        Returns a dict with keys lat/lng/zip/city/street/type/number on success,
        or None when the key is missing, the request fails, or no result is found.
        """
        if not cls.GOOGLE_API_KEY:
            logger.warning("⚠️ Google API kulcs hiányzik!")
            return None
        params = {"address": address_text, "key": cls.GOOGLE_API_KEY}
        try:
            async with httpx.AsyncClient() as client:
                resp = await client.get(cls.GEOCODE_URL, params=params, timeout=10)
                if resp.status_code == 200:
                    data = resp.json()
                    if data.get("results"):
                        # Only the first (best) geocoding candidate is used.
                        result = data["results"][0]
                        loc = result["geometry"]["location"]
                        # Extract address components for the mandatory Address fields;
                        # defaults cover components Google does not return.
                        components = result.get("address_components", [])
                        parsed = {"lat": loc["lat"], "lng": loc["lng"], "zip": "", "city": "", "street": "Ismeretlen", "type": "utca", "number": "1"}
                        for c in components:
                            types = c.get("types", [])
                            if "postal_code" in types: parsed["zip"] = c["long_name"]
                            if "locality" in types: parsed["city"] = c["long_name"]
                            if "route" in types: parsed["street"] = c["long_name"]
                            if "street_number" in types: parsed["number"] = c["long_name"]
                        logger.info(f"📍 Geocoding sikeres: {address_text}")
                        return parsed
                else:
                    logger.error(f"❌ Geocoding hiba: {resp.status_code}")
        except Exception as e:
            logger.error(f"❌ Geocoding hiba: {e}")
        # 200-with-no-results also falls through to here.
        return None

    @classmethod
    async def get_google_place_details_new(cls, lat, lon):
        """Google Places API (New): enrich a coordinate using a FieldMask-limited lookup.

        Searches a 40 m circle around (lat, lon) for one vehicle-service-type place;
        returns a small dict (name/google_id/types/phone/website) or None.
        """
        if not cls.GOOGLE_API_KEY:
            return None
        headers = {
            "Content-Type": "application/json",
            "X-Goog-Api-Key": cls.GOOGLE_API_KEY,
            # FieldMask keeps the (billed) response down to only the fields we store.
            "X-Goog-FieldMask": "places.displayName,places.id,places.types,places.internationalPhoneNumber,places.websiteUri"
        }
        payload = {
            "includedTypes": ["car_repair", "gas_station", "ev_charging_station", "car_wash", "motorcycle_repair"],
            "maxResultCount": 1,
            "locationRestriction": {
                "circle": {
                    "center": {"latitude": lat, "longitude": lon},
                    # 40 m radius: tight enough to match the same premises.
                    "radius": 40.0
                }
            }
        }
        try:
            async with httpx.AsyncClient() as client:
                resp = await client.post(cls.PLACES_NEW_URL, json=payload, headers=headers, timeout=10)
                if resp.status_code == 200:
                    places = resp.json().get("places", [])
                    if places:
                        p = places[0]
                        return {
                            "name": p.get("displayName", {}).get("text"),
                            "google_id": p.get("id"),
                            "types": p.get("types", []),
                            "phone": p.get("internationalPhoneNumber"),
                            "website": p.get("websiteUri")
                        }
        except Exception as e:
            logger.error(f"❌ Google kiegészítő hívás hiba: {e}")
        return None

    @classmethod
    async def import_local_csv(cls, db: AsyncSession):
        """Load manually submitted services from the local CSV and save them.

        Expected columns: nev, cim, telefon, web, tipus. Rows are geocoded from
        the 'cim' column; rows that fail geocoding are silently skipped.
        """
        if not os.path.exists(cls.LOCAL_CSV_PATH):
            return
        try:
            with open(cls.LOCAL_CSV_PATH, mode='r', encoding='utf-8') as f:
                reader = csv.DictReader(f)
                for row in reader:
                    geo_data = None
                    if row.get('cim'):
                        geo_data = await cls.geocode_address(row['cim'])
                    if geo_data:
                        # Shape the row like an OSM element so save_service_deep()
                        # can consume CSV and OSM input through one code path.
                        element = {
                            "tags": {
                                "name": row['nev'], "phone": row.get('telefon'),
                                "website": row.get('web'), "amenity": row.get('tipus', 'car_repair'),
                                "addr:full": row.get('cim'),
                                "addr:city": geo_data["city"], "addr:zip": geo_data["zip"],
                                "addr:street": geo_data["street"], "addr:type": geo_data["type"],
                                "addr:number": geo_data["number"]
                            },
                            "lat": geo_data["lat"], "lon": geo_data["lng"]
                        }
                        await cls.save_service_deep(db, element, source="local_manual")
            logger.info("✅ Helyi CSV adatok feldolgozva.")
        except Exception as e:
            logger.error(f"❌ CSV feldolgozási hiba: {e}")

    @classmethod
    async def get_or_create_person(cls, db: AsyncSession, name: str) -> Person:
        """Find or create a 'ghost' Person record for a discovered owner name.

        NOTE(review): splits on the first space assuming Hungarian name order
        (family name first) — confirm this matches how Person is used elsewhere.
        """
        names = name.split(' ', 1)
        last_name = names[0]
        first_name = names[1] if len(names) > 1 else "Ismeretlen"
        stmt = select(Person).where(Person.last_name == last_name, Person.first_name == first_name)
        result = await db.execute(stmt); person = result.scalar_one_or_none()
        if not person:
            # Ghost = not a registered user; inactive until claimed/verified.
            person = Person(last_name=last_name, first_name=first_name, is_ghost=True, is_active=False)
            db.add(person); await db.flush()
        return person

    @classmethod
    async def enrich_financials(cls, db: AsyncSession, org_id: int):
        """Initialise an empty financial record (previous year) for the organization."""
        financial = OrganizationFinancials(
            organization_id=org_id, year=datetime.now(timezone.utc).year - 1, source="bot_discovery"
        )
        db.add(financial)

    @classmethod
    async def save_service_deep(cls, db: AsyncSession, element: dict, source="osm"):
        """Deep-save one discovered service: Address, Organization, ServiceProfile,
        owner membership and a financial stub — only when the name is not yet known.

        NOTE(review): elements without top-level lat/lon are skipped; Overpass
        'out center;' ways/relations carry coordinates under element["center"] —
        confirm those are not being dropped here.
        """
        tags = element.get("tags", {})
        lat, lon = element.get("lat"), element.get("lon")
        if not lat or not lon: return

        osm_name = tags.get("name") or tags.get("brand") or tags.get("operator")
        google_data = None
        # Fall back to Google when OSM has no usable name or only a generic brand.
        if not osm_name or osm_name.lower() in ['aprilia', 'bosch', 'shell', 'mol', 'omv', 'ismeretlen']:
            google_data = await cls.get_google_place_details_new(lat, lon)

        final_name = (google_data["name"] if google_data else osm_name) or "Ismeretlen Szolgáltató"

        # Dedup by full organization name: existing orgs are left untouched.
        stmt = select(Organization).where(Organization.full_name == final_name)
        result = await db.execute(stmt); org = result.scalar_one_or_none()

        if not org:
            # 1. Create the Address (mandatory fields filled from tags or defaults).
            new_addr = Address(
                latitude=lat,
                longitude=lon,
                full_address_text=tags.get("addr:full") or f"2120 Dunakeszi, {tags.get('addr:street', 'Ismeretlen')} {tags.get('addr:housenumber', '1')}",
                street_name=tags.get("addr:street") or "Ismeretlen",
                street_type=tags.get("addr:type") or "utca",
                house_number=tags.get("addr:number") or tags.get("addr:housenumber") or "1"
            )
            db.add(new_addr); await db.flush()

            # 2. Create the Organization (denormalized address fields live here too).
            org = Organization(
                full_name=final_name,
                name=final_name[:50],
                org_type=OrgType.service,
                address_id=new_addr.id,
                address_city=tags.get("addr:city") or "Dunakeszi",
                address_zip=tags.get("addr:zip") or "2120",
                address_street_name=new_addr.street_name,
                address_street_type=new_addr.street_type,
                address_house_number=new_addr.house_number
            )
            db.add(org); await db.flush()

            # 3. Service profile: trust score encodes the data source
            # (manual CSV 50 > Google-enriched 30 > bare OSM 10).
            trust = 50 if source == "local_manual" else (30 if google_data else 10)
            spec = {"brands": [], "types": google_data["types"] if google_data else [], "osm_tags": tags}
            if tags.get("brand"): spec["brands"].append(tags.get("brand"))

            profile = ServiceProfile(
                organization_id=org.id,
                # PostGIS point, WGS84 — note WKT order is (lon lat).
                location=WKTElement(f'POINT({lon} {lat})', srid=4326),
                status="ghost",
                trust_score=trust,
                google_place_id=google_data["google_id"] if google_data else None,
                specialization_tags=spec,
                website=google_data["website"] if google_data else tags.get("website"),
                contact_phone=google_data["phone"] if google_data else tags.get("phone")
            )
            db.add(profile)

            # 4. Record the owner as an unverified ghost member, when OSM names one.
            owner_name = tags.get("operator") or tags.get("contact:person")
            if owner_name and len(owner_name) > 3:
                person = await cls.get_or_create_person(db, owner_name)
                db.add(OrganizationMember(
                    organization_id=org.id,
                    person_id=person.id,
                    role=OrgUserRole.OWNER,
                    is_verified=False
                ))

            await cls.enrich_financials(db, org.id)
            await db.flush()
            logger.info(f"✨ [{source.upper()}] Mentve: {final_name} (Bizalom: {trust})")

    @classmethod
    async def run(cls):
        """Main loop: wait for the DB, then scan CSV + OSM once per day forever."""
        logger.info("🤖 Robot 2.7.2: Dunakeszi Detective indítása...")

        # Connection guard: retry until the database is reachable (container startup order).
        connected = False
        while not connected:
            try:
                async with SessionLocal() as db:
                    await db.execute(text("SELECT 1"))
                connected = True
            except Exception as e:
                logger.warning(f"⏳ Várakozás a hálózatra (shared-postgres host?): {e}")
                await asyncio.sleep(5)

        while True:
            async with SessionLocal() as db:
                try:
                    # Tables live in the 'data' schema; set it for this session.
                    await db.execute(text("SET search_path TO data, public"))

                    # 1. Process submitted CSV entries (with geocoding).
                    await cls.import_local_csv(db)
                    await db.commit()

                    # 2. OSM scan: Dunakeszi area + a 5 km fallback circle around it.
                    query = """[out:json][timeout:120];area["name"="Dunakeszi"]->.city;(nwr["shop"~"car_repair|motorcycle_repair|tyres|car_parts|motorcycle"](area.city);nwr["amenity"~"car_repair|vehicle_inspection|motorcycle_repair|fuel|charging_station|car_wash"](area.city);nwr["amenity"~"car_repair|fuel|charging_station"](around:5000, 47.63, 19.13););out center;"""
                    async with httpx.AsyncClient() as client:
                        resp = await client.post(cls.OVERPASS_URL, data={"data": query}, timeout=120)
                        if resp.status_code == 200:
                            elements = resp.json().get("elements", [])
                            for el in elements:
                                await cls.save_service_deep(db, el, source="osm")
                            await db.commit()
                except Exception as e:
                    logger.error(f"❌ Futáshiba: {e}")

            logger.info("😴 Scan kész, 24 óra pihenő...")
            await asyncio.sleep(86400)
# Script entry point: run the hunter's main loop when executed directly.
if __name__ == "__main__":
    asyncio.run(ServiceHunter.run())

View File

@@ -0,0 +1,218 @@
"""fix_member_is_verified
Revision ID: 143763d5d6fe
Revises: 492849ee0b3a
Create Date: 2026-02-12 22:55:59.491182
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '143763d5d6fe'
down_revision: Union[str, Sequence[str], None] = '492849ee0b3a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Auto-generated by Alembic: re-creates every foreign key so that both the
    source and the referenced table are qualified with the 'data' schema, and
    moves the orguserrole / orgtype enums into that schema as well.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Pattern repeated per table: drop the unqualified FK, re-create it
    # schema-qualified (source_schema='data', referent_schema='data').
    op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
    op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # Move the orguserrole enum into the 'data' schema (values unchanged).
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # Move the orgtype enum into the 'data' schema (values unchanged).
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
    op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
    op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
    op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
    # ondelete='CASCADE' preserved from the original constraint.
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
    op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
    # ondelete='CASCADE' preserved from the original constraint.
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Re-creates every foreign key without explicit schema qualification,
    restoring the pre-upgrade (search_path-resolved) definitions, and moves
    the ``orgtype`` / ``orguserrole`` enum columns back to their unqualified
    enum types.

    NOTE(review): every ``op.drop_constraint(None, ...)`` call below passes
    ``None`` as the constraint name; Alembic raises "Constraint must have a
    name" for unnamed drops unless a naming convention is configured on the
    MetaData — presumably these auto-generated placeholders still need the
    real (database-generated) names filled in. Verify before running this
    downgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Wallets / verification tokens / vehicle ownerships
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    # Users / user stats / badges / social accounts
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    # Service profiles / specialties / expertises
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
    # Ratings / points ledger / persons / pending actions
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    # Organizations: foreign keys plus moving org_type back to the
    # unqualified (non-schema-bound) enum type.
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               existing_nullable=True)
    # Organization members: three FKs (organization, user, person) plus the
    # role enum moved back to its unqualified form.
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               existing_nullable=True)
    # Financials / subscriptions / geo / documents / logs
    op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
    # Assets and asset satellite tables
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    # Addresses
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # ### end Alembic commands ###

View File

@@ -0,0 +1,206 @@
"""identity_and_hybrid_org_update
Revision ID: 25afe6f4f063
Revises: 398e76c2fa36
Create Date: 2026-02-12 22:38:04.309546
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '25afe6f4f063'
down_revision: Union[str, Sequence[str], None] = '398e76c2fa36'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Moves every foreign key to explicit ``data``-schema qualification, adds
    geocoding columns (``addresses.latitude`` / ``longitude``), creates the
    ``organization_financials`` table, adds Google-Places enrichment columns
    on ``service_profiles``, and converts ``organization_members.role`` from
    a plain varchar to the ``data.orguserrole`` enum.

    NOTE(review): the ``op.create_foreign_key(None, ...)`` and
    ``op.create_unique_constraint(None, ...)`` calls rely on the database's
    default constraint naming; the matching ``drop_constraint(None, ...)``
    placeholders in ``downgrade()`` will need those generated names filled
    in before the downgrade can run.
    """
    # --- MANUAL FIX: create the enum type inside the 'data' schema first ---
    org_user_role = postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data')
    org_user_role.create(op.get_bind(), checkfirst=True)
    # ### commands auto generated by Alembic - please adjust! ###
    # Yearly financial snapshot per organization (turnover / profit / headcount).
    op.create_table('organization_financials',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('organization_id', sa.Integer(), nullable=False),
    sa.Column('year', sa.Integer(), nullable=False),
    sa.Column('turnover', sa.Numeric(precision=18, scale=2), nullable=True),
    sa.Column('profit', sa.Numeric(precision=18, scale=2), nullable=True),
    sa.Column('employee_count', sa.Integer(), nullable=True),
    sa.Column('source', sa.String(length=50), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.ForeignKeyConstraint(['organization_id'], ['data.organizations.id'], ),
    sa.PrimaryKeyConstraint('id'),
    schema='data'
    )
    op.create_index(op.f('ix_data_organization_financials_id'), 'organization_financials', ['id'], unique=False, schema='data')
    # Geocoding support: lat/lon directly on addresses.
    op.add_column('addresses', sa.Column('latitude', sa.Float(), nullable=True))
    op.add_column('addresses', sa.Column('longitude', sa.Float(), nullable=True))
    op.drop_constraint('addresses_postal_code_id_fkey', 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    # Asset Assignments fix
    op.drop_constraint('asset_assignments_asset_id_fkey', 'asset_assignments', type_='foreignkey')
    op.drop_constraint('asset_assignments_organization_id_fkey', 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    # Asset Costs fix
    op.drop_constraint('asset_costs_driver_id_fkey', 'asset_costs', type_='foreignkey')
    op.drop_constraint('asset_costs_organization_id_fkey', 'asset_costs', type_='foreignkey')
    op.drop_constraint('asset_costs_asset_id_fkey', 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    # Other asset and audit relations
    op.drop_constraint('asset_events_asset_id_fkey', 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('asset_financials_asset_id_fkey', 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('asset_reviews_user_id_fkey', 'asset_reviews', type_='foreignkey')
    op.drop_constraint('asset_reviews_asset_id_fkey', 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('asset_telemetry_asset_id_fkey', 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('assets_catalog_id_fkey', 'assets', type_='foreignkey')
    op.drop_constraint('assets_current_organization_id_fkey', 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('audit_logs_user_id_fkey', 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('credit_logs_org_id_fkey', 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('documents_uploaded_by_fkey', 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('geo_streets_postal_code_id_fkey', 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('org_subscriptions_tier_id_fkey', 'org_subscriptions', type_='foreignkey')
    op.drop_constraint('org_subscriptions_org_id_fkey', 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    # Organization members extension: optional person link, permanence flag,
    # and user_id made nullable (member may exist without a user account —
    # presumably the "ghost" person case; confirm against the models).
    op.add_column('organization_members', sa.Column('person_id', sa.BigInteger(), nullable=True))
    op.add_column('organization_members', sa.Column('is_permanent', sa.Boolean(), nullable=True))
    op.alter_column('organization_members', 'user_id', existing_type=sa.INTEGER(), nullable=True)
    # ENUM casting fix (convert existing lowercase values to uppercase)
    op.alter_column('organization_members', 'role',
               existing_type=sa.VARCHAR(),
               type_=org_user_role,
               existing_nullable=True,
               postgresql_using='UPPER(role)::data.orguserrole')
    op.drop_constraint('organization_members_organization_id_fkey', 'organization_members', type_='foreignkey')
    op.drop_constraint('organization_members_user_id_fkey', 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # Organization schema fix: move org_type to the schema-bound enum.
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint('organizations_owner_id_fkey', 'organizations', type_='foreignkey')
    op.drop_constraint('organizations_address_id_fkey', 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    # Drop obsolete columns (note: downgrade() does not restore these).
    op.drop_column('organizations', 'address_floor')
    op.drop_column('organizations', 'verification_expires_at')
    op.drop_column('organizations', 'is_transferable')
    op.drop_column('organizations', 'address_door')
    op.drop_column('organizations', 'address_stairwell')
    # Pending actions
    op.drop_constraint('pending_actions_requester_id_fkey', 'pending_actions', type_='foreignkey')
    op.drop_constraint('pending_actions_approver_id_fkey', 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    # Person and related links (is_ghost defaults to true for existing rows).
    op.add_column('persons', sa.Column('is_ghost', sa.Boolean(), nullable=False, server_default='true'))
    op.drop_constraint('persons_address_id_fkey', 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('points_ledger_user_id_fkey', 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('ratings_author_id_fkey', 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('service_expertises_expertise_id_fkey', 'service_expertises', type_='foreignkey')
    op.drop_constraint('service_expertises_service_id_fkey', 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    # Service profile enrichment (Google Places metadata).
    op.add_column('service_profiles', sa.Column('google_place_id', sa.String(length=100), nullable=True))
    op.add_column('service_profiles', sa.Column('rating', sa.Float(), nullable=True))
    op.add_column('service_profiles', sa.Column('user_ratings_total', sa.Integer(), nullable=True))
    op.add_column('service_profiles', sa.Column('specialization_tags', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), nullable=True))
    op.create_unique_constraint(None, 'service_profiles', ['google_place_id'], schema='data')
    op.drop_constraint('service_profiles_organization_id_fkey', 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('service_specialties_parent_id_fkey', 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    # Social accounts
    op.drop_constraint('social_accounts_user_id_fkey', 'social_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    # User / wallet / token relations
    op.drop_constraint('user_badges_badge_id_fkey', 'user_badges', type_='foreignkey')
    op.drop_constraint('user_badges_user_id_fkey', 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('user_stats_user_id_fkey', 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('users_person_id_fkey', 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('vehicle_ownerships_user_id_fkey', 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint('vehicle_ownerships_vehicle_id_fkey', 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint('verification_tokens_user_id_fkey', 'verification_tokens', type_='foreignkey')
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint('wallets_user_id_fkey', 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
def downgrade() -> None:
    """Downgrade schema.

    Reverses :func:`upgrade`: removes the financial-snapshot table, the
    geocoding columns, the organization-member extensions and the
    service-profile enrichment columns, then drops the ``data.orguserrole``
    enum type last, once no column references it any more.
    """
    org_user_role = postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data')
    # Roll back the base data.
    # FIX: the index must be dropped before its table — dropping the table
    # first removes the index with it, and the original drop_index call
    # then failed with "index does not exist".
    op.drop_index(op.f('ix_data_organization_financials_id'), table_name='organization_financials', schema='data')
    op.drop_table('organization_financials', schema='data')
    op.drop_column('addresses', 'longitude')
    op.drop_column('addresses', 'latitude')
    # Restore the role column to a plain string.
    # FIX: PostgreSQL cannot cast an enum column to varchar automatically in
    # ALTER COLUMN ... TYPE, so an explicit USING cast is required.
    op.alter_column('organization_members', 'role',
               existing_type=org_user_role,
               type_=sa.VARCHAR(),
               existing_nullable=True,
               postgresql_using='role::varchar')
    op.drop_column('organization_members', 'is_permanent')
    op.drop_column('organization_members', 'person_id')
    op.drop_column('persons', 'is_ghost')
    # Service-profile cleanup.
    # FIX: drop_constraint(None, ...) raises in Alembic ("Constraint must
    # have a name"); use the name PostgreSQL generates for the unnamed
    # unique constraint created in upgrade() (default: <table>_<column>_key).
    op.drop_constraint('service_profiles_google_place_id_key', 'service_profiles', schema='data', type_='unique')
    op.drop_column('service_profiles', 'specialization_tags')
    op.drop_column('service_profiles', 'user_ratings_total')
    op.drop_column('service_profiles', 'rating')
    op.drop_column('service_profiles', 'google_place_id')
    # Drop the enum type last of all.
    org_user_role.drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,296 @@
"""audit_and_moderation_fields
Revision ID: 398e76c2fa36
Revises: 9b20430f0ebb
Create Date: 2026-02-12 19:48:09.530752
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '398e76c2fa36'
down_revision: Union[str, Sequence[str], None] = '9b20430f0ebb'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.add_column('asset_costs', sa.Column('net_amount_local', sa.Numeric(precision=18, scale=2), nullable=True))
op.add_column('asset_costs', sa.Column('vat_rate', sa.Numeric(precision=5, scale=2), nullable=True))
op.add_column('asset_costs', sa.Column('exchange_rate_used', sa.Numeric(precision=18, scale=6), nullable=True))
op.alter_column('asset_costs', 'data',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
op.alter_column('asset_events', 'data',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.alter_column('asset_reviews', 'criteria_scores',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
op.add_column('assets', sa.Column('verification_notes', sa.Text(), nullable=True))
op.add_column('assets', sa.Column('catalog_match_score', sa.Numeric(precision=5, scale=2), nullable=True))
op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
op.drop_column('exchange_rates', 'updated_at')
op.alter_column('expertise_tags', 'key',
existing_type=sa.VARCHAR(length=50),
nullable=True)
op.drop_constraint(op.f('expertise_tags_key_key'), 'expertise_tags', type_='unique')
op.create_index(op.f('ix_data_expertise_tags_key'), 'expertise_tags', ['key'], unique=True, schema='data')
op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
op.alter_column('organizations', 'org_type',
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
existing_nullable=True)
op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
op.alter_column('service_profiles', 'verification_log',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=sa.JSON(),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.alter_column('service_profiles', 'opening_hours',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=sa.JSON(),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.create_index(op.f('ix_data_service_profiles_id'), 'service_profiles', ['id'], unique=False, schema='data')
op.create_index(op.f('ix_data_service_profiles_location'), 'service_profiles', ['location'], unique=False, schema='data')
op.create_index(op.f('ix_data_service_profiles_status'), 'service_profiles', ['status'], unique=False, schema='data')
op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
op.alter_column('vehicle_catalog', 'factory_data',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
op.create_index(op.f('ix_data_vehicle_catalog_engine_variant'), 'vehicle_catalog', ['engine_variant'], unique=False, schema='data')
op.create_index(op.f('ix_data_vehicle_catalog_fuel_type'), 'vehicle_catalog', ['fuel_type'], unique=False, schema='data')
op.create_unique_constraint('uix_vehicle_catalog_full', 'vehicle_catalog', ['make', 'model', 'year_from', 'engine_variant', 'fuel_type'], schema='data')
op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
# ### end Alembic commands ###
def downgrade() -> None:
    """Revert this migration: move the foreign keys back out of the 'data'
    schema, restore the JSON/JSONB column types, indexes, enum placement and
    the columns dropped here (``catalog_match_score``, ``verification_notes``,
    ``exchange_rate_used``, ``vat_rate``, ``net_amount_local``).

    NOTE(review): the ``op.drop_constraint(None, ...)`` calls below are
    Alembic auto-generated placeholders ("please adjust!"); a constraint
    cannot be dropped without its real name, so these calls are expected to
    fail at runtime until the actual constraint names are filled in --
    TODO confirm before relying on this downgrade path.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
    op.drop_constraint('uix_vehicle_catalog_full', 'vehicle_catalog', schema='data', type_='unique')
    op.drop_index(op.f('ix_data_vehicle_catalog_fuel_type'), table_name='vehicle_catalog', schema='data')
    op.drop_index(op.f('ix_data_vehicle_catalog_engine_variant'), table_name='vehicle_catalog', schema='data')
    # Restore vehicle_catalog.factory_data to plain JSON (upgrade made it JSONB).
    op.alter_column('vehicle_catalog', 'factory_data',
               existing_type=postgresql.JSONB(astext_type=sa.Text()),
               type_=postgresql.JSON(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.drop_index(op.f('ix_data_service_profiles_status'), table_name='service_profiles', schema='data')
    op.drop_index(op.f('ix_data_service_profiles_location'), table_name='service_profiles', schema='data')
    op.drop_index(op.f('ix_data_service_profiles_id'), table_name='service_profiles', schema='data')
    # Revert the JSON columns on service_profiles back to JSONB.
    op.alter_column('service_profiles', 'opening_hours',
               existing_type=sa.JSON(),
               type_=postgresql.JSONB(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.alter_column('service_profiles', 'verification_log',
               existing_type=sa.JSON(),
               type_=postgresql.JSONB(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    # Move the orgtype enum back out of the 'data' schema.
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               existing_nullable=True)
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_index(op.f('ix_data_expertise_tags_key'), table_name='expertise_tags', schema='data')
    op.create_unique_constraint(op.f('expertise_tags_key_key'), 'expertise_tags', ['key'], postgresql_nulls_not_distinct=False)
    op.alter_column('expertise_tags', 'key',
               existing_type=sa.VARCHAR(length=50),
               nullable=False)
    op.add_column('exchange_rates', sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True))
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    # Columns added by upgrade(); dropping them discards their data.
    op.drop_column('assets', 'catalog_match_score')
    op.drop_column('assets', 'verification_notes')
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.alter_column('asset_reviews', 'criteria_scores',
               existing_type=postgresql.JSONB(astext_type=sa.Text()),
               type_=postgresql.JSON(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.alter_column('asset_events', 'data',
               existing_type=postgresql.JSONB(astext_type=sa.Text()),
               type_=postgresql.JSON(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.alter_column('asset_costs', 'data',
               existing_type=postgresql.JSONB(astext_type=sa.Text()),
               type_=postgresql.JSON(astext_type=sa.Text()),
               existing_nullable=True,
               existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_column('asset_costs', 'exchange_rate_used')
    op.drop_column('asset_costs', 'vat_rate')
    op.drop_column('asset_costs', 'net_amount_local')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # ### end Alembic commands ###

View File

@@ -0,0 +1,220 @@
"""add_is_verified_to_members
Revision ID: 492849ee0b3a
Revises: 25afe6f4f063
Create Date: 2026-02-12 22:54:06.389304
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
# Alembic reads these module-level values to place this migration in the
# version graph.
revision: str = '492849ee0b3a'  # unique id of this migration
down_revision: Union[str, Sequence[str], None] = '25afe6f4f063'  # parent revision
branch_labels: Union[str, Sequence[str], None] = None  # no branch labels
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
def upgrade() -> None:
    """Add ``organization_members.is_verified`` and re-point every foreign
    key (plus the ``orgtype`` / ``orguserrole`` enums) at the 'data' schema.

    NOTE(review): the foreign keys are recreated with ``name=None`` (backend
    default naming), so the matching downgrade() cannot drop them by name;
    the ``op.drop_constraint(...)`` calls here are also unqualified by schema
    -- TODO confirm both against the live database before running.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
    op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # The actual feature change of this revision: the membership verification flag.
    op.add_column('organization_members', sa.Column('is_verified', sa.Boolean(), nullable=True))
    # Re-home the orguserrole enum into the 'data' schema.
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # Re-home the orgtype enum into the 'data' schema.
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
    op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
    op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
    op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
    op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert revision 492849ee0b3a: drop ``organization_members.is_verified``
    and move the foreign keys and enums back out of the 'data' schema.

    NOTE(review): the ``op.drop_constraint(None, ...)`` calls below are
    Alembic auto-generated placeholders; a constraint cannot be dropped
    without its real name, so this downgrade path is expected to fail until
    the actual constraint names are filled in -- TODO confirm and adjust.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    # Move the orgtype enum back out of the 'data' schema.
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               existing_nullable=True)
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    # Move the orguserrole enum back out of the 'data' schema.
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               existing_nullable=True)
    # Dropping the feature column discards verification data.
    op.drop_column('organization_members', 'is_verified')
    op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # ### end Alembic commands ###

View File

@@ -20,6 +20,7 @@ sendgrid==6.*
Pillow
Authlib
itsdangerous
fastapi-limiter==0.1.5
pyotp
cryptography
GeoAlchemy2>=0.14.0

View File

@@ -1,5 +1,3 @@
version: '3.8'
services:
# 1. MIGRÁCIÓ (Adatbázis szerkezet frissítése)
migrate:
@@ -105,13 +103,38 @@ services:
volumes:
- ./backend:/app
env_file:
- .env
depends_on:
migrate:
condition: service_completed_successfully
networks:
- default
- shared_db_net
restart: always
# Szerviz vadász robot (Robot 2.7)
service_hunter:
build: ./backend
container_name: service_finder_robot_hunter
command: python -m app.workers.service_hunter
volumes:
- ./backend:/app
- ./backend/app/workers/local_services.csv:/app/app/workers/local_services.csv
environment:
- GOOGLE_API_KEY=${GOOGLE_API_KEY}
# JAVÍTVA: shared-postgres lett a gépnév a 'db' helyett!
- DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@shared-postgres:5432/${POSTGRES_DB}
env_file:
- .env
dns:
- 8.8.8.8
- 1.1.1.1
depends_on:
migrate:
condition: service_completed_successfully
networks:
- default
- shared_db_net
restart: always
networks:

View File

@@ -126,4 +126,10 @@ A technikai belépési pont.
### 5.2.2. TWINS Concept Update
- A `User` (User) és `Person` (Shadow Identity) szétválasztása szigorú.
- Belépéskor a rendszer a `User` táblából olvassa ki a `preferred_language` és `region_code` beállításokat, és ezeket a Token válaszban visszaadja a Frontendnek.
## 1.3 Shadow Identity & Merging Logic
A rendszer támogatja a "Ghost Person" (Árnyék személy) entitásokat.
- **Ghost Person:** Olyan `data.persons` rekord, amelyet a Robot 2 hozott létre nyilvános adatok (pl. cégjegyzék) alapján.
- **Identity Linkage:** Regisztrációkor a `AuthService.complete_kyc` kötelezően ellenőrzi a meglévő Ghost rekordokat (Adószám/Név egyezés).
- **Merge Action:** Találat esetén a rendszer összefűzi a technikai User fiókot a Ghost Person rekorddal, aktiválja a jogosultságokat, és megszünteti a Ghost státuszt.

View File

@@ -196,4 +196,9 @@ A rendszer az adatintegritás és a sebesség érdekében hibrid modellt haszná
| :--- | :--- |
| `data.addresses` | Konkrét házszám szintű címek (Hibrid hivatkozási pont). |
| `data.geo_postal_codes` | Irányítószám és város kapcsolata (HU/EU támogatás). |
| `data.user_stats` | Felhasználói XP, szintek és strike-ok tárolása. |
## 2.4 Financial & Enrichment Tables
- **data.organization_financials:** Éves gazdasági adatok (árbevétel, profit, létszám) tárolása historikus elemzéshez.
- **data.service_profiles.specialization_tags:** JSONB mező a szigorú szakmai szűréshez (pl. márkák, specifikus javítási típusok).
- **data.service_profiles.google_place_id:** Külső validációs kulcs a Google Places API-hoz.

View File

@@ -303,4 +303,66 @@ A rendszer most már képes egyetlen KYC folyamat alatt aktiválni a felhasznál
### 🛠 Technical Changes
- **Migrations:** Új Alembic migráció (`add_lang_and_region_to_user`) generálva és lefuttatva.
- **Environment:** A `static/locales` mappa jogosultságai beállítva a Docker konténer számára.
[2026.02.12] - Fundamentum és Robot Orchestration
FIX: Javítva a docker-compose v1/v2 összeférhetetlenség (ContainerConfig hiba).
FIX: Megszüntetve az ImportError: cannot import name 'FastAPILimiter' hiba a security.py modulban.
DATABASE: PostGIS Geometry típus implementálva a service_profiles táblában.
MODEL: Az Asset (Digital Twin) és ServiceProfile közötti kapcsolatok szinkronizálva az ownership_history modulon keresztül.
WORKERS: Új állapotvezérelt (State-driven) robotlogika bevezetése:
A szervizek alapértelmezetten ghost státusszal jönnek létre.
Bevezetve a last_audit_at mező az automatikus kivezetéshez (Soft-delete).
UX: A keresőmotor számára definiálva a "Nem megerősített szolgáltató" jelzés a bot által talált adatokhoz.
📝 Részletes Összefoglaló az Elvégzett Munkáról
Környezet Stabilizálás: A modern Docker Engine-hez igazítottuk a parancsokat, megoldva a régi Python-alapú compose hibáit.
Adatmodell Integritás: Visszaállítottuk az összes kritikus mezőt (nettó érték, ÁFA, maradványérték, telemetria), így a rendszer alkalmas komplex flottakezelési feladatokra is.
Szerviz Életciklus: Kidolgoztunk egy olyan logikát, ahol a botok nem "szemetelik" az adatbázist, hanem egy ghost (árnyék) réteget hoznak létre. Ezek a szervizek csak akkor válnak teljesen hitelessé, ha a felhasználók interakcióba lépnek velük (Gamification) vagy az Admin jóváhagyja őket.
Robot Koordináció: A robotok immár nem ütköznek. Az egyik a járműkatalógust építi API-kból, a másik a térképi pontokat gyűjti és auditálja.
# Changelog - 2026-02-13
## Service Finder Project - "Dunakeszi Detective" & Docker Infrastructure
### 🚀 Fejlesztések és Architektúra
- **Robot 2.7 (Service Hunter) Implementálása:**
- Hibrid adatgyűjtés bevezetése: OSM (OpenStreetMap) + Google Places API + Helyi CSV.
- **Geocoding Integráció:** A CSV-ben megadott szöveges címek (pl. "Dunakeszi, Kikerics köz 4") automatikus GPS koordinátára fordítása a Google API segítségével.
- **Trust Score alapok:** Különböző források eltérő bizalmi szinttel kerülnek rögzítésre (Manuális > Google > OSM).
- **Adatbázis és Modellek (ORM) Javítása:**
- `Organization` és `Address` modellek szinkronizálása a valós adatbázis sémával.
- Hiányzó mezők kezelése (City, Zip átmozgatása Organization szintre).
- PostGIS geometria (POINT) kezelésének pontosítása.
- **Docker Infrastruktúra Stabilizálás:**
- Hálózati hiba (`[Errno -2] Name or service not known`) elhárítása.
- `shared_db_net` és `bridge` hálózatok megfelelő konfigurálása.
- Konténer DNS beállítások fixálása (Google DNS fallback).
- Adatbázis hostnév korrekció (`db` -> `shared-postgres`).
### 🧠 Üzleti Logika és Stratégia (Döntések)
1. **Multi-Tenant Kezelés:** Egy címen több cég is létezhet. A rendszer nem vonja össze őket automatikusan, csak ha az adószám/név egyezik.
2. **Adatvédelmi Elv (No-Delete):** A robot soha nem töröl adatot fizikailag. Ha egy forrás megszűnik, a rekord "archived" vagy "review_needed" státuszt kap, de az adatbázisban marad.
3. **Emberi Felügyelet:** A duplikációk összefűzése vagy a hibás adatok törlése Admin/Moderátor jogkör, nem a robot automatizmusa.
4. **Dinamikus Adatfrissítés:** A robot a jövőben frissítheti a manuálisan felvitt adatokat is (pl. ha változik a nyitvatartás a Google-ön), de a prioritási szabályokat még finomítani kell.
### 🐛 Javított Hibák
- `socket.gaierror`: Docker konténer internet elérés és belső névfeloldás javítva.
- `AttributeError: 'city'`: SQLAlchemy modell mezőleképezési hiba javítva.
- Függőségi hiba (`depends_at` -> `depends_on`) a docker-compose fájlban.
### 🔜 Következő Lépések
- Gamification és Moderátori felület (Admin UI) tervezése az adatok tisztítására.
- Logikai szabályrendszer (Business Rules) véglegesítése a "Robot vs. Ember" adatkonfliktusokra.

View File

@@ -218,4 +218,9 @@ Kapcsolatot teremt egy Jármű (`Asset`) és egy Szervezet (`Organization`) köz
- **status**: Active / Released
- **Validáció:** Egy jármű egyszerre csak egy szervezetnél lehet `active` státuszban.
*(Megjegyzés: A v1.2.5 frissítés javította az ORM kapcsolatokat, így a lekérdezések most már közvetlenül elérik az `assignment.organization` objektumot.)*
## 4.0 Catalog 2022+ Strategy (Hybrid Mode)
A CarQueryAPI korlátai miatt 2022 utáni modelleknél a Robot 1 az alábbi hibrid logikát alkalmazza:
1. **API Ninjas & Auto-Data Sync:** Elsődleges technikai forrás.
2. **European Scraper Mode:** A mobile.de és autoscout24.hu portálok típusválasztóinak (meta-adatok) aratása a legfrissebb modellek és motorváltozatok rögzítéséhez.

View File

@@ -39,4 +39,15 @@ Keresési algoritmus:
Free User: 1. Hirdetők, 2. Légvonalbeli távolság, 3. Trust Score.
Útvonaltervezés (Premium): Külső motor (pl. OSRM vagy GraphHopper) integráció a pontos elérési időhöz.
## 3.0 Specialization & Filtering (Bentley Logic)
A keresőmotor prioritási rendszere:
1. **Explicit Specialist:** Specializációs tag-ek alapján (pl. brand: Bentley).
2. **General Service:** Univerzális javítók, ahol nincs kizáró ok.
3. **Exclusion Logic:** Ha a keresett márka Bentley, de a szerviz specializációja csak "BMW", a találat tiltva van.
## 4.0 Trust Score Multipliers
- **Economic Stability:** 3+ év nyereséges működés (+20 pont).
- **Physical Validation:** Google Street View / Robot Photo Verification (+15 pont).
- **Verified Staff:** Ha a szerelőregisztrációk száma > 2 (+10 pont).

View File

@@ -38,4 +38,65 @@ A Robot 1 (Catalog Filler) egy rétegelt feltöltési stratégiát követ:
Layer 2 (Technical Depth): Folyadékmennyiségek, kerékméretek, meghúzási nyomatékok.
Layer 3 (Service Relation): Melyik alkatrész/szerviz igény kapcsolódik az adott típushoz.
API Strategy
24. Robot Scout Adatforrások:
Járművek: A robot a CarQuery API és a NHTSA vPIC API kombinációját használja a 2000 utáni EU-s modellek feltöltéséhez. A ciklusidő: 1 év/5 perc.
Szervizek: Az OSM Overpass API az elsődleges forrás a lokációkhoz. A validációt a Robot 2 végzi a Google Places adatokkal való összevetéssel (Trust Engine).
Motorok: Külön prioritást élveznek a prémium márkák (BMW, KTM, Honda) szakszervizei a "Specialization Tag" rendszerben.
📘 MASTER BOOK KIEGÉSZÍTÉS (v2.4) - 2026.02.13
20.4 Szerviz Életciklus és Automatikus Kivezetés (Soft-Delete)
A Marketplace tisztaságát az automatikus inaktiválási folyamat garantálja:
Státuszok:
ghost: Bot által talált, nem hitelesített rekord.
active: Működő, publikus szerviz.
flagged: Gyanús (pl. bezártnak jelentett), felülvizsgálatra vár.
inactive: Megszűnt vagy inaktivált szerviz (Soft-deleted).
Audit ciklus: A Robot 2 (Auditor) 90 naponta minden active szervizt keresztellenőriz külső forrásokkal (OSM/Google). Ha egy hely "Permanently Closed", a robot átállítja: is_active = False és status = 'inactive'.
22.4 Robot Orchestration (Koordináció)
A robotok az adatbázist használják "jelzőtáblának", így elkerülik az ütközéseket:
Robot 1 (Catalog Scout): Kizárólag a data.vehicle_catalog táblát írja.
Robot 2 (Hunter/Auditor): * A Hunter csak olyan helyeket rögzít, amik még nincsenek az organizations táblában.
Az Auditor csak az is_active=True rekordokat vizsgálja felül.
Robot 3 (OCR/Detective): Dokumentum-alapú validálást végez. Ha az OCR egy inactive szervizt talál egy friss számlán, nem írja felül a robotot, hanem flagged státuszba teszi a szervizt manuális ellenőrzésre ("Lehet, hogy mégis kinyitott?").
20.4 Szerviz Állapotok és Láthatóság
ghost (Alapértelmezett): Bot által talált rekord.
Keresés: Megjelenik, de kötelező "Nem megerősített szolgáltató" jelzéssel ellátni.
Gamification: Teljesen nyitott. A felhasználók értékelhetik, fotózhatják. Minden ilyen interakció növeli a trust_score-t.
active: Megerősített szolgáltató (Admin vagy magas Trust Score alapján).
flagged: Felülvizsgálat alatt (pl. ellentmondásos adatok).
inactive: Igazoltan megszűnt. Csak ez az állapot rejtett a keresés elől.
## 2.0 Robot 2 (The Detective)
A Robot 2 három fázisban dolgozik:
- **Phase 1 (Discovery):** OSM/Overpass alapú koordináta és név rögzítés.
- **Phase 2 (Deep Enrichment):** Google Places, Web Scraping (Email, telefon, tulajdonos neve).
- **Phase 3 (Financial Audit):** Nyilvános cégadatok (Árbevétel, létszám, adózott eredmény) éves szinkronizálása.