STABLE: Final schema sync, optimized gitignore
This commit is contained in:
90
archive/2026.02.18 Archive_old_mapps/brand_seeder.py.old
Normal file
90
archive/2026.02.18 Archive_old_mapps/brand_seeder.py.old
Normal file
@@ -0,0 +1,90 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/workers/brand_seeder.py
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
from sqlalchemy import text
|
||||
from app.db.session import AsyncSessionLocal
|
||||
|
||||
# Logging setup for Sentinel monitoring.
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
logger = logging.getLogger("Smart-Seeder-v1.0.2")


async def seed_with_priority():
    """Populate the ``catalog_discovery`` table from the Dutch RDW registry.

    Logic: only makes with at least 10 vehicles on the road are collected,
    so the catalog is not polluted with one-off home-built vehicles.

    Side effects: issues one HTTPS request to the RDW open-data API and
    upserts one row per (make, vehicle class) pair into
    ``data.catalog_discovery``. Returns ``None``; failures are logged.
    """
    # RDW SoQL query: make (merk), kind (voertuigsoort), and count (total).
    # Server-side grouping and filtering (having total >= 10) keeps it fast.
    RDW_URL = (
        "https://opendata.rdw.nl/resource/m9d7-ebf2.json?"
        "$select=merk,voertuigsoort,count(*)%20as%20total"
        "&$group=merk,voertuigsoort"
        "&$having=total%20>=%2010"
    )

    logger.info("📥 Adatok lekérése az RDW-től prioritásos besoroláshoz...")

    async with httpx.AsyncClient(timeout=120) as client:
        try:
            resp = await client.get(RDW_URL)
            if resp.status_code != 200:
                logger.error(f"❌ RDW API hiba: {resp.status_code}")
                return

            raw_data = resp.json()
            logger.info(f"📊 {len(raw_data)} potenciális márka-kategória páros érkezett.")

            # UPSERT logic: if the row exists and is still 'pending' we refresh the
            # status, but already processed records are never overwritten.
            # Hoisted out of the loop — the statement text is loop-invariant.
            query = text("""
                INSERT INTO data.catalog_discovery (make, model, vehicle_class, source, status)
                VALUES (:make, 'ALL_VARIANTS', :v_class, 'smart_seeder_v1_0_2', :status)
                ON CONFLICT (make, model, vehicle_class)
                DO UPDATE SET
                    status = CASE
                        WHEN data.catalog_discovery.status = 'pending' THEN EXCLUDED.status
                        ELSE data.catalog_discovery.status
                    END
                WHERE data.catalog_discovery.make = EXCLUDED.make;
            """)

            async with AsyncSessionLocal() as db:
                for entry in raw_data:
                    make = str(entry.get("merk", "")).upper().strip()
                    # FIX: coerce to str — a null "voertuigsoort" value would make
                    # the `in` membership tests below raise TypeError.
                    v_kind = str(entry.get("voertuigsoort") or "")

                    if not make:
                        continue

                    # --- PRIORITY LOGIC (per Master Book 2.0) ---
                    # 1. Passenger car (Personenauto) -> 'pending' (processed first)
                    # 2. Motorcycle (Motorfiets)      -> 'queued_motor'
                    # 3. Everything else (truck, bus, agricultural) -> 'queued_heavy'
                    if "Personenauto" in v_kind:
                        status = 'pending'
                        v_class = 'car'
                    elif "Motorfiets" in v_kind:
                        status = 'queued_motor'
                        v_class = 'motorcycle'
                    else:
                        status = 'queued_heavy'
                        v_class = 'truck'

                    await db.execute(query, {
                        "make": make,
                        "v_class": v_class,
                        "status": status
                    })

                await db.commit()
                logger.info("✅ Discovery lista sikeresen feltöltve és prioritizálva.")

        except Exception as e:
            # FIX: logger.exception preserves the traceback for Sentinel.
            logger.exception(f"❌ Kritikus hiba a seeder futása közben: {e}")


if __name__ == "__main__":
    asyncio.run(seed_with_priority())
|
||||
35
archive/2026.02.18 Archive_old_mapps/catalog_filler.py.old
Normal file
35
archive/2026.02.18 Archive_old_mapps/catalog_filler.py.old
Normal file
@@ -0,0 +1,35 @@
|
||||
# app/workers/catalog_filler.py
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
from sqlalchemy import select
|
||||
|
||||
class CatalogFiller:
    """Seeds the asset catalog with a handful of default make/model rows."""

    @staticmethod
    async def seed_initial_data():
        """Insert example catalog entries, skipping any that already exist."""
        seed_rows = [
            {"make": "Audi", "model": "A4", "generation": "B8 (2008-2015)", "engine_variant": "2.0 TDI (150 LE)", "fuel_type": "Diesel"},
            {"make": "BMW", "model": "3 Series", "generation": "F30 (2012-2019)", "engine_variant": "320d (190 LE)", "fuel_type": "Diesel"},
            {"make": "Volkswagen", "model": "Passat", "generation": "B8 (2014-)", "engine_variant": "2.0 TDI (150 LE)", "fuel_type": "Diesel"}
        ]

        async with SessionLocal() as db:
            for row in seed_rows:
                # Duplicate check keyed on make + model + engine variant.
                lookup = select(AssetCatalog).where(
                    AssetCatalog.make == row["make"],
                    AssetCatalog.model == row["model"],
                    AssetCatalog.engine_variant == row["engine_variant"]
                )
                found = (await db.execute(lookup)).scalar_one_or_none()

                if found is None:
                    db.add(AssetCatalog(**row))

            await db.commit()
            print("Catalog seeding complete.")


if __name__ == "__main__":
    asyncio.run(CatalogFiller.seed_initial_data())
|
||||
270
archive/2026.02.18 Archive_old_mapps/catalog_robot1.4.1.py.old
Normal file
270
archive/2026.02.18 Archive_old_mapps/catalog_robot1.4.1.py.old
Normal file
@@ -0,0 +1,270 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
import sys
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
# --- FORCED TIMESTAMPED LOGGING ---
# Remove any previously installed handlers so the timestamped format is guaranteed.
for handler in logging.root.handlers[:]:
    logging.root.removeHandler(handler)

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    stream=sys.stdout
)
logger = logging.getLogger("Robot-v1.4.1-Powerhouse")


class CatalogMaster:
    """
    Master Hunter Robot v1.4.1 - Powerhouse Edition
    - Parallel Dutch (RDW) and US (NHTSA batch) discovery.
    - Guaranteed timestamped logging.
    - Multi-worker safe (FOR UPDATE SKIP LOCKED).
    - Rate-limit (HTTP 429) protection.
    """

    # API endpoints
    RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
    RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
    RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
    RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"
    US_BATCH = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}?format=json"

    # UK API (can be activated once a token is available)
    UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"

    RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
    UK_API_KEY = os.getenv("UK_DVLA_API_KEY")

    HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
    HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}

    # RDW "voertuigsoort" values mapped onto internal vehicle classes.
    CATEGORY_MAP = {
        "Personenauto": "car",
        "Motorfiets": "motorcycle",
        "Bedrijfsauto": "truck",
        "Vrachtwagen": "truck",
        "Opleggertrekker": "truck",
        "Bus": "bus",
        "Aanhangwagen": "trailer",
        "Oplegger": "trailer",
        "Landbouw- of bosbouwtrekker": "agricultural",
        "camper": "camper"
    }

    # Throttle parallel enrichment (max 5 in-flight requests per robot instance).
    semaphore = asyncio.Semaphore(5)

    @classmethod
    def clean_kw(cls, val):
        """Parse a power value into a positive int kW, or None.

        Accepts comma decimal separators; values in (0, 1) kW are treated
        as sensor noise and dropped.
        """
        try:
            if val is None: return None
            f_val = float(str(val).replace(',', '.'))
            if 0 < f_val < 1.0: return None
            v = int(f_val)
            return v if v > 0 else None
        except (ValueError, TypeError):
            return None

    @classmethod
    def clean_int(cls, val):
        """Parse a numeric string (comma or dot decimals) into int, or None."""
        try:
            if val is None: return None
            return int(float(str(val).replace(',', '.')))
        except (ValueError, TypeError):
            return None

    @classmethod
    async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
        """Smart API caller with 429 back-off and timestamped logging.

        Returns the decoded JSON payload on HTTP 200/201, otherwise [].
        """
        async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
            for attempt in range(3):
                try:
                    if method == "POST":
                        resp = await client.post(url, json=json_data, timeout=30)
                    else:
                        resp = await client.get(url, params=params, timeout=30)

                    if resp.status_code == 429:
                        # Linear back-off: 5s, 10s, 15s.
                        wait_time = (attempt + 1) * 5
                        logger.warning(f"⚠️ RATE LIMIT! Várakozás {wait_time}mp: {url}")
                        await asyncio.sleep(wait_time)
                        continue

                    return resp.json() if resp.status_code in [200, 201] else []
                except Exception as e:
                    logger.error(f"❌ API Hiba ({url}): {e}")
                    await asyncio.sleep(2)
        return []

    @classmethod
    async def get_deep_tech(cls, plate, main_kw=None, vin=None):
        """Deep enrichment from multiple sources in parallel."""
        async with cls.semaphore:
            res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}

            # --- 1. DUTCH (RDW) ENRICHMENT ---
            fuel_task = cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
            axle_task = cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)

            fuel_data, axle_data = await asyncio.gather(fuel_task, axle_task)

            if fuel_data:
                f0 = fuel_data[0]
                if not res["kw"]:
                    res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
                res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
                res["euro"] = f0.get("uitlaatemissieniveau")

            if axle_data:
                res["axles"] = cls.clean_int(axle_data[0].get("aantal_assen"))

            # --- 2. UK (DVLA) CHECK (enable once an API key is available) ---
            # FIX: was a bare triple-quoted string (an evaluated expression
            # statement); converted to real comments so it cannot be mistaken
            # for a docstring or accidentally executed.
            # if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
            #     uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST",
            #                                   json_data={"registrationNumber": plate},
            #                                   headers=cls.HEADERS_UK)
            #     if uk_data and not isinstance(uk_data, list):
            #         res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity"))
            #         res["euro"] = res["euro"] or uk_data.get("euroStatus")
            return res

    @classmethod
    async def discover_holland(cls, make_name, limit=1000):
        """Dutch discovery branch: collects plates, paging through RDW."""
        offset, variants = 0, {}
        while True:
            params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
            data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
            if not data: break

            for item in data:
                plate = item.get("kenteken")
                if not plate: continue
                model = str(item.get("handelsbenaming", "Unknown")).upper()
                ccm = cls.clean_int(item.get("cilinderinhoud"))
                weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
                kw = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
                raw_date = item.get("datum_eerste_toelating")
                year = int(str(raw_date)[:4]) if raw_date else 2024

                v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
                # Dedup key: one entry per distinct technical variant.
                key = f"{model}-{ccm}-{weight}-{v_class}-{kw}-{year}"

                if key not in variants:
                    variants[key] = {
                        "model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
                        "plate": plate, "main_kw": kw, "prod_year": year, "vin": item.get("vin")
                    }
            if len(data) < limit: break
            offset += limit
        return variants

    @classmethod
    async def discover_usa_batch(cls, make_name):
        """US NHTSA batch discovery: collects model names for recent years."""
        variants = {}
        years = range(datetime.datetime.now().year - 5, datetime.datetime.now().year + 1)

        async def fetch_year(year):
            url = cls.US_BATCH.format(make=make_name.upper(), year=year)
            logger.info(f"🇺🇸 USA Batch Discovery indítása: {make_name} ({year})")
            data = await cls.fetch_api(url)
            if data and "Results" in data:
                for m in data["Results"]:
                    m_name = m.get("Model_Name", "Unknown").upper()
                    key = f"US-{m_name}-{year}"
                    if key not in variants:
                        variants[key] = {
                            "model": m_name, "ccm": None, "weight": None, "v_class": "car",
                            "plate": "US-DISCOVERY", "main_kw": None, "prod_year": year, "vin": None
                        }

        await asyncio.gather(*(fetch_year(y) for y in years))
        return variants

    @classmethod
    async def process_make(cls, db, task_id, make_name):
        """Run full discovery + enrichment for one make and persist results."""
        logger.info(f"🚀 >>> {make_name} Powerhouse v1.4.1 INDUL...")

        # Parallel discovery; Dutch variants win on key collisions (they carry plates).
        holland_task = cls.discover_holland(make_name)
        usa_task = cls.discover_usa_batch(make_name)

        holland_variants, usa_variants = await asyncio.gather(holland_task, usa_task)
        all_variants = {**usa_variants, **holland_variants}

        logger.info(f"📊 Összefésült variánsok száma: {len(all_variants)}")

        async def enrich_and_save(v):
            deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
            try:
                db_item = AssetCatalog(
                    make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
                    fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
                    max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
                    year_from=v["prod_year"], euro_class=deep["euro"],
                    factory_data={
                        "source": "Powerhouse-v1.4.1",
                        "discovery_nl": v["plate"] != "US-DISCOVERY",
                        "enriched_at": str(datetime.datetime.now())
                    }
                )
                return db_item
            except Exception as exc:
                # FIX: was `except Exception: return None`, which silently dropped
                # the variant. Still best-effort, but the loss is now visible.
                logger.warning("⚠️ Variáns kihagyva (%s): %s", v.get("model"), exc)
                return None

        # Parallel enrichment (bounded by the class semaphore).
        results = await asyncio.gather(*(enrich_and_save(v) for v in all_variants.values()))

        total_saved = 0
        for item in results:
            if item:
                db.add(item)
                total_saved += 1

        await db.commit()
        await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
        await db.commit()
        logger.info(f"🏁 {make_name} KÉSZ. {total_saved} egyedi rekord rögzítve.")

    @classmethod
    async def run(cls):
        """Worker loop: claim one pending make at a time and process it."""
        logger.info("🤖 Robot 1.4.1 (Powerhouse) ONLINE - Multi-Worker Safe Mode")
        while True:
            async with SessionLocal() as db:
                # SKIP LOCKED guard for safe parallel workers.
                query = text("""
                    SELECT id, make FROM data.catalog_discovery
                    WHERE status = 'pending'
                    LIMIT 1
                    FOR UPDATE SKIP LOCKED
                """)
                res = await db.execute(query)
                task = res.fetchone()

                if task:
                    task_id, make_name = task
                    # Mark as running inside the claiming transaction so no
                    # other worker can pick it up.
                    await db.execute(
                        text("UPDATE data.catalog_discovery SET status = 'running' WHERE id = :id"),
                        {"id": task_id}
                    )
                    await db.commit()
                    await cls.process_make(db, task_id, make_name)
                else:
                    logger.info("😴 Várólista üres vagy minden feladat foglalt. Alvás 60mp...")
                    await asyncio.sleep(60)
            await asyncio.sleep(1)


if __name__ == "__main__":
    asyncio.run(CatalogMaster.run())
|
||||
272
archive/2026.02.18 Archive_old_mapps/catalog_robot1.4.py.old
Normal file
272
archive/2026.02.18 Archive_old_mapps/catalog_robot1.4.py.old
Normal file
@@ -0,0 +1,272 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("Robot-v1.4-Powerhouse")


class CatalogMaster:
    """
    Master Hunter Robot v1.4 - Powerhouse Edition
    - Parallel Dutch (RDW) and US (NHTSA batch) discovery.
    - Prepared, commented-out UK (DVLA) integration.
    - Async semaphore: parallel technical enrichment.
    - Smart merging across the global sources.
    """

    # API endpoints
    RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
    RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
    RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
    RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"

    # US batch API: one call fetches all models of a make for a model year.
    US_BATCH = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}?format=json"

    # UK API (disabled until a token is available)
    # UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"

    RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
    UK_API_KEY = os.getenv("UK_DVLA_API_KEY")  # placeholder for a future token

    HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
    # HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}

    # RDW "voertuigsoort" values mapped onto internal vehicle classes.
    CATEGORY_MAP = {
        "Personenauto": "car",
        "Motorfiets": "motorcycle",
        "Bedrijfsauto": "truck",
        "Vrachtwagen": "truck",
        "Opleggertrekker": "truck",
        "Bus": "bus",
        "Aanhangwagen": "trailer",
        "Oplegger": "trailer",
        "Landbouw- of bosbouwtrekker": "agricultural",
        "camper": "camper"
    }

    # Throttle parallel enrichment so the APIs do not ban us (max 5 in flight).
    semaphore = asyncio.Semaphore(5)

    @classmethod
    def clean_kw(cls, val):
        """Parse a power value into a positive int kW, or None.

        Accepts comma decimal separators; values in (0, 1) kW are treated
        as sensor noise and dropped.
        """
        try:
            if val is None: return None
            f_val = float(str(val).replace(',', '.'))
            if 0 < f_val < 1.0: return None
            v = int(f_val)
            return v if v > 0 else None
        except (ValueError, TypeError):
            return None

    @classmethod
    def clean_int(cls, val):
        """Parse a numeric string (comma or dot decimals) into int, or None."""
        try:
            if val is None: return None
            return int(float(str(val).replace(',', '.')))
        except (ValueError, TypeError):
            return None

    @classmethod
    async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
        """API caller with up to 3 attempts and 429 back-off.

        Returns the decoded JSON payload on HTTP 200/201, otherwise [].
        """
        async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
            for attempt in range(3):  # retry up to 3 times if needed
                try:
                    if method == "POST":
                        resp = await client.post(url, json=json_data, timeout=30)
                    else:
                        resp = await client.get(url, params=params, timeout=30)

                    if resp.status_code == 429:  # we were too fast
                        wait_time = (attempt + 1) * 5  # back off: 5s, 10s...
                        logger.warning(f"⚠️ RDW limit elérve! Pihenő {wait_time} mp...")
                        await asyncio.sleep(wait_time)
                        continue

                    return resp.json() if resp.status_code in [200, 201] else []
                except Exception as e:
                    logger.error(f"❌ API Hiba ({url}): {e}")
                    await asyncio.sleep(2)
        return []

    @classmethod
    async def get_deep_tech(cls, plate, main_kw=None, vin=None):
        """Deep enrichment from parallel sources."""
        async with cls.semaphore:
            res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}

            # --- 1. DUTCH (RDW) ENRICHMENT ---
            fuel_task = cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
            axle_task = cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)

            # Fetch the Dutch data in parallel.
            fuel_data, axle_data = await asyncio.gather(fuel_task, axle_task)

            if fuel_data:
                f0 = fuel_data[0]
                if not res["kw"]:
                    res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
                res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
                res["euro"] = f0.get("uitlaatemissieniveau")

            if axle_data:
                res["axles"] = cls.clean_int(axle_data[0].get("aantal_assen"))

            # --- 2. UK (DVLA) CHECK (commented out until a token is available) ---
            # FIX: was a bare triple-quoted string (an evaluated expression
            # statement); converted to real comments.
            # if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
            #     uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST",
            #                                   json_data={"registrationNumber": plate},
            #                                   headers=cls.HEADERS_UK)
            #     if uk_data and not isinstance(uk_data, list):
            #         res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity"))
            #         res["euro"] = res["euro"] or uk_data.get("euroStatus")

            return res

    @classmethod
    async def discover_holland(cls, make_name, limit=1000):
        """Dutch discovery branch."""
        offset, variants = 0, {}
        while True:
            params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
            data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
            if not data: break

            for item in data:
                plate = item.get("kenteken")
                if not plate: continue
                model = str(item.get("handelsbenaming", "Unknown")).upper()
                ccm = cls.clean_int(item.get("cilinderinhoud"))
                weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
                kw = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
                raw_date = item.get("datum_eerste_toelating")
                year = int(str(raw_date)[:4]) if raw_date else 2024

                v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
                # Dedup key: one entry per distinct technical variant.
                key = f"{model}-{ccm}-{weight}-{v_class}-{kw}-{year}"

                if key not in variants:
                    variants[key] = {
                        "model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
                        "plate": plate, "main_kw": kw, "prod_year": year, "vin": item.get("vin")
                    }
            if len(data) < limit: break
            offset += limit
        return variants

    @classmethod
    async def discover_usa_batch(cls, make_name):
        """US NHTSA batch discovery branch (recent model years)."""
        variants = {}
        # Look at the last 5 model years for the freshest models.
        years = range(datetime.datetime.now().year - 5, datetime.datetime.now().year + 1)

        async def fetch_year(year):
            url = cls.US_BATCH.format(make=make_name.upper(), year=year)
            data = await cls.fetch_api(url)
            if data and "Results" in data:
                for m in data["Results"]:
                    m_name = m.get("Model_Name", "Unknown").upper()
                    # US rows have no plate; Robot 2 enriches later if needed.
                    key = f"US-{m_name}-{year}"
                    variants[key] = {
                        "model": m_name, "ccm": None, "weight": None, "v_class": "car",
                        "plate": "US-DISCOVERY", "main_kw": None, "prod_year": year, "vin": None
                    }

        await asyncio.gather(*(fetch_year(y) for y in years))
        return variants

    @classmethod
    async def process_make(cls, db, task_id, make_name):
        """Run full discovery + enrichment for one make and persist results."""
        logger.info(f"🚀 >>> {make_name} Powerhouse v1.4 INDUL...")

        # PARALLEL DISCOVERY: Dutch and US at the same time.
        holland_task = cls.discover_holland(make_name)
        usa_task = cls.discover_usa_batch(make_name)

        holland_variants, usa_variants = await asyncio.gather(holland_task, usa_task)

        # Merge (Dutch wins on collisions because it carries the plate).
        all_variants = {**usa_variants, **holland_variants}
        logger.info(f"📊 Összesen {len(all_variants)} egyedi variáns (NL: {len(holland_variants)}, US: {len(usa_variants)})")

        # PARALLEL ENRICHMENT
        async def enrich_and_save(v):
            deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
            try:
                db_item = AssetCatalog(
                    make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
                    fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
                    max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
                    year_from=v["prod_year"], euro_class=deep["euro"],
                    factory_data={
                        "source": "Powerhouse-v1.4",
                        "discovery_nl": v["plate"] != "US-DISCOVERY",
                        "enriched_at": str(datetime.datetime.now())
                    }
                )
                return db_item
            except Exception as exc:
                # FIX: was `except Exception: return None`, which silently dropped
                # the variant. Still best-effort, but the loss is now visible.
                logger.warning("⚠️ Variáns kihagyva (%s): %s", v.get("model"), exc)
                return None

        # Launch all enrichments together (the semaphore bounds concurrency).
        results = await asyncio.gather(*(enrich_and_save(v) for v in all_variants.values()))

        # Persist
        total_saved = 0
        for item in results:
            if item:
                db.add(item)
                total_saved += 1

        await db.commit()
        await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
        await db.commit()
        logger.info(f"🏁 {make_name} KÉSZ. {total_saved} rekord rögzítve.")

    @classmethod
    async def run(cls):
        """Worker loop: claim one pending make at a time and process it."""
        logger.info("🤖 Robot 1.4 (Powerhouse) ONLINE - Multi-Worker Safe")
        while True:
            async with SessionLocal() as db:
                # 1. 'FOR UPDATE SKIP LOCKED' - grab and lock one row;
                #    other robots skip whatever we already hold.
                query = text("""
                    SELECT id, make FROM data.catalog_discovery
                    WHERE status = 'pending'
                    LIMIT 1
                    FOR UPDATE SKIP LOCKED
                """)

                res = await db.execute(query)
                task = res.fetchone()

                if task:
                    task_id, make_name = task
                    # 2. Flip to 'running' inside the claiming transaction so
                    #    nobody else touches it.
                    await db.execute(
                        text("UPDATE data.catalog_discovery SET status = 'running' WHERE id = :id"),
                        {"id": task_id}
                    )
                    await db.commit()  # finalize the claim

                    # 3. Do the actual work.
                    await cls.process_make(db, task_id, make_name)
                else:
                    logger.info("😴 Várólista üres (vagy minden sor foglalt). Alvás 60 mp...")
                    await asyncio.sleep(60)

            await asyncio.sleep(1)


if __name__ == "__main__":
    asyncio.run(CatalogMaster.run())
|
||||
48
archive/2026.02.18 Archive_old_mapps/harvester_base.py.old
Normal file
48
archive/2026.02.18 Archive_old_mapps/harvester_base.py.old
Normal file
@@ -0,0 +1,48 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/services/harvester_base.py
|
||||
import httpx
|
||||
import logging
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class BaseHarvester:
|
||||
""" MDM Adatgyűjtő Alaposztály. """
|
||||
def __init__(self, category: str):
|
||||
self.category = category # 'car', 'motorcycle', 'truck'
|
||||
self.headers = {"User-Agent": "ServiceFinder-Harvester-Bot/2.1"}
|
||||
|
||||
async def check_exists(self, db: AsyncSession, brand: str, model: str, gen: str = None):
|
||||
""" Ellenőrzi a katalógusban való létezést az új AssetCatalog modellben. """
|
||||
stmt = select(AssetCatalog).where(
|
||||
AssetCatalog.make == brand,
|
||||
AssetCatalog.model == model,
|
||||
AssetCatalog.vehicle_class == self.category
|
||||
)
|
||||
if gen:
|
||||
stmt = stmt.where(AssetCatalog.generation == gen)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
async def log_entry(self, db: AsyncSession, brand: str, model: str, specs: dict):
|
||||
""" Létrehoz vagy frissít egy bejegyzést. Támogatja a factory_data dúsítást. """
|
||||
existing = await self.check_exists(db, brand, model, specs.get("generation"))
|
||||
if not existing:
|
||||
new_v = AssetCatalog(
|
||||
make=brand,
|
||||
model=model,
|
||||
generation=specs.get("generation"),
|
||||
year_from=specs.get("year_from"),
|
||||
year_to=specs.get("year_to"),
|
||||
vehicle_class=self.category,
|
||||
fuel_type=specs.get("fuel_type"),
|
||||
power_kw=specs.get("power_kw"),
|
||||
engine_capacity=specs.get("engine_capacity"),
|
||||
factory_data=specs.get("factory_data", {}) # MDM JSONB tárolás
|
||||
)
|
||||
db.add(new_v)
|
||||
logger.info(f"🆕 Új katalógus elem rögzítve: {brand} {model}")
|
||||
return True
|
||||
return False
|
||||
12
archive/2026.02.18 Archive_old_mapps/harvester_bikes.py.old
Normal file
12
archive/2026.02.18 Archive_old_mapps/harvester_bikes.py.old
Normal file
@@ -0,0 +1,12 @@
|
||||
from .harvester_base import BaseHarvester
|
||||
|
||||
class BikeHarvester(BaseHarvester):
    """Motorcycle-specific harvester (placeholder implementation)."""

    def __init__(self):
        super().__init__(category="motorcycle")
        # Example source endpoint; the real API is plugged in later.
        self.api_url = "https://api.example-bikes.com/v1/"

    async def harvest_all(self, db):
        """Run the motorcycle-specific collection (fetch loop not yet implemented)."""
        print("🏍️ Motor Robot: Adatgyűjtés indul...")
        # ... fetch-and-save loop goes here ...
        await db.commit()
|
||||
48
archive/2026.02.18 Archive_old_mapps/harvester_cars.py.old
Normal file
48
archive/2026.02.18 Archive_old_mapps/harvester_cars.py.old
Normal file
@@ -0,0 +1,48 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/services/harvester_cars.py
|
||||
import httpx
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from .harvester_base import BaseHarvester
|
||||
|
||||
class VehicleHarvester(BaseHarvester):
    """Passenger-car harvester backed by the CarQuery API."""

    def __init__(self):
        super().__init__(category="car")
        self.base_url = "https://www.carqueryapi.com/api/0.3/"

    async def _get_api_data(self, params: dict):
        """GET a CarQuery endpoint and return the decoded JSON, or None.

        CarQuery may wrap its payload in a JSONP callback (`?({...});`);
        the wrapper is stripped before parsing.
        """
        import json  # local import: only needed for JSONP unwrapping here
        async with httpx.AsyncClient() as client:
            try:
                response = await client.get(self.base_url, params=params, headers=self.headers, timeout=15.0)
                if response.status_code == 200:
                    text = response.text
                    if text.startswith("?("): text = text[2:-2]
                    # BUG FIX: the original stripped the JSONP wrapper into
                    # `text` but then parsed the RAW body via response.json(),
                    # discarding the stripped text and failing on wrapped
                    # responses. Parse the unwrapped text instead.
                    return json.loads(text)
                return None
            except Exception as e:
                print(f"CarQuery Robot Hiba: {e}")
                return None

    async def harvest_all(self, db: AsyncSession):
        """ Automatikus CarQuery szinkronizáció MDM alapon. """
        print("🚗 Személyautó Robot: Indul az adatgyűjtés...")

        makes_data = await self._get_api_data({"cmd": "getMakes", "sold_in_us": 0})
        if not makes_data: return

        for make in makes_data.get("Makes", [])[:50]:  # test limit
            make_id = make['make_id']
            make_name = make['make_display']

            models_data = await self._get_api_data({"cmd": "getModels", "make": make_id})
            if not models_data: continue

            for model in models_data.get("Models", []):
                specs = {
                    "factory_data": {"api_source": "carquery", "api_make_id": make_id}
                }
                await self.log_entry(db, make_name, model['model_name'], specs)

            await db.commit()
            await asyncio.sleep(1)  # rate limiting between makes

        print("🏁 Személyautó Robot: Adatok szinkronizálva.")
|
||||
8
archive/2026.02.18 Archive_old_mapps/harvester_trucks.py
Normal file
8
archive/2026.02.18 Archive_old_mapps/harvester_trucks.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from .harvester_base import BaseHarvester
|
||||
|
||||
class TruckHarvester(BaseHarvester):
    """Heavy-vehicle harvester (placeholder implementation)."""

    def __init__(self):
        super().__init__(category="truck")

    async def run(self, db):
        """Entry point for heavy-machinery and truck discovery (stub)."""
        print("🚛 Truck Robot: Nehézgépek és teherautók keresése...")
|
||||
282
archive/2026.02.18 Archive_old_mapps/service_hunter_old.py.old
Normal file
282
archive/2026.02.18 Archive_old_mapps/service_hunter_old.py.old
Normal file
@@ -0,0 +1,282 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import uuid
|
||||
import os
|
||||
import sys
|
||||
import csv
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text
|
||||
from sqlalchemy.orm import selectinload
|
||||
from app.db.session import SessionLocal
|
||||
|
||||
# Modellek importálása
|
||||
from app.models.service import ServiceProfile, ExpertiseTag
|
||||
from app.models.organization import Organization, OrganizationFinancials, OrgType, OrgUserRole, OrganizationMember
|
||||
from app.models.identity import Person
|
||||
from app.models.address import Address, GeoPostalCode
|
||||
from geoalchemy2.elements import WKTElement
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Naplózás beállítása
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot2-Dunakeszi-Detective")
|
||||
|
||||
class ServiceHunter:
    """
    Robot 2.7.2: Dunakeszi Detective - deep model integration.

    Discovery pipeline, in order (trust score reflects source quality):
    1. Local CSV (own submissions, geocoded from the address text - trust 50)
    2. OSM / Overpass (community data - trust 10)
    3. Google Places (data enrichment / fallback - trust 30)
    """

    OVERPASS_URL = "http://overpass-api.de/api/interpreter"
    PLACES_NEW_URL = "https://places.googleapis.com/v1/places:searchNearby"
    GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
    GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
    LOCAL_CSV_PATH = "/app/app/workers/local_services.csv"

    @classmethod
    async def geocode_address(cls, address_text):
        """Resolve a free-text address into GPS coordinates and address components.

        Returns a dict with lat/lng/zip/city/street/type/number (Hungarian
        defaults for missing parts), or None if the API key is missing or the
        lookup fails.
        """
        if not cls.GOOGLE_API_KEY:
            logger.warning("⚠️ Google API kulcs hiányzik!")
            return None

        params = {"address": address_text, "key": cls.GOOGLE_API_KEY}
        try:
            async with httpx.AsyncClient() as client:
                resp = await client.get(cls.GEOCODE_URL, params=params, timeout=10)
                if resp.status_code == 200:
                    data = resp.json()
                    if data.get("results"):
                        result = data["results"][0]
                        loc = result["geometry"]["location"]

                        # Extract the address components required by the mandatory columns.
                        components = result.get("address_components", [])
                        parsed = {"lat": loc["lat"], "lng": loc["lng"], "zip": "", "city": "", "street": "Ismeretlen", "type": "utca", "number": "1"}

                        for c in components:
                            types = c.get("types", [])
                            if "postal_code" in types:
                                parsed["zip"] = c["long_name"]
                            if "locality" in types:
                                parsed["city"] = c["long_name"]
                            if "route" in types:
                                parsed["street"] = c["long_name"]
                            if "street_number" in types:
                                parsed["number"] = c["long_name"]

                        logger.info(f"📍 Geocoding sikeres: {address_text}")
                        return parsed
                else:
                    logger.error(f"❌ Geocoding hiba: {resp.status_code}")
        except Exception as e:
            logger.error(f"❌ Geocoding hiba: {e}")
        return None

    @classmethod
    async def get_google_place_details_new(cls, lat, lon):
        """Google Places API (New): fetch the nearest matching service using a FieldMask.

        Returns a small dict (name/google_id/types/phone/website) or None.
        """
        if not cls.GOOGLE_API_KEY:
            return None

        headers = {
            "Content-Type": "application/json",
            "X-Goog-Api-Key": cls.GOOGLE_API_KEY,
            "X-Goog-FieldMask": "places.displayName,places.id,places.types,places.internationalPhoneNumber,places.websiteUri"
        }

        payload = {
            "includedTypes": ["car_repair", "gas_station", "ev_charging_station", "car_wash", "motorcycle_repair"],
            "maxResultCount": 1,
            "locationRestriction": {
                "circle": {
                    "center": {"latitude": lat, "longitude": lon},
                    # Tight 40 m radius: we only want the place at this exact spot.
                    "radius": 40.0
                }
            }
        }

        try:
            async with httpx.AsyncClient() as client:
                resp = await client.post(cls.PLACES_NEW_URL, json=payload, headers=headers, timeout=10)
                if resp.status_code == 200:
                    places = resp.json().get("places", [])
                    if places:
                        p = places[0]
                        return {
                            "name": p.get("displayName", {}).get("text"),
                            "google_id": p.get("id"),
                            "types": p.get("types", []),
                            "phone": p.get("internationalPhoneNumber"),
                            "website": p.get("websiteUri")
                        }
        except Exception as e:
            logger.error(f"❌ Google kiegészítő hívás hiba: {e}")
        return None

    @classmethod
    async def import_local_csv(cls, db: AsyncSession):
        """Load manually submitted services from the local CSV file.

        Each row is geocoded from its address ('cim') column and saved through
        save_service_deep with the highest trust source ("local_manual").
        """
        if not os.path.exists(cls.LOCAL_CSV_PATH):
            return

        try:
            with open(cls.LOCAL_CSV_PATH, mode='r', encoding='utf-8') as f:
                reader = csv.DictReader(f)
                for row in reader:
                    geo_data = None
                    if row.get('cim'):
                        geo_data = await cls.geocode_address(row['cim'])

                    if geo_data:
                        # Shape the row like an OSM element so one save path handles all sources.
                        element = {
                            "tags": {
                                "name": row['nev'], "phone": row.get('telefon'),
                                "website": row.get('web'), "amenity": row.get('tipus', 'car_repair'),
                                "addr:full": row.get('cim'),
                                "addr:city": geo_data["city"], "addr:zip": geo_data["zip"],
                                "addr:street": geo_data["street"], "addr:type": geo_data["type"],
                                "addr:number": geo_data["number"]
                            },
                            "lat": geo_data["lat"], "lon": geo_data["lng"]
                        }
                        await cls.save_service_deep(db, element, source="local_manual")
            logger.info("✅ Helyi CSV adatok feldolgozva.")
        except Exception as e:
            logger.error(f"❌ CSV feldolgozási hiba: {e}")

    @classmethod
    async def get_or_create_person(cls, db: AsyncSession, name: str) -> Person:
        """Find or create a "ghost" Person (inactive placeholder) from a display name."""
        # Hungarian name order: the first word is the family name.
        names = name.split(' ', 1)
        last_name = names[0]
        first_name = names[1] if len(names) > 1 else "Ismeretlen"
        stmt = select(Person).where(Person.last_name == last_name, Person.first_name == first_name)
        result = await db.execute(stmt)
        person = result.scalar_one_or_none()
        if not person:
            person = Person(last_name=last_name, first_name=first_name, is_ghost=True, is_active=False)
            db.add(person)
            await db.flush()
        return person

    @classmethod
    async def enrich_financials(cls, db: AsyncSession, org_id: int):
        """Initialize an (empty) financial record for the previous year."""
        financial = OrganizationFinancials(
            organization_id=org_id, year=datetime.now(timezone.utc).year - 1, source="bot_discovery"
        )
        db.add(financial)

    @classmethod
    async def save_service_deep(cls, db: AsyncSession, element: dict, source="osm"):
        """Deep save using the model-specific field names and mandatory defaults.

        Creates Address -> Organization -> ServiceProfile (-> owner, financials)
        for previously unseen organizations; existing names are left untouched.
        """
        tags = element.get("tags", {})
        lat, lon = element.get("lat"), element.get("lon")
        # BUGFIX: explicit None check — the old truthiness test (`not lat`)
        # would also have rejected legitimate 0.0 coordinates.
        if lat is None or lon is None:
            return

        osm_name = tags.get("name") or tags.get("brand") or tags.get("operator")
        google_data = None
        # Brand-only names (e.g. fuel stations tagged just "Shell") are too
        # generic: ask Google for the real business at these coordinates.
        if not osm_name or osm_name.lower() in ['aprilia', 'bosch', 'shell', 'mol', 'omv', 'ismeretlen']:
            google_data = await cls.get_google_place_details_new(lat, lon)

        final_name = (google_data["name"] if google_data else osm_name) or "Ismeretlen Szolgáltató"

        stmt = select(Organization).where(Organization.full_name == final_name)
        result = await db.execute(stmt)
        org = result.scalar_one_or_none()

        if not org:
            # 1. Create the Address (mandatory fields from the passed tags or defaults).
            new_addr = Address(
                latitude=lat,
                longitude=lon,
                full_address_text=tags.get("addr:full") or f"2120 Dunakeszi, {tags.get('addr:street', 'Ismeretlen')} {tags.get('addr:housenumber', '1')}",
                street_name=tags.get("addr:street") or "Ismeretlen",
                street_type=tags.get("addr:type") or "utca",
                house_number=tags.get("addr:number") or tags.get("addr:housenumber") or "1"
            )
            db.add(new_addr)
            await db.flush()

            # 2. Create the Organization (these denormalized address fields live on the model).
            org = Organization(
                full_name=final_name,
                name=final_name[:50],
                org_type=OrgType.service,
                address_id=new_addr.id,
                address_city=tags.get("addr:city") or "Dunakeszi",
                address_zip=tags.get("addr:zip") or "2120",
                address_street_name=new_addr.street_name,
                address_street_type=new_addr.street_type,
                address_house_number=new_addr.house_number
            )
            db.add(org)
            await db.flush()

            # 3. Service profile — trust score depends on the data source.
            trust = 50 if source == "local_manual" else (30 if google_data else 10)
            spec = {"brands": [], "types": google_data["types"] if google_data else [], "osm_tags": tags}
            if tags.get("brand"):
                spec["brands"].append(tags.get("brand"))

            profile = ServiceProfile(
                organization_id=org.id,
                location=WKTElement(f'POINT({lon} {lat})', srid=4326),
                status="ghost",
                trust_score=trust,
                google_place_id=google_data["google_id"] if google_data else None,
                specialization_tags=spec,
                website=google_data["website"] if google_data else tags.get("website"),
                contact_phone=google_data["phone"] if google_data else tags.get("phone")
            )
            db.add(profile)

            # 4. Record the owner as an unverified ghost member, if tagged.
            owner_name = tags.get("operator") or tags.get("contact:person")
            if owner_name and len(owner_name) > 3:
                person = await cls.get_or_create_person(db, owner_name)
                db.add(OrganizationMember(
                    organization_id=org.id,
                    person_id=person.id,
                    role=OrgUserRole.OWNER,
                    is_verified=False
                ))

            await cls.enrich_financials(db, org.id)
            await db.flush()
            logger.info(f"✨ [{source.upper()}] Mentve: {final_name} (Bizalom: {trust})")

    @classmethod
    async def run(cls):
        """Main loop: wait for the DB, then scan CSV + OSM once every 24 hours."""
        logger.info("🤖 Robot 2.7.2: Dunakeszi Detective indítása...")

        # Connection guard: block until the database answers.
        connected = False
        while not connected:
            try:
                async with SessionLocal() as db:
                    await db.execute(text("SELECT 1"))
                    connected = True
            except Exception as e:
                logger.warning(f"⏳ Várakozás a hálózatra (shared-postgres host?): {e}")
                await asyncio.sleep(5)

        while True:
            async with SessionLocal() as db:
                try:
                    await db.execute(text("SET search_path TO data, public"))
                    # 1. Process submitted CSV rows (with geocoding).
                    await cls.import_local_csv(db)
                    await db.commit()

                    # 2. OSM scan via Overpass: repair shops, fuel, charging, wash
                    #    in Dunakeszi plus a 5 km radius fallback circle.
                    query = """[out:json][timeout:120];area["name"="Dunakeszi"]->.city;(nwr["shop"~"car_repair|motorcycle_repair|tyres|car_parts|motorcycle"](area.city);nwr["amenity"~"car_repair|vehicle_inspection|motorcycle_repair|fuel|charging_station|car_wash"](area.city);nwr["amenity"~"car_repair|fuel|charging_station"](around:5000, 47.63, 19.13););out center;"""
                    async with httpx.AsyncClient() as client:
                        resp = await client.post(cls.OVERPASS_URL, data={"data": query}, timeout=120)
                        if resp.status_code == 200:
                            elements = resp.json().get("elements", [])
                            for el in elements:
                                await cls.save_service_deep(db, el, source="osm")
                            await db.commit()
                except Exception as e:
                    logger.error(f"❌ Futáshiba: {e}")

            logger.info("😴 Scan kész, 24 óra pihenő...")
            await asyncio.sleep(86400)
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run the hunter's endless scan loop.
    asyncio.run(ServiceHunter.run())
|
||||
115
archive/2026.02.18 Archive_old_mapps/technical_enricher.py.old
Normal file
115
archive/2026.02.18 Archive_old_mapps/technical_enricher.py.old
Normal file
@@ -0,0 +1,115 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import select, and_
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.vehicle_definitions import VehicleModelDefinition
|
||||
from app.services.ai_service import AIService
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot-Bulk-Master")
|
||||
|
||||
class TechEnricher:
    """Bulk enrichment robot: merges RDW open data and AI lookups into MDM master records.

    Processes `unverified` VehicleModelDefinition rows in batches of 50 until
    none remain, deduplicating variants onto an existing `ai_enriched` master
    record where make + technical_code + engine capacity match.
    """

    API_URL = "https://opendata.rdw.nl/resource/kyri-nuah.json"
    RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
    # The Socrata app token lifts rate limits; omit the header when unset.
    HEADERS = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}

    @classmethod
    async def fetch_rdw_tech_data(cls, make, model):
        """Return the first matching RDW technical record for make/model, or None."""
        params = {"merk": make.upper(), "handelsbenaming": str(model).strip().upper(), "$limit": 1}
        async with httpx.AsyncClient(headers=cls.HEADERS) as client:
            try:
                resp = await client.get(cls.API_URL, params=params, timeout=15)
                if resp.status_code != 200:
                    return None
                # Parse the body once (the original called resp.json() twice).
                payload = resp.json()
                return payload[0] if payload else None
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # still propagate; any network/parse failure yields None.
                return None

    @classmethod
    async def run(cls):
        """Continuous loop: claim up to 50 unverified ids, enrich each in its own session."""
        logger.info("🚀 Master-Merge Robot FOLYAMATOS ÜZEMMÓD INDUL...")

        while True:  # keep cycling until the data runs out
            async with SessionLocal() as main_db:
                stmt = select(VehicleModelDefinition.id).where(
                    VehicleModelDefinition.status == "unverified"
                ).limit(50)  # claim 50 ids per batch
                res = await main_db.execute(stmt)
                ids = res.scalars().all()

                if not ids:
                    logger.info("🏁 Minden rekord feldolgozva. A robot megáll.")
                    break

                logger.info(f"📦 Új csomag indítása: {len(ids)} rekord.")

                for m_id in ids:
                    # One short-lived session per record: a failure rolls back
                    # only that record, not the whole batch.
                    async with SessionLocal() as db:
                        try:
                            current = await db.get(VehicleModelDefinition, m_id)
                            if not current:
                                continue

                            logger.info(f"🧪 Feldolgozás: {current.make} {current.marketing_name} (ID: {m_id})")

                            # Step 1: hard technical data from the RDW registry.
                            rdw_data = await cls.fetch_rdw_tech_data(current.make, current.marketing_name)
                            if rdw_data:
                                # `or current.x` keeps the old value when RDW reports 0.
                                current.engine_capacity = int(float(rdw_data.get("cilinderinhoud", 0))) or current.engine_capacity
                                current.power_kw = int(float(rdw_data.get("netto_maximum_vermogen_kw", 0))) or current.power_kw

                            # Step 2: AI cleanup / canonicalization.
                            ai_data = await AIService.get_clean_vehicle_data(current.make, current.marketing_name, current.vehicle_type)

                            if ai_data:
                                tech_code = ai_data.get("technical_code") or "N/A"
                                new_ccm = ai_data.get("ccm") or current.engine_capacity

                                # Look for an already-enriched master to merge into.
                                master_record = None
                                if tech_code and tech_code != "N/A":
                                    stmt_master = select(VehicleModelDefinition).where(and_(
                                        VehicleModelDefinition.make == current.make,
                                        VehicleModelDefinition.technical_code == tech_code,
                                        VehicleModelDefinition.engine_capacity == new_ccm,
                                        VehicleModelDefinition.status == 'ai_enriched',
                                        VehicleModelDefinition.id != m_id
                                    ))
                                    master_record = (await db.execute(stmt_master)).scalar_one_or_none()

                                if master_record:
                                    # Merge: fold this record's names into the master's synonyms.
                                    logger.info(f"🔗 Merge: ID:{m_id} -> Master ID:{master_record.id}")
                                    syns = set(master_record.synonyms or [])
                                    syns.update(ai_data.get("synonyms", []))
                                    syns.add(current.marketing_name)
                                    master_record.synonyms = list(syns)
                                    current.status = "duplicate"
                                    current.parent_id = master_record.id
                                else:
                                    # Promote this record to an enriched master.
                                    current.technical_code = tech_code if tech_code != "N/A" else f"N/A-{m_id}"
                                    current.marketing_name = ai_data.get("marketing_name", current.marketing_name)
                                    current.engine_capacity = new_ccm
                                    current.power_kw = ai_data.get("kw") or current.power_kw
                                    current.year_from = ai_data.get("year_from")
                                    current.year_to = ai_data.get("year_to")
                                    current.synonyms = ai_data.get("synonyms", [])

                                    if ai_data.get("maintenance"):
                                        old_spec = current.specifications or {}
                                        old_spec.update(ai_data.get("maintenance"))
                                        current.specifications = old_spec

                                    current.status = "ai_enriched"
                            else:
                                # AI gave nothing: at least guarantee a unique technical code.
                                if not current.technical_code:
                                    current.technical_code = f"UNKNOWN-{m_id}"

                            # FIX: timezone-aware UTC timestamp — the column is
                            # DateTime(timezone=True); the old naive now() depended
                            # on the server's local timezone.
                            current.updated_at = datetime.datetime.now(datetime.timezone.utc)
                            await db.commit()
                            logger.info(f"✅ Mentve (ID: {m_id})")

                        except Exception as e:
                            await db.rollback()
                            logger.error(f"❌ Hiba ID:{m_id}: {e}")
                        finally:
                            await db.close()
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: run the enricher until all records are processed.
    asyncio.run(TechEnricher.run())
|
||||
7
archive/2026.02.18 Archive_old_mapps/user.py.old
Executable file
7
archive/2026.02.18 Archive_old_mapps/user.py.old
Executable file
@@ -0,0 +1,7 @@
|
||||
# DEPRECATED: Minden funkció átkerült az app.models.identity modulba.
|
||||
# Ez a fájl csak a kompatibilitás miatt maradt meg, de táblát nem definiál.
|
||||
from .identity import User, UserRole
|
||||
|
||||
# Kapcsolatok
|
||||
# memberships = relationship("OrganizationMember", back_populates="user", cascade="all, delete-orphan")
|
||||
# vehicles = relationship("VehicleOwnership", back_populates="user", cascade="all, delete-orphan")
|
||||
@@ -0,0 +1,109 @@
|
||||
from sqlalchemy import Column, Integer, String, JSON, UniqueConstraint, text, Boolean, DateTime, ForeignKey, Numeric, Index, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.dialects.postgresql import JSONB # PostgreSQL specifikus JSONB a hatékony kereséshez
|
||||
from app.db.base_class import Base
|
||||
|
||||
class VehicleType(Base):
    """Vehicle main-type dictionary (schema owner for the `data` schema)."""
    __tablename__ = "vehicle_types"
    __table_args__ = {"schema": "data"}

    id = Column(Integer, primary_key=True)
    code = Column(String(30), unique=True, index=True)  # car, motorcycle, truck, bus, boat, etc.
    name = Column(String(50))  # display name
    icon = Column(String(50))
    # Per-type measurement units used by the UI (defaults: kW / kg / m3).
    units = Column(JSON, server_default=text("'{\"power\": \"kW\", \"weight\": \"kg\", \"cargo\": \"m3\"}'::jsonb"))

    # Feature dictionary entries and model definitions belonging to this type.
    features = relationship("FeatureDefinition", back_populates="vehicle_type")
    definitions = relationship("VehicleModelDefinition", back_populates="v_type_rel")
|
||||
|
||||
class FeatureDefinition(Base):
    """Global equipment/feature dictionary, scoped per vehicle type."""
    __tablename__ = "feature_definitions"
    __table_args__ = {"schema": "data"}

    id = Column(Integer, primary_key=True)
    vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
    category = Column(String(50))  # technical, interior, exterior, multimedia
    name = Column(String(100), nullable=False)
    data_type = Column(String(20), default="boolean")  # how the feature's value is typed

    vehicle_type = relationship("VehicleType", back_populates="features")
|
||||
|
||||
class ModelFeatureMap(Base):
    """Model-level equipment template (standard vs. extra); composite PK association table."""
    __tablename__ = "model_feature_maps"
    __table_args__ = {"schema": "data"}

    model_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), primary_key=True)
    feature_id = Column(Integer, ForeignKey("data.feature_definitions.id"), primary_key=True)
    availability = Column(String(20), default="standard")  # standard, optional, accessory
    value = Column(String(100))  # optional concrete value for non-boolean features
|
||||
|
||||
class VehicleModelDefinition(Base):
    """MDM master records - v1.3.0 Pipeline Edition (Researcher & Alchemist)."""
    __tablename__ = "vehicle_model_definitions"
    __table_args__ = (
        UniqueConstraint('make', 'technical_code', 'vehicle_type', name='uix_make_tech_type'),
        Index('idx_vmd_lookup', 'make', 'technical_code'),
        {"schema": "data"}
    )

    id = Column(Integer, primary_key=True)
    make = Column(String(50), nullable=False, index=True)
    technical_code = Column(String(50), nullable=False, index=True)
    marketing_name = Column(String(100), index=True)
    family_name = Column(String(100))

    vehicle_type = Column(String(30), index=True)
    vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
    vehicle_class = Column(String(50))

    # Self-reference: records marked "duplicate" point at their master record.
    parent_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), nullable=True)
    year_from = Column(Integer, nullable=True, index=True)
    year_to = Column(Integer, nullable=True, index=True)
    synonyms = Column(JSON, server_default=text("'[]'::jsonb"))

    # --- Robot protection and pipeline fields (v1.3.0) ---
    is_manual = Column(Boolean, default=False, server_default=text("false"), index=True)
    attempts = Column(Integer, default=0, server_default=text("0"), index=True)
    last_error = Column(Text, nullable=True)

    # Raw context scraped by Robot 2.1 "Researcher" (the junk drawer)
    raw_search_context = Column(Text, nullable=True)

    # Telemetry and provenance data (which API/URL supplied the data)
    research_metadata = Column(JSONB, server_default=text("'{}'::jsonb"), nullable=False)
    # --------------------------------------------------

    # --- Fixed technical columns ---
    engine_capacity = Column(Integer, index=True)
    power_kw = Column(Integer, index=True)
    max_weight_kg = Column(Integer, index=True)

    axle_count = Column(Integer)
    payload_capacity_kg = Column(Integer)
    cargo_volume_m3 = Column(Numeric(10, 2))
    cargo_length_mm = Column(Integer)
    cargo_width_mm = Column(Integer)
    cargo_height_mm = Column(Integer)

    specifications = Column(JSON, server_default=text("'{}'::jsonb"))
    features_json = Column(JSON, server_default=text("'{}'::jsonb"))

    # Status column widened to accommodate the pipeline flags
    status = Column(String(30), server_default="unverified", index=True)
    is_master = Column(Boolean, default=False)
    source = Column(String(50))  # e.g. 'ROBOT-v1.3.0-Pipeline'

    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

    # Relationships
    v_type_rel = relationship("VehicleType", back_populates="definitions")
    master_record = relationship("VehicleModelDefinition", remote_side=[id], backref="merged_variants")

    # AssetCatalog link
    # NOTE: verify that the AssetCatalog model is imported into Base!
    variants = relationship("AssetCatalog", back_populates="master_definition", primaryjoin="VehicleModelDefinition.id == AssetCatalog.master_definition_id")
|
||||
19
archive/2026.02.18 Archive_old_mapps/vehicle_ownership.py.old
Executable file
19
archive/2026.02.18 Archive_old_mapps/vehicle_ownership.py.old
Executable file
@@ -0,0 +1,19 @@
|
||||
from sqlalchemy import Column, Integer, ForeignKey, DateTime, Boolean
|
||||
from sqlalchemy.sql import func
|
||||
from app.db.base import Base
|
||||
|
||||
class VehicleOwnership(Base):
    """Time-windowed link between a vehicle and its owning organization."""
    __tablename__ = "vehicle_ownerships"
    __table_args__ = {"schema": "data"}

    id = Column(Integer, primary_key=True, index=True)
    vehicle_id = Column(Integer, ForeignKey("data.vehicles.id"))
    org_id = Column(Integer, ForeignKey("data.organizations.id"))

    # Validity window
    start_date = Column(DateTime(timezone=True), server_default=func.now())
    end_date = Column(DateTime(timezone=True), nullable=True)  # set when the vehicle is sold

    is_active = Column(Boolean, default=True)

    # The current owner may only see service records within this window.
|
||||
21
archive/2026.02.18 Archive_old_mapps/verification_token.py.old
Executable file
21
archive/2026.02.18 Archive_old_mapps/verification_token.py.old
Executable file
@@ -0,0 +1,21 @@
|
||||
import enum
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum
|
||||
from sqlalchemy.sql import func
|
||||
from app.db.base import Base
|
||||
|
||||
class TokenType(str, enum.Enum):
    """Purpose discriminator for verification tokens."""
    email_verify = "email_verify"
    password_reset = "password_reset"
|
||||
|
||||
class VerificationToken(Base):
    """One-time token (stored as a hash) for e-mail verification / password reset."""
    __tablename__ = "verification_tokens"
    __table_args__ = {"schema": "data"}

    id = Column(Integer, primary_key=True, index=True)
    # Tokens are deleted together with their user (ondelete CASCADE).
    user_id = Column(Integer, ForeignKey("data.users.id", ondelete="CASCADE"), nullable=False)

    # 64 hex chars — presumably a SHA-256 digest of the raw token; confirm in the issuing code.
    token_hash = Column(String(64), unique=True, index=True, nullable=False)
    token_type = Column(Enum(TokenType, name="tokentype", schema="data"), nullable=False)

    # NULL expiry appears to mean "never expires" — verify against the validation logic.
    expires_at = Column(DateTime(timezone=True), nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
|
||||
55
archive/data-1772053521182.csv
Executable file
55
archive/data-1772053521182.csv
Executable file
@@ -0,0 +1,55 @@
|
||||
"schema_name","table_name"
|
||||
"data","addresses"
|
||||
"data","asset_assignments"
|
||||
"data","asset_costs"
|
||||
"data","asset_events"
|
||||
"data","asset_financials"
|
||||
"data","asset_inspections"
|
||||
"data","asset_reviews"
|
||||
"data","asset_telemetry"
|
||||
"data","assets"
|
||||
"data","audit_logs"
|
||||
"data","badges"
|
||||
"data","branches"
|
||||
"data","catalog_discovery"
|
||||
"data","credit_logs"
|
||||
"data","discovery_parameters"
|
||||
"data","exchange_rates"
|
||||
"data","expertise_tags"
|
||||
"data","feature_definitions"
|
||||
"data","geo_postal_codes"
|
||||
"data","geo_street_types"
|
||||
"data","geo_streets"
|
||||
"data","level_configs"
|
||||
"data","model_feature_maps"
|
||||
"data","org_sales_assignments"
|
||||
"data","org_subscriptions"
|
||||
"data","organization_financials"
|
||||
"data","organization_members"
|
||||
"data","organizations"
|
||||
"data","point_rules"
|
||||
"data","points_ledger"
|
||||
"data","ratings"
|
||||
"data","service_expertises"
|
||||
"data","service_profiles"
|
||||
"data","service_specialties"
|
||||
"data","service_staging"
|
||||
"data","subscription_tiers"
|
||||
"data","system_parameters"
|
||||
"data","translations"
|
||||
"data","user_badges"
|
||||
"data","user_stats"
|
||||
"data","vehicle_catalog"
|
||||
"data","vehicle_logbook"
|
||||
"data","vehicle_model_definitions"
|
||||
"data","vehicle_ownership_history"
|
||||
"data","vehicle_ownerships"
|
||||
"data","vehicle_types"
|
||||
"identity","persons"
|
||||
"identity","social_accounts"
|
||||
"identity","users"
|
||||
"identity","verification_tokens"
|
||||
"identity","wallets"
|
||||
"public","alembic_version"
|
||||
"public","spatial_ref_sys"
|
||||
"system","pending_actions"
|
||||
|
Can't render this file because it contains an unexpected character in line 10 and column 15.
|
521
archive/data-1772053575794.csv
Executable file
521
archive/data-1772053575794.csv
Executable file
@@ -0,0 +1,521 @@
|
||||
"table_name","index_name","column_name"
|
||||
"addresses","addresses_pkey","id"
|
||||
"alembic_version","alembic_version_pkey","version_num"
|
||||
"asset_assignments","asset_assignments_pkey","id"
|
||||
"asset_costs","asset_costs_pkey","id"
|
||||
"asset_costs","ix_data_asset_costs_registration_uuid","registration_uuid"
|
||||
"asset_events","asset_events_pkey","id"
|
||||
"asset_events","ix_data_asset_events_registration_uuid","registration_uuid"
|
||||
"asset_financials","asset_financials_asset_id_key","asset_id"
|
||||
"asset_financials","asset_financials_pkey","id"
|
||||
"asset_inspections","asset_inspections_pkey","id"
|
||||
"asset_reviews","asset_reviews_pkey","id"
|
||||
"asset_telemetry","asset_telemetry_asset_id_key","asset_id"
|
||||
"asset_telemetry","asset_telemetry_pkey","id"
|
||||
"assets","assets_pkey","id"
|
||||
"assets","ix_data_assets_license_plate","license_plate"
|
||||
"assets","ix_data_assets_registration_uuid","registration_uuid"
|
||||
"assets","ix_data_assets_vin","vin"
|
||||
"audit_logs","audit_logs_pkey","id"
|
||||
"audit_logs","ix_data_audit_logs_action","action"
|
||||
"audit_logs","ix_data_audit_logs_id","id"
|
||||
"audit_logs","ix_data_audit_logs_ip_address","ip_address"
|
||||
"audit_logs","ix_data_audit_logs_target_id","target_id"
|
||||
"audit_logs","ix_data_audit_logs_target_type","target_type"
|
||||
"audit_logs","ix_data_audit_logs_timestamp","timestamp"
|
||||
"badges","badges_name_key","name"
|
||||
"badges","badges_pkey","id"
|
||||
"badges","ix_data_badges_id","id"
|
||||
"branches","branches_pkey","id"
|
||||
"branches","ix_data_branches_city","city"
|
||||
"branches","ix_data_branches_postal_code","postal_code"
|
||||
"catalog_discovery","_make_model_class_uc","model"
|
||||
"catalog_discovery","_make_model_class_uc","make"
|
||||
"catalog_discovery","_make_model_class_uc","vehicle_class"
|
||||
"catalog_discovery","catalog_discovery_pkey","id"
|
||||
"catalog_discovery","ix_data_catalog_discovery_id","id"
|
||||
"catalog_discovery","ix_data_catalog_discovery_make","make"
|
||||
"catalog_discovery","ix_data_catalog_discovery_model","model"
|
||||
"catalog_discovery","ix_data_catalog_discovery_status","status"
|
||||
"catalog_discovery","ix_data_catalog_discovery_vehicle_class","vehicle_class"
|
||||
"credit_logs","credit_logs_pkey","id"
|
||||
"discovery_parameters","discovery_parameters_pkey","id"
|
||||
"exchange_rates","exchange_rates_pkey","id"
|
||||
"exchange_rates","exchange_rates_target_currency_key","target_currency"
|
||||
"expertise_tags","expertise_tags_pkey","id"
|
||||
"expertise_tags","ix_data_expertise_tags_key","key"
|
||||
"feature_definitions","feature_definitions_pkey","id"
|
||||
"feature_definitions","ix_data_feature_definitions_category","category"
|
||||
"feature_definitions","ix_data_feature_definitions_code","code"
|
||||
"geo_postal_codes","geo_postal_codes_pkey","id"
|
||||
"geo_postal_codes","ix_data_geo_postal_codes_city","city"
|
||||
"geo_postal_codes","ix_data_geo_postal_codes_zip_code","zip_code"
|
||||
"geo_street_types","geo_street_types_name_key","name"
|
||||
"geo_street_types","geo_street_types_pkey","id"
|
||||
"geo_streets","geo_streets_pkey","id"
|
||||
"geo_streets","ix_data_geo_streets_name","name"
|
||||
"level_configs","ix_data_level_configs_id","id"
|
||||
"level_configs","level_configs_level_number_key","level_number"
|
||||
"level_configs","level_configs_pkey","id"
|
||||
"model_feature_maps","model_feature_maps_pkey","id"
|
||||
"org_sales_assignments","org_sales_assignments_pkey","id"
|
||||
"org_subscriptions","org_subscriptions_pkey","id"
|
||||
"organization_financials","ix_data_organization_financials_id","id"
|
||||
"organization_financials","organization_financials_pkey","id"
|
||||
"organization_members","ix_data_organization_members_id","id"
|
||||
"organization_members","organization_members_pkey","id"
|
||||
"organizations","ix_data_organizations_folder_slug","folder_slug"
|
||||
"organizations","ix_data_organizations_id","id"
|
||||
"organizations","ix_data_organizations_subscription_plan","subscription_plan"
|
||||
"organizations","ix_data_organizations_tax_number","tax_number"
|
||||
"organizations","organizations_pkey","id"
|
||||
"pending_actions","ix_system_pending_actions_id","id"
|
||||
"pending_actions","pending_actions_pkey","id"
|
||||
"persons","ix_identity_persons_id","id"
|
||||
"persons","ix_identity_persons_identity_hash","identity_hash"
|
||||
"persons","persons_id_uuid_key","id_uuid"
|
||||
"persons","persons_pkey","id"
|
||||
"pg_aggregate","pg_aggregate_fnoid_index","aggfnoid"
|
||||
"pg_am","pg_am_name_index","amname"
|
||||
"pg_am","pg_am_oid_index","oid"
|
||||
"pg_amop","pg_amop_fam_strat_index","amopstrategy"
|
||||
"pg_amop","pg_amop_fam_strat_index","amopfamily"
|
||||
"pg_amop","pg_amop_fam_strat_index","amoprighttype"
|
||||
"pg_amop","pg_amop_fam_strat_index","amoplefttype"
|
||||
"pg_amop","pg_amop_oid_index","oid"
|
||||
"pg_amop","pg_amop_opr_fam_index","amopfamily"
|
||||
"pg_amop","pg_amop_opr_fam_index","amoppurpose"
|
||||
"pg_amop","pg_amop_opr_fam_index","amopopr"
|
||||
"pg_amproc","pg_amproc_fam_proc_index","amprocrighttype"
|
||||
"pg_amproc","pg_amproc_fam_proc_index","amproclefttype"
|
||||
"pg_amproc","pg_amproc_fam_proc_index","amprocfamily"
|
||||
"pg_amproc","pg_amproc_fam_proc_index","amprocnum"
|
||||
"pg_amproc","pg_amproc_oid_index","oid"
|
||||
"pg_attrdef","pg_attrdef_adrelid_adnum_index","adrelid"
|
||||
"pg_attrdef","pg_attrdef_adrelid_adnum_index","adnum"
|
||||
"pg_attrdef","pg_attrdef_oid_index","oid"
|
||||
"pg_attribute","pg_attribute_relid_attnam_index","attname"
|
||||
"pg_attribute","pg_attribute_relid_attnam_index","attrelid"
|
||||
"pg_attribute","pg_attribute_relid_attnum_index","attnum"
|
||||
"pg_attribute","pg_attribute_relid_attnum_index","attrelid"
|
||||
"pg_auth_members","pg_auth_members_member_role_index","roleid"
|
||||
"pg_auth_members","pg_auth_members_member_role_index","member"
|
||||
"pg_auth_members","pg_auth_members_role_member_index","member"
|
||||
"pg_auth_members","pg_auth_members_role_member_index","roleid"
|
||||
"pg_authid","pg_authid_oid_index","oid"
|
||||
"pg_authid","pg_authid_rolname_index","rolname"
|
||||
"pg_cast","pg_cast_oid_index","oid"
|
||||
"pg_cast","pg_cast_source_target_index","casttarget"
|
||||
"pg_cast","pg_cast_source_target_index","castsource"
|
||||
"pg_class","pg_class_oid_index","oid"
|
||||
"pg_class","pg_class_relname_nsp_index","relnamespace"
|
||||
"pg_class","pg_class_relname_nsp_index","relname"
|
||||
"pg_class","pg_class_tblspc_relfilenode_index","reltablespace"
|
||||
"pg_class","pg_class_tblspc_relfilenode_index","relfilenode"
|
||||
"pg_collation","pg_collation_name_enc_nsp_index","collnamespace"
|
||||
"pg_collation","pg_collation_name_enc_nsp_index","collname"
|
||||
"pg_collation","pg_collation_name_enc_nsp_index","collencoding"
|
||||
"pg_collation","pg_collation_oid_index","oid"
|
||||
"pg_constraint","pg_constraint_conname_nsp_index","connamespace"
|
||||
"pg_constraint","pg_constraint_conname_nsp_index","conname"
|
||||
"pg_constraint","pg_constraint_conparentid_index","conparentid"
|
||||
"pg_constraint","pg_constraint_conrelid_contypid_conname_index","conname"
|
||||
"pg_constraint","pg_constraint_conrelid_contypid_conname_index","conrelid"
|
||||
"pg_constraint","pg_constraint_conrelid_contypid_conname_index","contypid"
|
||||
"pg_constraint","pg_constraint_contypid_index","contypid"
|
||||
"pg_constraint","pg_constraint_oid_index","oid"
|
||||
"pg_conversion","pg_conversion_default_index","conforencoding"
|
||||
"pg_conversion","pg_conversion_default_index","oid"
|
||||
"pg_conversion","pg_conversion_default_index","contoencoding"
|
||||
"pg_conversion","pg_conversion_default_index","connamespace"
|
||||
"pg_conversion","pg_conversion_name_nsp_index","connamespace"
|
||||
"pg_conversion","pg_conversion_name_nsp_index","conname"
|
||||
"pg_conversion","pg_conversion_oid_index","oid"
|
||||
"pg_database","pg_database_datname_index","datname"
|
||||
"pg_database","pg_database_oid_index","oid"
|
||||
"pg_db_role_setting","pg_db_role_setting_databaseid_rol_index","setrole"
|
||||
"pg_db_role_setting","pg_db_role_setting_databaseid_rol_index","setdatabase"
|
||||
"pg_default_acl","pg_default_acl_oid_index","oid"
|
||||
"pg_default_acl","pg_default_acl_role_nsp_obj_index","defaclrole"
|
||||
"pg_default_acl","pg_default_acl_role_nsp_obj_index","defaclnamespace"
|
||||
"pg_default_acl","pg_default_acl_role_nsp_obj_index","defaclobjtype"
|
||||
"pg_depend","pg_depend_depender_index","objsubid"
|
||||
"pg_depend","pg_depend_depender_index","objid"
|
||||
"pg_depend","pg_depend_depender_index","classid"
|
||||
"pg_depend","pg_depend_reference_index","refobjid"
|
||||
"pg_depend","pg_depend_reference_index","refobjsubid"
|
||||
"pg_depend","pg_depend_reference_index","refclassid"
|
||||
"pg_description","pg_description_o_c_o_index","objoid"
|
||||
"pg_description","pg_description_o_c_o_index","classoid"
|
||||
"pg_description","pg_description_o_c_o_index","objsubid"
|
||||
"pg_enum","pg_enum_oid_index","oid"
|
||||
"pg_enum","pg_enum_typid_label_index","enumlabel"
|
||||
"pg_enum","pg_enum_typid_label_index","enumtypid"
|
||||
"pg_enum","pg_enum_typid_sortorder_index","enumtypid"
|
||||
"pg_enum","pg_enum_typid_sortorder_index","enumsortorder"
|
||||
"pg_event_trigger","pg_event_trigger_evtname_index","evtname"
|
||||
"pg_event_trigger","pg_event_trigger_oid_index","oid"
|
||||
"pg_extension","pg_extension_name_index","extname"
|
||||
"pg_extension","pg_extension_oid_index","oid"
|
||||
"pg_foreign_data_wrapper","pg_foreign_data_wrapper_name_index","fdwname"
|
||||
"pg_foreign_data_wrapper","pg_foreign_data_wrapper_oid_index","oid"
|
||||
"pg_foreign_server","pg_foreign_server_name_index","srvname"
|
||||
"pg_foreign_server","pg_foreign_server_oid_index","oid"
|
||||
"pg_foreign_table","pg_foreign_table_relid_index","ftrelid"
|
||||
"pg_index","pg_index_indexrelid_index","indexrelid"
|
||||
"pg_index","pg_index_indrelid_index","indrelid"
|
||||
"pg_inherits","pg_inherits_parent_index","inhparent"
|
||||
"pg_inherits","pg_inherits_relid_seqno_index","inhrelid"
|
||||
"pg_inherits","pg_inherits_relid_seqno_index","inhseqno"
|
||||
"pg_init_privs","pg_init_privs_o_c_o_index","objsubid"
|
||||
"pg_init_privs","pg_init_privs_o_c_o_index","objoid"
|
||||
"pg_init_privs","pg_init_privs_o_c_o_index","classoid"
|
||||
"pg_language","pg_language_name_index","lanname"
|
||||
"pg_language","pg_language_oid_index","oid"
|
||||
"pg_largeobject","pg_largeobject_loid_pn_index","loid"
|
||||
"pg_largeobject","pg_largeobject_loid_pn_index","pageno"
|
||||
"pg_largeobject_metadata","pg_largeobject_metadata_oid_index","oid"
|
||||
"pg_namespace","pg_namespace_nspname_index","nspname"
|
||||
"pg_namespace","pg_namespace_oid_index","oid"
|
||||
"pg_opclass","pg_opclass_am_name_nsp_index","opcmethod"
|
||||
"pg_opclass","pg_opclass_am_name_nsp_index","opcnamespace"
|
||||
"pg_opclass","pg_opclass_am_name_nsp_index","opcname"
|
||||
"pg_opclass","pg_opclass_oid_index","oid"
|
||||
"pg_operator","pg_operator_oid_index","oid"
|
||||
"pg_operator","pg_operator_oprname_l_r_n_index","oprright"
|
||||
"pg_operator","pg_operator_oprname_l_r_n_index","oprleft"
|
||||
"pg_operator","pg_operator_oprname_l_r_n_index","oprnamespace"
|
||||
"pg_operator","pg_operator_oprname_l_r_n_index","oprname"
|
||||
"pg_opfamily","pg_opfamily_am_name_nsp_index","opfname"
|
||||
"pg_opfamily","pg_opfamily_am_name_nsp_index","opfnamespace"
|
||||
"pg_opfamily","pg_opfamily_am_name_nsp_index","opfmethod"
|
||||
"pg_opfamily","pg_opfamily_oid_index","oid"
|
||||
"pg_parameter_acl","pg_parameter_acl_oid_index","oid"
|
||||
"pg_parameter_acl","pg_parameter_acl_parname_index","parname"
|
||||
"pg_partitioned_table","pg_partitioned_table_partrelid_index","partrelid"
|
||||
"pg_policy","pg_policy_oid_index","oid"
|
||||
"pg_policy","pg_policy_polrelid_polname_index","polname"
|
||||
"pg_policy","pg_policy_polrelid_polname_index","polrelid"
|
||||
"pg_proc","pg_proc_oid_index","oid"
|
||||
"pg_proc","pg_proc_proname_args_nsp_index","proname"
|
||||
"pg_proc","pg_proc_proname_args_nsp_index","pronamespace"
|
||||
"pg_proc","pg_proc_proname_args_nsp_index","proargtypes"
|
||||
"pg_publication","pg_publication_oid_index","oid"
|
||||
"pg_publication","pg_publication_pubname_index","pubname"
|
||||
"pg_publication_namespace","pg_publication_namespace_oid_index","oid"
|
||||
"pg_publication_namespace","pg_publication_namespace_pnnspid_pnpubid_index","pnnspid"
|
||||
"pg_publication_namespace","pg_publication_namespace_pnnspid_pnpubid_index","pnpubid"
|
||||
"pg_publication_rel","pg_publication_rel_oid_index","oid"
|
||||
"pg_publication_rel","pg_publication_rel_prpubid_index","prpubid"
|
||||
"pg_publication_rel","pg_publication_rel_prrelid_prpubid_index","prrelid"
|
||||
"pg_publication_rel","pg_publication_rel_prrelid_prpubid_index","prpubid"
|
||||
"pg_range","pg_range_rngmultitypid_index","rngmultitypid"
|
||||
"pg_range","pg_range_rngtypid_index","rngtypid"
|
||||
"pg_replication_origin","pg_replication_origin_roiident_index","roident"
|
||||
"pg_replication_origin","pg_replication_origin_roname_index","roname"
|
||||
"pg_rewrite","pg_rewrite_oid_index","oid"
|
||||
"pg_rewrite","pg_rewrite_rel_rulename_index","rulename"
|
||||
"pg_rewrite","pg_rewrite_rel_rulename_index","ev_class"
|
||||
"pg_seclabel","pg_seclabel_object_index","objsubid"
|
||||
"pg_seclabel","pg_seclabel_object_index","objoid"
|
||||
"pg_seclabel","pg_seclabel_object_index","classoid"
|
||||
"pg_seclabel","pg_seclabel_object_index","provider"
|
||||
"pg_sequence","pg_sequence_seqrelid_index","seqrelid"
|
||||
"pg_shdepend","pg_shdepend_depender_index","objsubid"
|
||||
"pg_shdepend","pg_shdepend_depender_index","objid"
|
||||
"pg_shdepend","pg_shdepend_depender_index","dbid"
|
||||
"pg_shdepend","pg_shdepend_depender_index","classid"
|
||||
"pg_shdepend","pg_shdepend_reference_index","refclassid"
|
||||
"pg_shdepend","pg_shdepend_reference_index","refobjid"
|
||||
"pg_shdescription","pg_shdescription_o_c_index","classoid"
|
||||
"pg_shdescription","pg_shdescription_o_c_index","objoid"
|
||||
"pg_shseclabel","pg_shseclabel_object_index","provider"
|
||||
"pg_shseclabel","pg_shseclabel_object_index","objoid"
|
||||
"pg_shseclabel","pg_shseclabel_object_index","classoid"
|
||||
"pg_statistic","pg_statistic_relid_att_inh_index","staattnum"
|
||||
"pg_statistic","pg_statistic_relid_att_inh_index","starelid"
|
||||
"pg_statistic","pg_statistic_relid_att_inh_index","stainherit"
|
||||
"pg_statistic_ext","pg_statistic_ext_name_index","stxname"
|
||||
"pg_statistic_ext","pg_statistic_ext_name_index","stxnamespace"
|
||||
"pg_statistic_ext","pg_statistic_ext_oid_index","oid"
|
||||
"pg_statistic_ext","pg_statistic_ext_relid_index","stxrelid"
|
||||
"pg_statistic_ext_data","pg_statistic_ext_data_stxoid_inh_index","stxdinherit"
|
||||
"pg_statistic_ext_data","pg_statistic_ext_data_stxoid_inh_index","stxoid"
|
||||
"pg_subscription","pg_subscription_oid_index","oid"
|
||||
"pg_subscription","pg_subscription_subname_index","subdbid"
|
||||
"pg_subscription","pg_subscription_subname_index","subname"
|
||||
"pg_subscription_rel","pg_subscription_rel_srrelid_srsubid_index","srsubid"
|
||||
"pg_subscription_rel","pg_subscription_rel_srrelid_srsubid_index","srrelid"
|
||||
"pg_tablespace","pg_tablespace_oid_index","oid"
|
||||
"pg_tablespace","pg_tablespace_spcname_index","spcname"
|
||||
"pg_toast_1213","pg_toast_1213_index","chunk_seq"
|
||||
"pg_toast_1213","pg_toast_1213_index","chunk_id"
|
||||
"pg_toast_1247","pg_toast_1247_index","chunk_id"
|
||||
"pg_toast_1247","pg_toast_1247_index","chunk_seq"
|
||||
"pg_toast_1255","pg_toast_1255_index","chunk_id"
|
||||
"pg_toast_1255","pg_toast_1255_index","chunk_seq"
|
||||
"pg_toast_1260","pg_toast_1260_index","chunk_seq"
|
||||
"pg_toast_1260","pg_toast_1260_index","chunk_id"
|
||||
"pg_toast_1262","pg_toast_1262_index","chunk_seq"
|
||||
"pg_toast_1262","pg_toast_1262_index","chunk_id"
|
||||
"pg_toast_13454","pg_toast_13454_index","chunk_seq"
|
||||
"pg_toast_13454","pg_toast_13454_index","chunk_id"
|
||||
"pg_toast_13459","pg_toast_13459_index","chunk_seq"
|
||||
"pg_toast_13459","pg_toast_13459_index","chunk_id"
|
||||
"pg_toast_13464","pg_toast_13464_index","chunk_seq"
|
||||
"pg_toast_13464","pg_toast_13464_index","chunk_id"
|
||||
"pg_toast_13469","pg_toast_13469_index","chunk_id"
|
||||
"pg_toast_13469","pg_toast_13469_index","chunk_seq"
|
||||
"pg_toast_1417","pg_toast_1417_index","chunk_seq"
|
||||
"pg_toast_1417","pg_toast_1417_index","chunk_id"
|
||||
"pg_toast_1418","pg_toast_1418_index","chunk_seq"
|
||||
"pg_toast_1418","pg_toast_1418_index","chunk_id"
|
||||
"pg_toast_2328","pg_toast_2328_index","chunk_id"
|
||||
"pg_toast_2328","pg_toast_2328_index","chunk_seq"
|
||||
"pg_toast_2396","pg_toast_2396_index","chunk_seq"
|
||||
"pg_toast_2396","pg_toast_2396_index","chunk_id"
|
||||
"pg_toast_2600","pg_toast_2600_index","chunk_seq"
|
||||
"pg_toast_2600","pg_toast_2600_index","chunk_id"
|
||||
"pg_toast_2604","pg_toast_2604_index","chunk_id"
|
||||
"pg_toast_2604","pg_toast_2604_index","chunk_seq"
|
||||
"pg_toast_2606","pg_toast_2606_index","chunk_id"
|
||||
"pg_toast_2606","pg_toast_2606_index","chunk_seq"
|
||||
"pg_toast_2609","pg_toast_2609_index","chunk_seq"
|
||||
"pg_toast_2609","pg_toast_2609_index","chunk_id"
|
||||
"pg_toast_2612","pg_toast_2612_index","chunk_seq"
|
||||
"pg_toast_2612","pg_toast_2612_index","chunk_id"
|
||||
"pg_toast_2615","pg_toast_2615_index","chunk_seq"
|
||||
"pg_toast_2615","pg_toast_2615_index","chunk_id"
|
||||
"pg_toast_2618","pg_toast_2618_index","chunk_seq"
|
||||
"pg_toast_2618","pg_toast_2618_index","chunk_id"
|
||||
"pg_toast_2619","pg_toast_2619_index","chunk_id"
|
||||
"pg_toast_2619","pg_toast_2619_index","chunk_seq"
|
||||
"pg_toast_2620","pg_toast_2620_index","chunk_id"
|
||||
"pg_toast_2620","pg_toast_2620_index","chunk_seq"
|
||||
"pg_toast_2964","pg_toast_2964_index","chunk_id"
|
||||
"pg_toast_2964","pg_toast_2964_index","chunk_seq"
|
||||
"pg_toast_3079","pg_toast_3079_index","chunk_seq"
|
||||
"pg_toast_3079","pg_toast_3079_index","chunk_id"
|
||||
"pg_toast_3118","pg_toast_3118_index","chunk_id"
|
||||
"pg_toast_3118","pg_toast_3118_index","chunk_seq"
|
||||
"pg_toast_3256","pg_toast_3256_index","chunk_id"
|
||||
"pg_toast_3256","pg_toast_3256_index","chunk_seq"
|
||||
"pg_toast_3350","pg_toast_3350_index","chunk_seq"
|
||||
"pg_toast_3350","pg_toast_3350_index","chunk_id"
|
||||
"pg_toast_3381","pg_toast_3381_index","chunk_seq"
|
||||
"pg_toast_3381","pg_toast_3381_index","chunk_id"
|
||||
"pg_toast_3394","pg_toast_3394_index","chunk_id"
|
||||
"pg_toast_3394","pg_toast_3394_index","chunk_seq"
|
||||
"pg_toast_3429","pg_toast_3429_index","chunk_id"
|
||||
"pg_toast_3429","pg_toast_3429_index","chunk_seq"
|
||||
"pg_toast_3456","pg_toast_3456_index","chunk_seq"
|
||||
"pg_toast_3456","pg_toast_3456_index","chunk_id"
|
||||
"pg_toast_3466","pg_toast_3466_index","chunk_id"
|
||||
"pg_toast_3466","pg_toast_3466_index","chunk_seq"
|
||||
"pg_toast_3592","pg_toast_3592_index","chunk_seq"
|
||||
"pg_toast_3592","pg_toast_3592_index","chunk_id"
|
||||
"pg_toast_3596","pg_toast_3596_index","chunk_seq"
|
||||
"pg_toast_3596","pg_toast_3596_index","chunk_id"
|
||||
"pg_toast_3600","pg_toast_3600_index","chunk_id"
|
||||
"pg_toast_3600","pg_toast_3600_index","chunk_seq"
|
||||
"pg_toast_6000","pg_toast_6000_index","chunk_id"
|
||||
"pg_toast_6000","pg_toast_6000_index","chunk_seq"
|
||||
"pg_toast_6100","pg_toast_6100_index","chunk_seq"
|
||||
"pg_toast_6100","pg_toast_6100_index","chunk_id"
|
||||
"pg_toast_6106","pg_toast_6106_index","chunk_id"
|
||||
"pg_toast_6106","pg_toast_6106_index","chunk_seq"
|
||||
"pg_toast_6243","pg_toast_6243_index","chunk_id"
|
||||
"pg_toast_6243","pg_toast_6243_index","chunk_seq"
|
||||
"pg_toast_79789","pg_toast_79789_index","chunk_id"
|
||||
"pg_toast_79789","pg_toast_79789_index","chunk_seq"
|
||||
"pg_toast_826","pg_toast_826_index","chunk_seq"
|
||||
"pg_toast_826","pg_toast_826_index","chunk_id"
|
||||
"pg_toast_88701","pg_toast_88701_index","chunk_seq"
|
||||
"pg_toast_88701","pg_toast_88701_index","chunk_id"
|
||||
"pg_toast_88771","pg_toast_88771_index","chunk_seq"
|
||||
"pg_toast_88771","pg_toast_88771_index","chunk_id"
|
||||
"pg_toast_88783","pg_toast_88783_index","chunk_seq"
|
||||
"pg_toast_88783","pg_toast_88783_index","chunk_id"
|
||||
"pg_toast_88794","pg_toast_88794_index","chunk_seq"
|
||||
"pg_toast_88794","pg_toast_88794_index","chunk_id"
|
||||
"pg_toast_88809","pg_toast_88809_index","chunk_id"
|
||||
"pg_toast_88809","pg_toast_88809_index","chunk_seq"
|
||||
"pg_toast_88827","pg_toast_88827_index","chunk_id"
|
||||
"pg_toast_88827","pg_toast_88827_index","chunk_seq"
|
||||
"pg_toast_88838","pg_toast_88838_index","chunk_id"
|
||||
"pg_toast_88838","pg_toast_88838_index","chunk_seq"
|
||||
"pg_toast_88851","pg_toast_88851_index","chunk_id"
|
||||
"pg_toast_88851","pg_toast_88851_index","chunk_seq"
|
||||
"pg_toast_88861","pg_toast_88861_index","chunk_id"
|
||||
"pg_toast_88861","pg_toast_88861_index","chunk_seq"
|
||||
"pg_toast_88902","pg_toast_88902_index","chunk_seq"
|
||||
"pg_toast_88902","pg_toast_88902_index","chunk_id"
|
||||
"pg_toast_88946","pg_toast_88946_index","chunk_seq"
|
||||
"pg_toast_88946","pg_toast_88946_index","chunk_id"
|
||||
"pg_toast_88971","pg_toast_88971_index","chunk_id"
|
||||
"pg_toast_88971","pg_toast_88971_index","chunk_seq"
|
||||
"pg_toast_89018","pg_toast_89018_index","chunk_id"
|
||||
"pg_toast_89018","pg_toast_89018_index","chunk_seq"
|
||||
"pg_toast_89064","pg_toast_89064_index","chunk_id"
|
||||
"pg_toast_89064","pg_toast_89064_index","chunk_seq"
|
||||
"pg_toast_89098","pg_toast_89098_index","chunk_seq"
|
||||
"pg_toast_89098","pg_toast_89098_index","chunk_id"
|
||||
"pg_toast_89129","pg_toast_89129_index","chunk_id"
|
||||
"pg_toast_89129","pg_toast_89129_index","chunk_seq"
|
||||
"pg_toast_89178","pg_toast_89178_index","chunk_seq"
|
||||
"pg_toast_89178","pg_toast_89178_index","chunk_id"
|
||||
"pg_toast_89231","pg_toast_89231_index","chunk_seq"
|
||||
"pg_toast_89231","pg_toast_89231_index","chunk_id"
|
||||
"pg_toast_89273","pg_toast_89273_index","chunk_seq"
|
||||
"pg_toast_89273","pg_toast_89273_index","chunk_id"
|
||||
"pg_toast_89295","pg_toast_89295_index","chunk_id"
|
||||
"pg_toast_89295","pg_toast_89295_index","chunk_seq"
|
||||
"pg_toast_89374","pg_toast_89374_index","chunk_seq"
|
||||
"pg_toast_89374","pg_toast_89374_index","chunk_id"
|
||||
"pg_toast_89400","pg_toast_89400_index","chunk_id"
|
||||
"pg_toast_89400","pg_toast_89400_index","chunk_seq"
|
||||
"pg_toast_89457","pg_toast_89457_index","chunk_id"
|
||||
"pg_toast_89457","pg_toast_89457_index","chunk_seq"
|
||||
"pg_toast_89482","pg_toast_89482_index","chunk_id"
|
||||
"pg_toast_89482","pg_toast_89482_index","chunk_seq"
|
||||
"pg_toast_89497","pg_toast_89497_index","chunk_seq"
|
||||
"pg_toast_89497","pg_toast_89497_index","chunk_id"
|
||||
"pg_toast_89513","pg_toast_89513_index","chunk_id"
|
||||
"pg_toast_89513","pg_toast_89513_index","chunk_seq"
|
||||
"pg_toast_89548","pg_toast_89548_index","chunk_id"
|
||||
"pg_toast_89548","pg_toast_89548_index","chunk_seq"
|
||||
"pg_toast_89597","pg_toast_89597_index","chunk_seq"
|
||||
"pg_toast_89597","pg_toast_89597_index","chunk_id"
|
||||
"pg_toast_90028","pg_toast_90028_index","chunk_id"
|
||||
"pg_toast_90028","pg_toast_90028_index","chunk_seq"
|
||||
"pg_toast_91674","pg_toast_91674_index","chunk_id"
|
||||
"pg_toast_91674","pg_toast_91674_index","chunk_seq"
|
||||
"pg_toast_98885","pg_toast_98885_index","chunk_id"
|
||||
"pg_toast_98885","pg_toast_98885_index","chunk_seq"
|
||||
"pg_transform","pg_transform_oid_index","oid"
|
||||
"pg_transform","pg_transform_type_lang_index","trflang"
|
||||
"pg_transform","pg_transform_type_lang_index","trftype"
|
||||
"pg_trigger","pg_trigger_oid_index","oid"
|
||||
"pg_trigger","pg_trigger_tgconstraint_index","tgconstraint"
|
||||
"pg_trigger","pg_trigger_tgrelid_tgname_index","tgname"
|
||||
"pg_trigger","pg_trigger_tgrelid_tgname_index","tgrelid"
|
||||
"pg_ts_config","pg_ts_config_cfgname_index","cfgname"
|
||||
"pg_ts_config","pg_ts_config_cfgname_index","cfgnamespace"
|
||||
"pg_ts_config","pg_ts_config_oid_index","oid"
|
||||
"pg_ts_config_map","pg_ts_config_map_index","mapcfg"
|
||||
"pg_ts_config_map","pg_ts_config_map_index","mapseqno"
|
||||
"pg_ts_config_map","pg_ts_config_map_index","maptokentype"
|
||||
"pg_ts_dict","pg_ts_dict_dictname_index","dictnamespace"
|
||||
"pg_ts_dict","pg_ts_dict_dictname_index","dictname"
|
||||
"pg_ts_dict","pg_ts_dict_oid_index","oid"
|
||||
"pg_ts_parser","pg_ts_parser_oid_index","oid"
|
||||
"pg_ts_parser","pg_ts_parser_prsname_index","prsname"
|
||||
"pg_ts_parser","pg_ts_parser_prsname_index","prsnamespace"
|
||||
"pg_ts_template","pg_ts_template_oid_index","oid"
|
||||
"pg_ts_template","pg_ts_template_tmplname_index","tmplname"
|
||||
"pg_ts_template","pg_ts_template_tmplname_index","tmplnamespace"
|
||||
"pg_type","pg_type_oid_index","oid"
|
||||
"pg_type","pg_type_typname_nsp_index","typnamespace"
|
||||
"pg_type","pg_type_typname_nsp_index","typname"
|
||||
"pg_user_mapping","pg_user_mapping_oid_index","oid"
|
||||
"pg_user_mapping","pg_user_mapping_user_server_index","umserver"
|
||||
"pg_user_mapping","pg_user_mapping_user_server_index","umuser"
|
||||
"point_rules","ix_data_point_rules_action_key","action_key"
|
||||
"point_rules","ix_data_point_rules_id","id"
|
||||
"point_rules","point_rules_pkey","id"
|
||||
"points_ledger","ix_data_points_ledger_id","id"
|
||||
"points_ledger","points_ledger_pkey","id"
|
||||
"ratings","idx_rating_branch","target_branch_id"
|
||||
"ratings","idx_rating_org","target_organization_id"
|
||||
"ratings","idx_rating_user","target_user_id"
|
||||
"ratings","ratings_pkey","id"
|
||||
"service_expertises","service_expertises_pkey","expertise_id"
|
||||
"service_expertises","service_expertises_pkey","service_id"
|
||||
"service_profiles","idx_service_fingerprint","fingerprint"
|
||||
"service_profiles","idx_service_profiles_location","location"
|
||||
"service_profiles","ix_data_service_profiles_fingerprint","fingerprint"
|
||||
"service_profiles","ix_data_service_profiles_id","id"
|
||||
"service_profiles","ix_data_service_profiles_location","location"
|
||||
"service_profiles","ix_data_service_profiles_status","status"
|
||||
"service_profiles","service_profiles_google_place_id_key","google_place_id"
|
||||
"service_profiles","service_profiles_organization_id_key","organization_id"
|
||||
"service_profiles","service_profiles_pkey","id"
|
||||
"service_specialties","ix_data_service_specialties_slug","slug"
|
||||
"service_specialties","service_specialties_pkey","id"
|
||||
"service_staging","idx_staging_fingerprint","fingerprint"
|
||||
"service_staging","ix_data_service_staging_city","city"
|
||||
"service_staging","ix_data_service_staging_id","id"
|
||||
"service_staging","ix_data_service_staging_name","name"
|
||||
"service_staging","ix_data_service_staging_postal_code","postal_code"
|
||||
"service_staging","ix_data_service_staging_status","status"
|
||||
"service_staging","service_staging_pkey","id"
|
||||
"social_accounts","ix_identity_social_accounts_id","id"
|
||||
"social_accounts","ix_identity_social_accounts_social_id","social_id"
|
||||
"social_accounts","social_accounts_pkey","id"
|
||||
"social_accounts","uix_social_provider_id","provider"
|
||||
"social_accounts","uix_social_provider_id","social_id"
|
||||
"spatial_ref_sys","spatial_ref_sys_pkey","srid"
|
||||
"subscription_tiers","ix_data_subscription_tiers_name","name"
|
||||
"subscription_tiers","subscription_tiers_pkey","id"
|
||||
"system_parameters","system_parameters_key_key","key"
|
||||
"system_parameters","system_parameters_pkey","id"
|
||||
"translations","ix_data_translations_id","id"
|
||||
"translations","ix_data_translations_key","key"
|
||||
"translations","ix_data_translations_lang","lang"
|
||||
"translations","translations_pkey","id"
|
||||
"user_badges","ix_data_user_badges_id","id"
|
||||
"user_badges","user_badges_pkey","id"
|
||||
"user_stats","user_stats_pkey","user_id"
|
||||
"users","ix_identity_users_email","email"
|
||||
"users","ix_identity_users_folder_slug","folder_slug"
|
||||
"users","ix_identity_users_id","id"
|
||||
"users","users_pkey","id"
|
||||
"users","users_referral_code_key","referral_code"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_engine_capacity","engine_capacity"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_engine_variant","engine_variant"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_fuel_type","fuel_type"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_generation","generation"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_id","id"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_make","make"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_model","model"
|
||||
"vehicle_catalog","ix_data_vehicle_catalog_power_kw","power_kw"
|
||||
"vehicle_catalog","uix_vehicle_catalog_full","year_from"
|
||||
"vehicle_catalog","uix_vehicle_catalog_full","make"
|
||||
"vehicle_catalog","uix_vehicle_catalog_full","model"
|
||||
"vehicle_catalog","uix_vehicle_catalog_full","engine_variant"
|
||||
"vehicle_catalog","uix_vehicle_catalog_full","fuel_type"
|
||||
"vehicle_catalog","vehicle_catalog_pkey","id"
|
||||
"vehicle_logbook","vehicle_logbook_pkey","id"
|
||||
"vehicle_model_definitions","idx_vmd_engine_code","engine_code"
|
||||
"vehicle_model_definitions","idx_vmd_lookup","make"
|
||||
"vehicle_model_definitions","idx_vmd_lookup","technical_code"
|
||||
"vehicle_model_definitions","idx_vmd_lookup_fast","normalized_name"
|
||||
"vehicle_model_definitions","idx_vmd_lookup_fast","make"
|
||||
"vehicle_model_definitions","idx_vmd_normalized_name","normalized_name"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_make","make"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_marketing_name","marketing_name"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_status","status"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_technical_code","technical_code"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_year_from","year_from"
|
||||
"vehicle_model_definitions","ix_data_vehicle_model_definitions_year_to","year_to"
|
||||
"vehicle_model_definitions","ix_vehicle_model_marketing_name","marketing_name"
|
||||
"vehicle_model_definitions","uix_make_tech_type","technical_code"
|
||||
"vehicle_model_definitions","uix_make_tech_type","make"
|
||||
"vehicle_model_definitions","uix_make_tech_type","vehicle_type_id"
|
||||
"vehicle_model_definitions","uix_vmd_precision","variant_code"
|
||||
"vehicle_model_definitions","uix_vmd_precision","make"
|
||||
"vehicle_model_definitions","uix_vmd_precision","version_code"
|
||||
"vehicle_model_definitions","uix_vmd_precision","fuel_type"
|
||||
"vehicle_model_definitions","uix_vmd_precision","normalized_name"
|
||||
"vehicle_model_definitions","vehicle_model_definitions_pkey","id"
|
||||
"vehicle_ownership_history","vehicle_ownership_history_pkey","id"
|
||||
"vehicle_ownerships","ix_data_vehicle_ownerships_id","id"
|
||||
"vehicle_ownerships","vehicle_ownerships_pkey","id"
|
||||
"vehicle_types","ix_data_vehicle_types_code","code"
|
||||
"vehicle_types","vehicle_types_pkey","id"
|
||||
"verification_tokens","ix_identity_verification_tokens_id","id"
|
||||
"verification_tokens","verification_tokens_pkey","id"
|
||||
"verification_tokens","verification_tokens_token_key","token"
|
||||
"wallets","ix_identity_wallets_id","id"
|
||||
"wallets","wallets_pkey","id"
|
||||
"wallets","wallets_user_id_key","user_id"
|
||||
|
Can't render this file because it contains an unexpected character in line 12 and column 41.
|
Reference in New Issue
Block a user