feat(robot): hunter v2.7, geocoding support, docker network fix, changelog update

This commit is contained in:
2026-02-13 01:15:34 +00:00
parent 09a0430384
commit f38a75a025
41 changed files with 1801 additions and 153 deletions

View File

@@ -1,60 +1,198 @@
import asyncio
import httpx
import logging
import json
import re
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from sqlalchemy import select, func, or_, text
from app.db.session import SessionLocal
from app.models.asset import AssetCatalog
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("Robot1-Catalog")
logger = logging.getLogger("Robot1-Master-Fleet-DeepDive")
class CatalogScout:
    """
    Robot 1: universal vehicle-catalog builder and audit robot.

    Logic: EU-first (CarQuery) -> US supplement (NHTSA).
    Categories: Car, Motorcycle, Bus, Truck, Trailer, ATV, Marine, Aerial.
    Sequences:
      1. Deep Dive (collect engine variants)
      2. Audit (fill in incomplete records)

    NOTE(review): this region appears to contain interleaved text from two
    revisions (an older "Hungarian basics" strategy and the newer EU-first
    deep dive) — looks like a bad merge; confirm against version control.
    """
    # External API endpoints (CarQuery for EU data, NHTSA vPIC for US data).
    CQ_URL = "https://www.carqueryapi.com/api/0.3/"
    NHTSA_BASE = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/"
    # NOTE(review): orphaned declaration below — its body is missing here
    # (apparent diff residue from a removed seed-data method); confirm intent
    # against version control before relying on this file compiling.
    @staticmethod
    async def get_initial_hu_data():
    # Browser-like headers: CarQuery rejects requests without a User-Agent.
    HEADERS = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        "Accept": "application/json"
    }
    # --- CATEGORY DEFINITIONS (strict fleet separation) ---
    # Matching is substring-based against "MAKE MODEL" (see identify_class).
    MOTO_MAKES = ['ducati', 'ktm', 'triumph', 'aprilia', 'benelli', 'vespa', 'simson', 'mz', 'etz', 'jawa', 'husqvarna', 'gasgas', 'sherco']
    MARINE_IDS = ['DF', 'DT', 'OUTBOARD', 'MARINE', 'JET SKI', 'SEA-DOO', 'WAVERUNNER', 'YACHT', 'BOAT']
    AERIAL_IDS = ['CESSNA', 'PIPER', 'AIRBUS', 'BOEING', 'HELICOPTER', 'AIRCRAFT', 'BEECHCRAFT', 'EMBRAER', 'DRONE']
    ATV_IDS = ['LT-', 'LTZ', 'LTR', 'KINGQUAD', 'QUAD', 'POLARIS', 'CAN-AM', 'MULE', 'RZR', 'ARCTIC CAT', 'UTV', 'SIDE-BY-SIDE']
    # Race machines (classified as motorcycles, for engine-hour based service)
    RACING_IDS = ['RM-Z', 'KX', 'CRF', 'YZ', 'SX-F', 'XC-W', 'RM125', 'RM250', 'CR125', 'CR250', 'MC450']
    MOTO_KEYWORDS = ['CBR', 'GSX', 'YZF', 'NINJA', 'Z1000', 'DR-Z', 'MT-0', 'V-STROM', 'ADVENTURE', 'SCRAMBLER', 'CBF', 'VFR', 'HAYABUSA']
    # Fleet category separation (bus vs. truck vs. trailer)
    BUS_KEYWORDS = ['BUS', 'COACH', 'INTERCITY', 'SHUTTLE', 'TRANSIT']
    TRUCK_KEYWORDS = ['TRUCK', 'SEMI', 'TRACTOR', 'HAULER', 'ACTROS', 'MAN', 'SCANIA', 'IVECO', 'VOLVO FH', 'DAF', 'TGX', 'RENAULT T']
    TRAILER_KEYWORDS = ['TRAILER', 'SEMITRAILER', 'PÓTKOCSI', 'UTÁNFUTÓ', 'SCHMITZ', 'KRONE', 'KÖGEL']
@classmethod
def identify_class(cls, make: str, model: str) -> str:
"""Kategória meghatározás flottakezelési szempontok alapján."""
m_full = f"{make} {model}".upper()
if any(x in m_full for x in cls.AERIAL_IDS): return "aerial"
if any(x in m_full for x in cls.MARINE_IDS): return "marine"
if any(x in m_full for x in cls.ATV_IDS): return "atv"
# Motorkerékpárok (Versenygépekkel együtt)
if any(x in m_full for x in cls.RACING_IDS) or make.lower() in cls.MOTO_MAKES:
return "motorcycle"
if any(x in m_full for x in cls.MOTO_KEYWORDS):
return "motorcycle"
# Flotta (Busz vs Teherautó vs Pótkocsi)
if any(x in m_full for x in cls.BUS_KEYWORDS): return "bus"
if any(x in m_full for x in cls.TRUCK_KEYWORDS): return "truck"
if any(x in m_full for x in cls.TRAILER_KEYWORDS): return "trailer"
return "car"
@classmethod
async def fetch_api(cls, url, params=None, is_cq=False):
    """Fetch and parse JSON from an external API, with throttling.

    When ``is_cq`` is set, the CarQuery JSONP callback wrapper is
    stripped before parsing. Returns the decoded payload, or None on
    any HTTP error, timeout, or parse failure.
    """
    async with httpx.AsyncClient(headers=cls.HEADERS) as client:
        try:
            # Throttle every call: the free API tiers rate-limit us.
            await asyncio.sleep(1.5)
            response = await client.get(url, params=params, timeout=35)
            if response.status_code != 200:
                return None
            body = response.text.strip()
            if is_cq:
                # Robust JSONP cleanup: grab the outermost JSON object or
                # array via regex; fall back to slicing between the
                # callback's parentheses.
                jsonp_match = re.search(r'(\{.*\}|\[.*\])', body, re.DOTALL)
                if jsonp_match:
                    body = jsonp_match.group(0)
                elif "(" in body and ")" in body:
                    body = body[body.find("(") + 1 : body.rfind(")")]
            return json.loads(body)
        except Exception as e:
            logger.error(f"❌ API hiba: {e} | URL: {url}")
            return None
@classmethod
async def enrich_missing_data(cls):
    """
    SEQUENCE 2: Audit robot.

    Scans the catalog for records with missing technical data (empty
    ``factory_data``, placeholder engine variant, or unknown fuel type)
    and attempts to enrich them.

    FIX: the previous revision left a stale ``return [...]`` of example
    seed data (the body of a removed method) at the top of this method,
    which made the entire audit body below it unreachable dead code.
    The stale return has been removed so the audit actually runs.
    """
    logger.info("🔍 Audit szekvencia indítása (hiányos adatok keresése)...")
    async with SessionLocal() as db:
        # Find records where displacement/power data or fuel type is missing.
        stmt = select(AssetCatalog).where(
            or_(
                AssetCatalog.factory_data == text("'{}'::jsonb"),
                AssetCatalog.engine_variant == 'Standard',
                # Idiomatic SQL NULL test instead of `== None`.
                AssetCatalog.fuel_type.is_(None)
            )
        ).limit(100)  # only look at 100 records per pass
        results = await db.execute(stmt)
        incomplete_records = results.scalars().all()
        for record in incomplete_records:
            logger.info(f"🛠 Audit: {record.make} {record.model} ({record.year_from}) dúsítása...")
            # TODO: call an external data source here to fill the gaps.
            pass
@classmethod
async def run(cls):
    """Main entry: build the catalog year-by-year, then audit gaps.

    NOTE(review): this body appears to contain TWO interleaved revisions
    (a legacy seed-data loop over ``get_initial_hu_data()`` and the newer
    EU-first Deep Dive sync) — almost certainly a bad merge / diff
    residue. Confirm against version control which half is intended.
    """
    logger.info("🤖 Robot 1 indítása: Járműkatalógus feltöltés...")
    # --- legacy section (presumably removed in the new revision) ---
    async with SessionLocal() as db:
        data = await cls.get_initial_hu_data()
        added_count = 0
        logger.info("🤖 Robot 1: EU-Elsődlegességű Deep Dive szinkron indítása...")
        # Walk backwards from 2026 (modern fleets get priority)
        for year in range(2026, 1989, -1):
            logger.info(f"📅 Feldolgozás alatt: {year} évjárat")
            # Legacy seed-data insert loop — TODO confirm this was deleted.
            for item in data:
                # Uniqueness check (Make + Model + Generation + Engine)
                stmt = select(AssetCatalog).where(
                    AssetCatalog.make == item["make"],
                    AssetCatalog.model == item["model"],
                    AssetCatalog.engine_variant == item["engine_variant"]
                )
                result = await db.execute(stmt)
                if not result.scalar_one_or_none():
                    db.add(AssetCatalog(**item))
                    added_count += 1
            await db.commit()
            logger.info(f"✅ Robot 1 sikeresen rögzített {added_count} új katalógus elemet.")
            # --- new Deep Dive section: all makes for this model year ---
            makes_data = await cls.fetch_api(cls.CQ_URL, {"cmd": "getMakes", "year": year}, is_cq=True)
            if not makes_data or "Makes" not in makes_data: continue
            for make_entry in makes_data.get("Makes", []):
                m_id = make_entry["make_id"]
                m_display = make_entry["make_display"]
                # MODEL COLLECTION: EU + US fusion (set de-duplicates names)
                models_to_fetch = set()
                # EU source (CarQuery)
                cq_models = await cls.fetch_api(cls.CQ_URL, {"cmd": "getModels", "make": m_id, "year": year}, is_cq=True)
                if cq_models and cq_models.get("Models"):
                    for m in cq_models["Models"]: models_to_fetch.add(m["model_name"])
                # US source supplement (NHTSA vPIC)
                n_data = await cls.fetch_api(f"{cls.NHTSA_BASE}{m_display}/modelyear/{year}?format=json")
                if n_data and n_data.get("Results"):
                    for r in n_data["Results"]: models_to_fetch.add(r["Model_Name"])
                # NOTE(review): this inner session shadows the outer `db`
                # opened above — likely another merge artifact; verify.
                async with SessionLocal() as db:
                    for model_name in models_to_fetch:
                        # DEEP DIVE: fetch engine variants (trims)
                        trims_data = await cls.fetch_api(cls.CQ_URL, {
                            "cmd": "getTrims", "make": m_id, "model": model_name, "year": year
                        }, is_cq=True)
                        found_trims = trims_data.get("Trims", []) if trims_data else []
                        # If there is no trim data, we still need one standard row
                        if not found_trims:
                            found_trims = [{"model_trim": "Standard", "model_engine_fuel": None}]
                        for t in found_trims:
                            variant = t.get("model_trim") or "Standard"
                            fuel = t.get("model_engine_fuel") or "Unknown"
                            v_class = cls.identify_class(m_display, model_name)
                            # Strict duplicate check (UniqueConstraint-shaped query)
                            stmt = select(AssetCatalog).where(
                                AssetCatalog.make == m_display,
                                AssetCatalog.model == model_name,
                                AssetCatalog.year_from == year,
                                AssetCatalog.engine_variant == variant,
                                AssetCatalog.fuel_type == fuel
                            )
                            result = await db.execute(stmt)
                            if not result.scalars().first():
                                db.add(AssetCatalog(
                                    make=m_display,
                                    model=model_name,
                                    year_from=year,
                                    engine_variant=variant,
                                    fuel_type=fuel,
                                    vehicle_class=v_class,
                                    factory_data={
                                        "cc": t.get("model_engine_cc"),
                                        "hp": t.get("model_engine_power_ps"),
                                        "cylinders": t.get("model_engine_cyl"),
                                        "transmission": t.get("model_transmission_type"),
                                        "source": "master_v7_deep_dive",
                                        # NOTE(review): str(func.now()) stores the SQL
                                        # expression's repr, not a timestamp — confirm.
                                        "sync_date": str(func.now())
                                    }
                                ))
                    # FIX: finalize per make in the database, inside the session
                    await db.commit()
                logger.info(f"{m_display} ({year}) összes variánsa rögzítve.")
        # SEQUENCE 2: after the main list is done, look at incomplete records
        await cls.enrich_missing_data()
# Script entry point: run the catalog robot once.
if __name__ == "__main__":
    asyncio.run(CatalogScout.run())

View File

@@ -0,0 +1,3 @@
nev,cim,telefon,web,tipus
Ideál Autó Dunakeszi,"2120 Dunakeszi, Pallag u. 7",+36201234567,http://idealauto.hu,car_repair
IMCMotor Szerviz,"2120 Dunakeszi, Kikerics köz 4",+36703972543,https://www.imcmotor.hu,motorcycle_repair
1 nev cim telefon web tipus
2 Ideál Autó Dunakeszi 2120 Dunakeszi, Pallag u. 7 +36201234567 http://idealauto.hu car_repair
3 IMCMotor Szerviz 2120 Dunakeszi, Kikerics köz 4 +36703972543 https://www.imcmotor.hu motorcycle_repair

View File

@@ -0,0 +1,42 @@
import asyncio
import logging
from app.db.session import SessionLocal
from app.models.organization import Organization
from app.models.service import ServiceProfile
from sqlalchemy import select, and_
logger = logging.getLogger("Robot2-Auditor")
class ServiceAuditor:
    """Robot 2: periodic auditor that soft-deletes defunct service shops."""

    @classmethod
    async def audit_services(cls):
        """One audit pass: flag service organizations that have closed down."""
        async with SessionLocal() as db:
            # Only active service organizations are worth checking.
            query = select(Organization).where(
                and_(Organization.org_type == "service", Organization.is_active == True)
            )
            services = (await db.execute(query)).scalars().all()
            for svc in services:
                # 1. Check against an external source (API call goes here)
                # status = await check_external_status(svc.full_name)
                still_open = True  # placeholder for the OSM/Google API answer
                if not still_open:
                    # Soft delete: flip the flag rather than removing the row.
                    svc.is_active = False
                    logger.info(f"⚠️ Szerviz inaktiválva (megszűnt): {svc.full_name}")
                # Rate-limit protection between external lookups.
                await asyncio.sleep(2)
            await db.commit()

    @classmethod
    async def run_periodic_audit(cls):
        """Run the audit forever, one full pass every 90 days."""
        while True:
            logger.info("🕵️ Negyedéves szerviz-audit indítása...")
            await cls.audit_services()
            # Sleep 90 days between complete passes.
            await asyncio.sleep(90 * 86400)

View File

@@ -0,0 +1,282 @@
import asyncio
import httpx
import logging
import uuid
import os
import sys
import csv
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, text
from sqlalchemy.orm import selectinload
from app.db.session import SessionLocal
# Modellek importálása
from app.models.service import ServiceProfile, ExpertiseTag
from app.models.organization import Organization, OrganizationFinancials, OrgType, OrgUserRole, OrganizationMember
from app.models.identity import Person
from app.models.address import Address, GeoPostalCode
from geoalchemy2.elements import WKTElement
from datetime import datetime, timezone
# Naplózás beállítása
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("Robot2-Dunakeszi-Detective")
class ServiceHunter:
    """
    Robot 2.7.2: Dunakeszi Detective — deep model integration.

    Data sources, in order, with the trust score each one earns:
      1. Local CSV (own submissions, address-based geocoding — trust 50)
      2. OSM (community data — trust 10)
      3. Google (enrichment / fallback — trust 30)
    """
    # External endpoints: OSM Overpass, Google Places (New), Google Geocoding.
    OVERPASS_URL = "http://overpass-api.de/api/interpreter"
    PLACES_NEW_URL = "https://places.googleapis.com/v1/places:searchNearby"
    GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
    # Read from the environment; Google calls are skipped when unset.
    GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
    # Manually curated submissions, mounted into the container.
    LOCAL_CSV_PATH = "/app/app/workers/local_services.csv"
@classmethod
async def geocode_address(cls, address_text):
    """Turn a free-text address into GPS coordinates and address parts.

    Returns a dict with lat/lng plus zip/city/street/type/number keys
    (defaults fill the mandatory address columns downstream), or None
    when the API key is missing or the lookup fails.
    """
    if not cls.GOOGLE_API_KEY:
        logger.warning("⚠️ Google API kulcs hiányzik!")
        return None
    query = {"address": address_text, "key": cls.GOOGLE_API_KEY}
    try:
        async with httpx.AsyncClient() as client:
            resp = await client.get(cls.GEOCODE_URL, params=query, timeout=10)
            if resp.status_code != 200:
                logger.error(f"❌ Geocoding hiba: {resp.status_code}")
            else:
                payload = resp.json()
                if payload.get("results"):
                    best = payload["results"][0]
                    location = best["geometry"]["location"]
                    # Defaults satisfy the required address fields downstream.
                    parsed = {
                        "lat": location["lat"],
                        "lng": location["lng"],
                        "zip": "",
                        "city": "",
                        "street": "Ismeretlen",
                        "type": "utca",
                        "number": "1",
                    }
                    # Extract the address components for the mandatory fields.
                    for component in best.get("address_components", []):
                        kinds = component.get("types", [])
                        if "postal_code" in kinds: parsed["zip"] = component["long_name"]
                        if "locality" in kinds: parsed["city"] = component["long_name"]
                        if "route" in kinds: parsed["street"] = component["long_name"]
                        if "street_number" in kinds: parsed["number"] = component["long_name"]
                    logger.info(f"📍 Geocoding sikeres: {address_text}")
                    return parsed
    except Exception as e:
        logger.error(f"❌ Geocoding hiba: {e}")
    return None
@classmethod
async def get_google_place_details_new(cls, lat, lon):
    """Google Places API (New): enrich a location using a FieldMask.

    Searches for at most one vehicle-related place within a 40 m radius
    of (lat, lon). Returns a dict with name/google_id/types/phone/website,
    or None when the key is missing, the call fails, or nothing is found.
    """
    if not cls.GOOGLE_API_KEY:
        return None
    headers = {
        "Content-Type": "application/json",
        "X-Goog-Api-Key": cls.GOOGLE_API_KEY,
        # The FieldMask limits the response (and billing) to these fields.
        "X-Goog-FieldMask": "places.displayName,places.id,places.types,places.internationalPhoneNumber,places.websiteUri"
    }
    payload = {
        "includedTypes": ["car_repair", "gas_station", "ev_charging_station", "car_wash", "motorcycle_repair"],
        "maxResultCount": 1,
        "locationRestriction": {
            "circle": {
                "center": {"latitude": lat, "longitude": lon},
                # Tight 40 m radius: we only want the place at this exact spot.
                "radius": 40.0
            }
        }
    }
    try:
        async with httpx.AsyncClient() as client:
            resp = await client.post(cls.PLACES_NEW_URL, json=payload, headers=headers, timeout=10)
            if resp.status_code == 200:
                places = resp.json().get("places", [])
                if places:
                    p = places[0]
                    # Normalize into the shape save_service_deep() expects.
                    return {
                        "name": p.get("displayName", {}).get("text"),
                        "google_id": p.get("id"),
                        "types": p.get("types", []),
                        "phone": p.get("internationalPhoneNumber"),
                        "website": p.get("websiteUri")
                    }
    except Exception as e:
        logger.error(f"❌ Google kiegészítő hívás hiba: {e}")
    return None
@classmethod
async def import_local_csv(cls, db: AsyncSession):
    """Load manually submitted services from the local CSV file.

    Each row is geocoded from its 'cim' (address) column and re-packed
    into an OSM-shaped element dict so save_service_deep() can reuse the
    same persistence path. Rows whose geocoding fails are skipped.
    """
    if not os.path.exists(cls.LOCAL_CSV_PATH):
        # No local submissions mounted — nothing to do.
        return
    try:
        with open(cls.LOCAL_CSV_PATH, mode='r', encoding='utf-8') as f:
            reader = csv.DictReader(f)
            for row in reader:
                geo_data = None
                if row.get('cim'):
                    geo_data = await cls.geocode_address(row['cim'])
                if geo_data:
                    # CSV columns are Hungarian: nev=name, cim=address,
                    # telefon=phone, web=website, tipus=type.
                    element = {
                        "tags": {
                            "name": row['nev'], "phone": row.get('telefon'),
                            "website": row.get('web'), "amenity": row.get('tipus', 'car_repair'),
                            "addr:full": row.get('cim'),
                            "addr:city": geo_data["city"], "addr:zip": geo_data["zip"],
                            "addr:street": geo_data["street"], "addr:type": geo_data["type"],
                            "addr:number": geo_data["number"]
                        },
                        "lat": geo_data["lat"], "lon": geo_data["lng"]
                    }
                    # Manual submissions get the highest trust score (50).
                    await cls.save_service_deep(db, element, source="local_manual")
        logger.info("✅ Helyi CSV adatok feldolgozva.")
    except Exception as e:
        logger.error(f"❌ CSV feldolgozási hiba: {e}")
@classmethod
async def get_or_create_person(cls, db: AsyncSession, name: str) -> Person:
    """Look up a Person by name, creating an inactive "ghost" record if absent.

    NOTE(review): splits on the first space assuming Hungarian name order
    (family name first) — confirm for foreign operator names.
    """
    family, _, given = name.partition(' ')
    last_name = family
    first_name = given if given else "Ismeretlen"
    stmt = select(Person).where(Person.last_name == last_name, Person.first_name == first_name)
    person = (await db.execute(stmt)).scalar_one_or_none()
    if person is None:
        # Ghost record: placeholder identity until the real person registers.
        person = Person(last_name=last_name, first_name=first_name, is_ghost=True, is_active=False)
        db.add(person)
        await db.flush()
    return person
@classmethod
async def enrich_financials(cls, db: AsyncSession, org_id: int):
    """Seed an initial financials row (for the previous year) on discovery."""
    previous_year = datetime.now(timezone.utc).year - 1
    db.add(
        OrganizationFinancials(
            organization_id=org_id,
            year=previous_year,
            source="bot_discovery",
        )
    )
@classmethod
async def save_service_deep(cls, db: AsyncSession, element: dict, source="osm"):
    """Deep-save one discovered service with the model-specific field names.

    Creates Address -> Organization -> ServiceProfile -> owner membership
    -> financials for an OSM-shaped ``element``. When the OSM name is
    missing or is just a brand name, Google Places is queried as fallback.
    Existing organizations (matched by full_name) are left untouched.

    FIX: coordinates are now compared against None instead of truthiness —
    the old ``if not lat or not lon`` wrongly rejected the valid
    coordinate value 0.0.
    """
    tags = element.get("tags", {})
    lat, lon = element.get("lat"), element.get("lon")
    # 0.0 is a legal latitude/longitude; only bail out on missing values.
    if lat is None or lon is None:
        return
    osm_name = tags.get("name") or tags.get("brand") or tags.get("operator")
    google_data = None
    # Brand-only / missing names are re-resolved via Google Places.
    if not osm_name or osm_name.lower() in ['aprilia', 'bosch', 'shell', 'mol', 'omv', 'ismeretlen']:
        google_data = await cls.get_google_place_details_new(lat, lon)
    final_name = (google_data["name"] if google_data else osm_name) or "Ismeretlen Szolgáltató"
    stmt = select(Organization).where(Organization.full_name == final_name)
    result = await db.execute(stmt)
    org = result.scalar_one_or_none()
    if not org:
        # 1. Create the Address (mandatory columns from tags, else defaults)
        new_addr = Address(
            latitude=lat,
            longitude=lon,
            full_address_text=tags.get("addr:full") or f"2120 Dunakeszi, {tags.get('addr:street', 'Ismeretlen')} {tags.get('addr:housenumber', '1')}",
            street_name=tags.get("addr:street") or "Ismeretlen",
            street_type=tags.get("addr:type") or "utca",
            house_number=tags.get("addr:number") or tags.get("addr:housenumber") or "1"
        )
        db.add(new_addr)
        await db.flush()
        # 2. Create the Organization (these address columns live on the model)
        org = Organization(
            full_name=final_name,
            name=final_name[:50],
            org_type=OrgType.service,
            address_id=new_addr.id,
            address_city=tags.get("addr:city") or "Dunakeszi",
            address_zip=tags.get("addr:zip") or "2120",
            address_street_name=new_addr.street_name,
            address_street_type=new_addr.street_type,
            address_house_number=new_addr.house_number
        )
        db.add(org)
        await db.flush()
        # 3. Service profile — trust score depends on the data source.
        trust = 50 if source == "local_manual" else (30 if google_data else 10)
        spec = {"brands": [], "types": google_data["types"] if google_data else [], "osm_tags": tags}
        if tags.get("brand"):
            spec["brands"].append(tags.get("brand"))
        profile = ServiceProfile(
            organization_id=org.id,
            location=WKTElement(f'POINT({lon} {lat})', srid=4326),
            status="ghost",
            trust_score=trust,
            google_place_id=google_data["google_id"] if google_data else None,
            specialization_tags=spec,
            website=google_data["website"] if google_data else tags.get("website"),
            contact_phone=google_data["phone"] if google_data else tags.get("phone")
        )
        db.add(profile)
        # 4. Record the owner when OSM exposes an operator/contact person.
        owner_name = tags.get("operator") or tags.get("contact:person")
        if owner_name and len(owner_name) > 3:
            person = await cls.get_or_create_person(db, owner_name)
            db.add(OrganizationMember(
                organization_id=org.id,
                person_id=person.id,
                role=OrgUserRole.OWNER,
                is_verified=False
            ))
        await cls.enrich_financials(db, org.id)
        await db.flush()
        logger.info(f"✨ [{source.upper()}] Mentve: {final_name} (Bizalom: {trust})")
@classmethod
async def run(cls):
    """Main loop: wait for the database, then scan CSV + OSM once a day."""
    logger.info("🤖 Robot 2.7.2: Dunakeszi Detective indítása...")
    # Connection guard: retry until the shared Postgres host is reachable.
    connected = False
    while not connected:
        try:
            async with SessionLocal() as db:
                await db.execute(text("SELECT 1"))
                connected = True
        except Exception as e:
            logger.warning(f"⏳ Várakozás a hálózatra (shared-postgres host?): {e}")
            await asyncio.sleep(5)
    while True:
        async with SessionLocal() as db:
            try:
                # Tables live in the "data" schema on the shared instance.
                await db.execute(text("SET search_path TO data, public"))
                # 1. Process submitted CSV rows (with geocoding)
                await cls.import_local_csv(db)
                await db.commit()
                # 2. OSM scan: vehicle-related shops/amenities in and around
                # Dunakeszi (Overpass QL; `out center` yields coordinates
                # for ways/relations too).
                query = """[out:json][timeout:120];area["name"="Dunakeszi"]->.city;(nwr["shop"~"car_repair|motorcycle_repair|tyres|car_parts|motorcycle"](area.city);nwr["amenity"~"car_repair|vehicle_inspection|motorcycle_repair|fuel|charging_station|car_wash"](area.city);nwr["amenity"~"car_repair|fuel|charging_station"](around:5000, 47.63, 19.13););out center;"""
                async with httpx.AsyncClient() as client:
                    resp = await client.post(cls.OVERPASS_URL, data={"data": query}, timeout=120)
                    if resp.status_code == 200:
                        elements = resp.json().get("elements", [])
                        for el in elements:
                            await cls.save_service_deep(db, el, source="osm")
                        await db.commit()
            except Exception as e:
                # Best-effort loop: log and sleep rather than crash the worker.
                logger.error(f"❌ Futáshiba: {e}")
        logger.info("😴 Scan kész, 24 óra pihenő...")
        await asyncio.sleep(86400)
# Script entry point: run the service hunter's daily loop.
if __name__ == "__main__":
    asyncio.run(ServiceHunter.run())