import json
import logging
import os
import secrets
import threading
import time
from dataclasses import dataclass, field
from datetime import datetime
from hashlib import sha256
from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple
from urllib.parse import urlencode, urljoin

import pymysql
import requests
from flask import Blueprint, Flask, Request, Response, g, jsonify, request

from ocpi_payloads import map_incoming_payload, map_outgoing_payload, module_is_supported
from ocpi_utils import (
    BackendProfile,
    BackendRegistry,
    FailureNotifier,
    load_config,
    log_request,
    log_response,
    ocpi_timestamp,
    setup_logging,
    validate_json_payload,
    verify_token_header,
)
from services.location_repository import LocationRepository
from services.tariff_service import TariffService
from services.token_service import TokenService

CONFIG_PATH_ENV = "PIPELET_CONFIG"
DEFAULT_CONFIG_FILE = "config.json"
SUPPORTED_VERSIONS = ["2.1.1", "2.2", "2.3"]
DEFAULT_HOST = "0.0.0.0"
DEFAULT_PORT = 9760
SUCCESS_CODE = 1000
CLIENT_ERROR_CODE = 2001
DEFAULT_SYNC_INTERVAL_SECONDS = 300
DEFAULT_LOCATIONS_MODE = "pull"
DEFAULT_TARIFF_SYNC_INTERVAL_SECONDS = 600
DEFAULT_RETRY_INTERVAL_SECONDS = 120
DEFAULT_RETRY_BATCH_SIZE = 50
DEFAULT_RETRY_MAX_ATTEMPTS = 3
DEFAULT_OCPP_API_BASE_URL = "http://127.0.0.1:9751/api"
DEFAULT_OCPP_API_TIMEOUT = 10.0
AUTH_CACHE_TTL_SECONDS = 30.0

_authorize_cache: dict[tuple[str, str], float] = {}
_authorize_cache_lock = threading.Lock()


def ocpi_response(
    data: Any = None,
    *,
    status_code: int = SUCCESS_CODE,
    status_message: Optional[str] = None,
    error_code: Optional[str] = None,
    http_status: int = 200,
):
    """Build a standard OCPI JSON envelope and return (flask response, http status)."""
    envelope: Dict[str, Any] = {
        "status_code": status_code,
        "timestamp": ocpi_timestamp(),
    }
    # Optional fields are included only when truthy, keeping the envelope minimal.
    for key, value in (("error_code", error_code), ("status_message", status_message)):
        if value:
            envelope[key] = value
    # `data` may legitimately be falsy (empty list/dict), so test against None.
    if data is not None:
        envelope["data"] = data
    return jsonify(envelope), http_status


def _unsupported_version_response(version: str):
    """Return a 404 OCPI error for an OCPI version this service does not speak.

    The rejected version is echoed in the status message (previously the
    `version` parameter was accepted but never used, producing a generic
    message), mirroring _unsupported_module_response which names the
    offending module.
    """
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message=f"Unsupported version '{version}'",
        error_code="UNSUPPORTED_VERSION",
        http_status=404,
    )


def _validate_version(version: str):
    """Return an error response tuple for unknown versions, or None when supported."""
    if version in SUPPORTED_VERSIONS:
        return None
    return _unsupported_version_response(version)


def _unsupported_module_response(module: str, version: str):
    """Return a 501 OCPI error for a module the given version does not offer."""
    message = f"Module '{module}' is not available in version {version}"
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message=message,
        error_code="NOT_SUPPORTED",
        http_status=501,
    )


def _mysql_config(cfg: Mapping[str, Any]) -> Dict[str, Any]:
    mysql_cfg = {
        "host": "127.0.0.1",
        "user": "root",
        "password": "",
        "db": "op",
        "charset": "utf8mb4",
    }
    if isinstance(cfg, Mapping):
        mysql_cfg.update(cfg)
    return mysql_cfg


def _build_base_url(req: Request, ocpi_cfg: Dict[str, Any]) -> str:
    """Resolve the externally visible base URL for OCPI links.

    Precedence: the request's backend profile base_url, then the
    OCPI_API_BASE_URL environment variable, then the config value, then
    the request's own root (suffixed with /b/<backend_id> when routed
    per-backend).
    """
    profile = getattr(g, "backend_profile", None)
    if profile and profile.base_url:
        override = profile.base_url
    else:
        override = os.environ.get("OCPI_API_BASE_URL")
        if override is None:
            override = _select_env_value(ocpi_cfg, "base_url")
    if override:
        return str(override).rstrip("/")

    root = req.url_root.rstrip("/")
    backend_id = (req.view_args or {}).get("backend_id")
    return f"{root}/b/{backend_id}" if backend_id else root


def _connect_db(mysql_cfg: Mapping[str, Any]):
    """Open a PyMySQL connection using dict cursors for name-based row access."""
    params = {
        "host": mysql_cfg.get("host", "127.0.0.1"),
        "user": mysql_cfg.get("user", "root"),
        "password": mysql_cfg.get("password", ""),
        "db": mysql_cfg.get("db", "op"),
        "charset": mysql_cfg.get("charset", "utf8mb4"),
    }
    return pymysql.connect(cursorclass=pymysql.cursors.DictCursor, **params)


def _timestamp_str(value: Optional[Any]) -> str:
    if isinstance(value, datetime):
        return value.replace(microsecond=0).isoformat() + "Z"
    if isinstance(value, str):
        return value
    return ocpi_timestamp()


def _table_columns(conn, table: str) -> set[str]:
    """Return the column names of *table*, or an empty set on any DB error.

    `table` is interpolated directly into the statement, so it must come
    from trusted code, never from request input.
    """
    log = logging.getLogger("ocpi_api")
    try:
        with conn.cursor() as cur:
            cur.execute(f"SHOW COLUMNS FROM {table}")
            return {row["Field"] for row in cur.fetchall()}
    except Exception:
        log.exception("Failed to inspect table %s", table)
        return set()


def _as_bool(value: Any) -> bool:
    if isinstance(value, str):
        return value.strip().lower() in {"1", "true", "yes", "on"}
    return bool(value)


def _backend_auth_token(backend: Mapping[str, Any]) -> Optional[str]:
    for key in ("peer_token", "token"):
        token = backend.get(key)
        if token:
            return token
    return None


def _backend_profile_token(profile: Optional[BackendProfile]) -> Optional[str]:
    """Return the profile's token, or None when no profile is configured."""
    return profile.token if profile is not None else None


def _generate_token() -> str:
    return secrets.token_hex(32)


def _active_environment(cfg: Optional[Mapping[str, Any]]) -> str:
    env_raw = cfg.get("environment") if isinstance(cfg, Mapping) else None
    env = str(env_raw or "prod").lower()
    return "sandbox" if env == "sandbox" else "prod"


def _select_env_value(cfg: Optional[Mapping[str, Any]], key: str) -> Any:
    """Look up *key* in config with an environment-specific override.

    '<key>_<env>' (e.g. 'base_url_sandbox') wins when it holds a non-empty
    value; otherwise the plain key is returned. Non-mapping cfg yields None.
    """
    if not isinstance(cfg, Mapping):
        return None
    scoped = cfg.get(f"{key}_{_active_environment(cfg)}")
    return scoped if scoped not in (None, "") else cfg.get(key)


def _parse_date_param(raw: Optional[str]) -> Optional[datetime]:
    if not raw:
        return None
    try:
        cleaned = raw.strip().replace("Z", "")
        return datetime.fromisoformat(cleaned)
    except Exception:
        return None


def _auth_cache_key(token: Optional[str], station: Optional[str]) -> Optional[tuple[str, str]]:
    if token in (None, ""):
        return None
    normalized_token = str(token).strip().upper()
    station_key = str(station or "").strip().upper()
    return normalized_token, station_key


def _cache_authorization(token: Optional[str], station: Optional[str]) -> None:
    """Remember a successful authorization for AUTH_CACHE_TTL_SECONDS."""
    key = _auth_cache_key(token, station)
    if key is None:
        return
    deadline = time.time() + AUTH_CACHE_TTL_SECONDS
    with _authorize_cache_lock:
        _authorize_cache[key] = deadline


def _authorization_allowed_from_cache(token: Optional[str], station: Optional[str]) -> bool:
    """Check the short-lived authorization cache; stale entries are evicted lazily."""
    key = _auth_cache_key(token, station)
    if key is None:
        return False
    now = time.time()
    with _authorize_cache_lock:
        deadline = _authorize_cache.get(key)
        if deadline is None:
            return False
        if deadline < now:
            # Drop expired entries here rather than running a sweeper thread.
            _authorize_cache.pop(key, None)
            return False
    return True


def _pagination_params() -> tuple[int, int]:
    """Read offset/limit from the query string, clamped to safe ranges.

    Bad or missing values fall back to offset=0, limit=50; limit is kept
    within [1, 100] and offset is never negative.
    """
    def _int_arg(name: str, fallback: int) -> int:
        try:
            return int(request.args.get(name, fallback))
        except (TypeError, ValueError):
            return fallback

    offset = max(_int_arg("offset", 0), 0)
    limit = min(max(_int_arg("limit", 50), 1), 100)
    return offset, limit


def _backend_authorize(
    profile: Optional[BackendProfile],
    version: str,
    uid: str,
    payload: Mapping[str, Any],
) -> tuple[Optional[bool], Optional[dict[str, Any]], Optional[str], Optional[int]]:
    """POST a realtime token authorization to the backend MSP.

    Returns a tuple (allowed, token_payload, error_message, error_status):
    - (None, None, None, None) when no backend URL is configured, so the
      caller can fall back to local data;
    - allowed True/False for a definitive remote verdict (404 maps to a
      plain "not allowed", not an error);
    - error_message non-None when the request itself failed or the MSP
      returned an unusable status; error_status carries the HTTP code to
      relay (504 timeout, 502 transport failure, or the MSP's own code).
    """
    if profile is None or not profile.url:
        return None, None, None, None

    url = str(profile.url).rstrip("/") + f"/ocpi/{version}/tokens/{uid}/authorize"
    headers = {"Content-Type": "application/json"}
    token = _backend_profile_token(profile)
    if token:
        # A token that already contains a space is assumed to carry its own
        # auth scheme and is sent as-is; otherwise prefix the OCPI "Token" scheme.
        headers["Authorization"] = token if " " in token else f"Token {token}"

    # requests.Timeout is a subclass of RequestException, so it must be caught first.
    try:
        response = requests.post(url, json=payload, headers=headers, timeout=10)
    except requests.Timeout as exc:
        return None, None, f"MSP authorization request timed out: {exc}", 504
    except requests.RequestException as exc:
        return None, None, f"MSP authorization request failed: {exc}", 502

    # Tolerate non-JSON bodies; status-code handling below still applies.
    try:
        body = response.json()
    except ValueError:
        body = {}

    if response.status_code >= 500:
        return None, None, f"MSP authorization error {response.status_code}: {response.text}", response.status_code
    if response.status_code == 404:
        # Unknown token at the MSP: treated as a definitive denial.
        return False, None, None, 404
    if response.status_code >= 400:
        return None, None, f"MSP authorization rejected with status {response.status_code}: {response.text}", response.status_code

    # Expected OCPI envelope: {"data": {"allowed"/"status": ..., "token": {...}}}.
    data = body.get("data") if isinstance(body, Mapping) else None
    allowed_value = str(data.get("allowed") or data.get("status") or "").upper() if isinstance(data, Mapping) else ""
    token_payload = data.get("token") if isinstance(data, Mapping) else None
    allowed = None
    if allowed_value in {"ALLOWED", "ACCEPTED"}:
        allowed = True
    elif allowed_value:
        # Any other non-empty verdict (e.g. BLOCKED, EXPIRED) counts as denied;
        # an empty verdict leaves allowed as None (no decision).
        allowed = False
    return allowed, token_payload, None, response.status_code


def _station_identifier(payload: Mapping[str, Any]) -> str:
    for key in (
        "evse_uid",
        "evse_id",
        "location_id",
        "locationId",
        "station_id",
        "chargepoint_id",
    ):
        value = payload.get(key)
        if value not in (None, ""):
            return str(value)
    return ""


def _authorize_token_request(
    uid: str,
    version: str,
    payload: Mapping[str, Any],
    *,
    profile: Optional[BackendProfile] = None,
) -> tuple[Optional[bool], Optional[dict[str, Any]], Optional[str], Optional[int]]:
    """Authorize *uid* against the backend MSP, falling back to local data.

    Returns (allowed, token_payload, error_message, error_status); a
    non-None error_message means the remote call itself failed.
    """
    target = profile or getattr(g, "backend_profile", None)
    allowed, token_payload, error, status = _backend_authorize(target, version, uid, payload)
    if error:
        return None, None, error, status
    if allowed is None and token_payload is None:
        # Backend gave no verdict (not configured / empty answer): decide locally.
        return (*data_repo.authorize_token(uid), None, None)
    return allowed, token_payload, None, None


def _ensure_token_authorized(
    token: str,
    *,
    station_id: Optional[str],
    version: Optional[str],
    payload: Mapping[str, Any],
) -> None:
    """Raise RuntimeError unless *token* is authorized (cache hit or live check).

    A recent positive verdict in the in-process cache short-circuits the
    backend round trip. On a fresh grant both the presented token and the
    uid returned by the MSP (which may differ from the presented token)
    are cached for this station.

    Raises:
        RuntimeError: when the authorization request failed or was denied.
    """
    if _authorization_allowed_from_cache(token, station_id):
        return

    # Default to the newest supported OCPI version when none is given.
    target_version = version or SUPPORTED_VERSIONS[-1]
    allowed, token_payload, error_message, error_status = _authorize_token_request(
        token,
        target_version,
        payload,
    )
    if error_message:
        raise RuntimeError(error_message)
    if allowed:
        _cache_authorization(token, station_id)
    if token_payload and allowed:
        _cache_authorization(token_payload.get("uid") or token, station_id)
    if not allowed:
        # NOTE(review): `allowed` may also be None here (no verdict from either
        # backend or local data); that is treated the same as an explicit
        # denial — confirm this fail-closed behavior is intended.
        raise RuntimeError("Token authorization rejected")


def _apply_pagination_headers(response: Response, total: int, limit: int, offset: int) -> None:
    """Attach X-Total-Count and, when more rows exist, an RFC 5988 'next' Link."""
    response.headers["X-Total-Count"] = str(total)
    if offset + limit >= total:
        return
    next_params = request.args.to_dict()
    next_params["offset"] = offset + limit
    next_params["limit"] = limit
    response.headers["Link"] = f"<{request.base_url}?{urlencode(next_params)}>; rel=\"next\""


def _request_duration_ms(start: Optional[float] = None) -> Optional[int]:
    reference = start if start is not None else getattr(request, "start_time", None)
    if reference is None:
        return None
    try:
        return int((time.perf_counter() - reference) * 1000)
    except Exception:
        return None


@dataclass
class JobMetrics:
    """In-process counters for sync jobs and OCPI HTTP traffic."""

    location_pushes: int = 0
    tariff_pushes: int = 0
    retry_attempts: int = 0
    retry_successes: int = 0
    retry_failures: int = 0
    http_requests: int = 0
    http_errors: int = 0
    http_token_errors: int = 0
    http_latency_ms_sum: float = 0.0
    http_latency_ms_max: float = 0.0
    last_error_timestamp: float = 0.0

    def record_http_request(self, status_code: int, duration: float) -> None:
        """Fold one HTTP exchange (duration in seconds) into the counters."""
        elapsed_ms = duration * 1000
        self.http_requests += 1
        self.http_latency_ms_sum += elapsed_ms
        if elapsed_ms > self.http_latency_ms_max:
            self.http_latency_ms_max = elapsed_ms
        if status_code >= 400:
            self.http_errors += 1
            self.last_error_timestamp = time.time()
        # 401/403 additionally count as credential problems.
        if status_code in (401, 403):
            self.http_token_errors += 1

    def as_prometheus(self) -> str:
        """Render the counters in Prometheus text exposition format."""
        samples: list[tuple[str, str]] = [
            ("pipelet_location_push_total", str(self.location_pushes)),
            ("pipelet_tariff_push_total", str(self.tariff_pushes)),
            ("pipelet_retry_attempt_total", str(self.retry_attempts)),
            ("pipelet_retry_success_total", str(self.retry_successes)),
            ("pipelet_retry_failure_total", str(self.retry_failures)),
            ("pipelet_ocpi_http_requests_total", str(self.http_requests)),
            ("pipelet_ocpi_http_errors_total", str(self.http_errors)),
            ("pipelet_ocpi_http_token_errors_total", str(self.http_token_errors)),
            ("pipelet_ocpi_http_latency_ms_sum", f"{self.http_latency_ms_sum:.2f}"),
            ("pipelet_ocpi_http_latency_ms_max", f"{self.http_latency_ms_max:.2f}"),
            ("pipelet_ocpi_last_error_timestamp", f"{self.last_error_timestamp:.0f}"),
        ]
        return "".join(f"{name} {value}\n" for name, value in samples)


class SyncRunRepository:
    """Persists and reads per-job sync run results in op_ocpi_sync_runs.

    All DB failures are logged and swallowed; read methods return empty
    lists and writes become no-ops so callers never crash on DB outages.
    """

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)
        # Best-effort schema setup; the repository stays usable even when
        # the database is unreachable at construction time.
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for sync run setup")
        else:
            try:
                self._ensure_table(conn)
            finally:
                try:
                    conn.close()
                except Exception:
                    pass

    def _connect(self):
        """Open a fresh connection; callers are responsible for closing it."""
        return _connect_db(self.mysql_cfg)

    @staticmethod
    def _ensure_table(conn) -> None:
        """Create op_ocpi_sync_runs and retrofit columns added after v1."""
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS op_ocpi_sync_runs (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    job_name VARCHAR(128) NOT NULL,
                    module VARCHAR(64) NOT NULL,
                    direction VARCHAR(32) NOT NULL,
                    backend_id INT NULL,
                    backend_name VARCHAR(255) NULL,
                    record_type VARCHAR(32) NULL,
                    duration_ms INT NULL,
                    success TINYINT(1) NOT NULL DEFAULT 0,
                    status_code INT NULL,
                    detail TEXT NULL,
                    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    KEY idx_module_direction (module, direction),
                    KEY idx_created_at (created_at)
                ) CHARACTER SET utf8mb4
                """
            )
            # Older installations may predate these columns.
            cur.execute("SHOW COLUMNS FROM op_ocpi_sync_runs LIKE 'record_type'")
            if cur.fetchone() is None:
                cur.execute(
                    "ALTER TABLE op_ocpi_sync_runs ADD COLUMN record_type VARCHAR(32) NULL AFTER backend_name"
                )
            cur.execute("SHOW COLUMNS FROM op_ocpi_sync_runs LIKE 'duration_ms'")
            if cur.fetchone() is None:
                cur.execute(
                    "ALTER TABLE op_ocpi_sync_runs ADD COLUMN duration_ms INT NULL AFTER record_type"
                )
        conn.commit()

    @staticmethod
    def _truncate_detail(detail: Any) -> str:
        """Stringify *detail*, capped at 1000 chars to keep rows bounded."""
        if detail is None:
            return ""
        return str(detail)[:1000]

    def record_run(
        self,
        job_name: str,
        module: str,
        direction: str,
        *,
        backend_id: Any = None,
        backend_name: Optional[str] = None,
        record_type: Optional[str] = None,
        duration_ms: Optional[int] = None,
        success: Optional[bool] = None,
        status_code: Optional[int] = None,
        detail: Any = None,
    ) -> None:
        """Insert one run record; DB errors are logged and swallowed."""
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for sync run log")
            return

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO op_ocpi_sync_runs (
                        job_name, module, direction, backend_id, backend_name, record_type,
                        duration_ms, success, status_code, detail
                    ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    """,
                    (
                        job_name,
                        module,
                        direction,
                        backend_id,
                        backend_name,
                        record_type,
                        duration_ms,
                        1 if success else 0,  # None counts as failure
                        status_code,
                        self._truncate_detail(detail),
                    ),
                )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to record sync run")
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def _fetch_runs(
        self,
        modules: Optional[Sequence[str]],
        limit: int,
        *,
        log_context: str,
    ) -> list[dict[str, Any]]:
        """Shared SELECT over op_ocpi_sync_runs, newest first.

        Was previously duplicated verbatim in latest_runs and list_runs.
        Returns [] on any DB failure; datetime created_at values are
        converted to ISO strings.
        """
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB for %s", log_context)
            return []

        try:
            where = []
            params: list[Any] = []
            if modules:
                placeholders = ",".join(["%s"] * len(modules))
                where.append(f"module IN ({placeholders})")
                params.extend(modules)
            where_clause = f"WHERE {' AND '.join(where)}" if where else ""
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT id, job_name, module, direction, backend_id, backend_name, record_type,
                           duration_ms, success, status_code, detail, created_at
                    FROM op_ocpi_sync_runs
                    {where_clause}
                    ORDER BY created_at DESC, id DESC
                    LIMIT %s
                    """,
                    params + [limit],
                )
                rows = cur.fetchall()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to load %s", log_context)
            return []
        finally:
            try:
                conn.close()
            except Exception:
                pass

        entries: list[dict[str, Any]] = []
        for row in rows or []:
            created = row.get("created_at")
            if isinstance(created, datetime):
                row["created_at"] = created.isoformat()
            entries.append(row)
        return entries

    def latest_runs(self, modules: Optional[Sequence[str]] = None, *, limit: int = 200) -> list[dict[str, Any]]:
        """Most recent run per (module, direction, record_type) combination."""
        seen: set[tuple[Any, Any, Any]] = set()
        latest: list[dict[str, Any]] = []
        for row in self._fetch_runs(modules, limit, log_context="sync run listing"):
            key = (row.get("module"), row.get("direction"), row.get("record_type"))
            if key in seen:
                continue
            seen.add(key)
            latest.append(row)
        return latest

    def list_runs(
        self,
        modules: Optional[Sequence[str]] = None,
        *,
        limit: int = 100,
    ) -> list[dict[str, Any]]:
        """Chronological (newest-first) run log, up to *limit* entries."""
        return self._fetch_runs(modules, limit, log_context="sync run logs")


class BackendRepository:
    """Read/write access to configured OCPI peer backends (op_ocpi_backends).

    All DB failures are logged and swallowed; lookups return None/[] so
    callers never crash on DB outages.
    """

    # Column lists shared by the SELECT statements below.
    _BASE_COLUMNS = (
        "backend_id, name, url, remote_versions_url, peer_versions_url, active_version, "
        "token, peer_token, credentials_token, modules, enabled"
    )
    _FULL_COLUMNS = _BASE_COLUMNS + ", last_credentials_status, last_credentials_at"

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)

        # Best-effort schema migration; failures are logged, not fatal.
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for backend setup")
        else:
            try:
                self._ensure_columns(conn)
            finally:
                try:
                    conn.close()
                except Exception:
                    pass

    def _ensure_columns(self, conn) -> None:
        """Add columns introduced after the original table layout, if missing."""
        columns = _table_columns(conn, "op_ocpi_backends")
        # (column name, ALTER statement) for every retrofit we may need.
        migrations: list[tuple[str, str]] = [
            ("peer_token", "ALTER TABLE op_ocpi_backends ADD COLUMN peer_token TEXT NULL AFTER token"),
            ("credentials_token", "ALTER TABLE op_ocpi_backends ADD COLUMN credentials_token TEXT NULL AFTER peer_token"),
            ("remote_versions_url", "ALTER TABLE op_ocpi_backends ADD COLUMN remote_versions_url VARCHAR(1024) NULL AFTER url"),
            ("peer_versions_url", "ALTER TABLE op_ocpi_backends ADD COLUMN peer_versions_url VARCHAR(1024) NULL AFTER remote_versions_url"),
            ("active_version", "ALTER TABLE op_ocpi_backends ADD COLUMN active_version VARCHAR(16) NULL AFTER peer_versions_url"),
            ("last_credentials_status", "ALTER TABLE op_ocpi_backends ADD COLUMN last_credentials_status VARCHAR(255) NULL AFTER enabled"),
            ("last_credentials_at", "ALTER TABLE op_ocpi_backends ADD COLUMN last_credentials_at TIMESTAMP NULL DEFAULT NULL AFTER last_credentials_status"),
        ]
        pending = [stmt for col, stmt in migrations if col not in columns]
        if not pending:
            return

        with conn.cursor() as cur:
            for statement in pending:
                # Each ALTER is attempted independently so one failure does
                # not block the others.
                try:
                    cur.execute(statement)
                except Exception:
                    logging.getLogger("ocpi_api").exception("Failed to apply schema change: %s", statement)
        conn.commit()

    @staticmethod
    def _modules_from_row(row: Mapping[str, Any]) -> set[str]:
        """Parse the comma/semicolon separated modules column into a set.

        Blank entries are dropped, so an empty column yields an empty set
        (interpreted as: the backend serves every module).
        """
        modules_raw = row.get("modules") or ""
        return {m.strip().lower() for m in modules_raw.replace(";", ",").split(",") if m.strip()}

    def get_backend(self, backend_id: Optional[Any]) -> Optional[dict[str, Any]]:
        """Fetch one backend row by id, or the first configured backend when
        backend_id is None. Returns None on error or no match."""
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for backend lookup")
            return None

        try:
            with conn.cursor() as cur:
                if backend_id is None:
                    cur.execute(
                        f"SELECT {self._FULL_COLUMNS} FROM op_ocpi_backends ORDER BY backend_id LIMIT 1"
                    )
                else:
                    cur.execute(
                        f"SELECT {self._FULL_COLUMNS} FROM op_ocpi_backends WHERE backend_id=%s",
                        (backend_id,),
                    )
                row = cur.fetchone()
                return dict(row) if row else None
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to fetch backend %s", backend_id)
            return None
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def find_by_token(self, token: Optional[str]) -> Optional[dict[str, Any]]:
        """Find the backend that owns *token* in any of its token columns."""
        if not token:
            return None
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for backend token lookup")
            return None

        try:
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT {self._FULL_COLUMNS}
                    FROM op_ocpi_backends
                    WHERE token=%s OR peer_token=%s OR credentials_token=%s
                    LIMIT 1
                    """,
                    (token, token, token),
                )
                row = cur.fetchone()
                return dict(row) if row else None
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to find backend by token")
            return None
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def save_credentials_token(self, backend_id: Optional[Any], token: str) -> None:
        """Store the token a peer registered with us (backend_id None = first row)."""
        if not token:
            return
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for credentials token update")
            return

        try:
            with conn.cursor() as cur:
                if backend_id is None:
                    cur.execute(
                        """
                        UPDATE op_ocpi_backends
                        SET credentials_token=%s, last_credentials_at=NOW()
                        ORDER BY backend_id
                        LIMIT 1
                        """,
                        (token,),
                    )
                else:
                    cur.execute(
                        """
                        UPDATE op_ocpi_backends
                        SET credentials_token=%s, last_credentials_at=NOW()
                        WHERE backend_id=%s
                        """,
                        (token, backend_id),
                    )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to store credentials token for backend %s", backend_id)
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def update_credentials_exchange(
        self,
        backend_id: Optional[Any],
        *,
        peer_token: Optional[str],
        peer_url: Optional[str],
        remote_versions_url: Optional[str],
        active_version: Optional[str],
        status: Optional[str],
        clear_missing: bool = False,
    ) -> None:
        """Persist the result of an OCPI credentials handshake.

        With clear_missing=False (default) None values keep the existing
        column via COALESCE; with clear_missing=True None values overwrite
        (clear) the columns.
        """
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for credentials result")
            return

        try:
            with conn.cursor() as cur:
                peer_token_expr = "%s" if clear_missing else "COALESCE(%s, peer_token)"
                peer_url_expr = "%s" if clear_missing else "COALESCE(%s, peer_versions_url)"
                remote_versions_expr = "%s" if clear_missing else "COALESCE(%s, remote_versions_url)"
                active_version_expr = "%s" if clear_missing else "COALESCE(%s, active_version)"
                cur.execute(
                    f"""
                    UPDATE op_ocpi_backends
                    SET peer_token={peer_token_expr},
                        peer_versions_url={peer_url_expr},
                        remote_versions_url={remote_versions_expr},
                        active_version={active_version_expr},
                        last_credentials_status=%s,
                        last_credentials_at=NOW()
                    WHERE backend_id=%s
                    """,
                    (
                        peer_token,
                        peer_url,
                        remote_versions_url,
                        active_version,
                        status,
                        backend_id or self._default_backend_id(conn),
                    ),
                )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to update credentials result for backend %s", backend_id)
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def _default_backend_id(self, conn) -> Optional[int]:
        """Return the lowest configured backend_id, or None when the table is empty."""
        with conn.cursor() as cur:
            cur.execute("SELECT backend_id FROM op_ocpi_backends ORDER BY backend_id LIMIT 1")
            row = cur.fetchone()
            return row.get("backend_id") if row else None

    def list_backends(self, module: Optional[str] = None) -> list[dict[str, Any]]:
        """List enabled backends, optionally filtered to those serving *module*.

        Fix: module filtering now uses _modules_from_row, which drops blank
        entries. The previous inline parser produced {''} for an empty
        modules column, so the `not modules` catch-all never matched and
        backends meant to serve every module were wrongly excluded.
        """
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for backends")
            return []

        try:
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT {self._BASE_COLUMNS}
                    FROM op_ocpi_backends
                    WHERE enabled = 1
                    """
                )
                rows = cur.fetchall()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to load OCPI backends")
            return []
        finally:
            try:
                conn.close()
            except Exception:
                pass

        if module:
            wanted = module.lower()
            filtered: list[dict[str, Any]] = []
            for row in rows:
                modules = self._modules_from_row(row)
                # Empty modules set = backend serves all modules.
                if not modules or wanted in modules:
                    filtered.append(row)
            return filtered
        return rows


class HandshakeRepository:
    """Persists OCPI credentials-handshake events in ``op_ocpi_handshakes``.

    All operations are best-effort: database failures are logged and swallowed
    so a broken audit trail never interrupts the handshake flow itself.
    """

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for handshake setup")
        else:
            try:
                with conn.cursor() as cur:
                    cur.execute(
                        """
                        CREATE TABLE IF NOT EXISTS op_ocpi_handshakes (
                            id INT AUTO_INCREMENT PRIMARY KEY,
                            backend_id INT NOT NULL,
                            state ENUM('open','closed') NOT NULL DEFAULT 'open',
                            status VARCHAR(255) NOT NULL,
                            detail TEXT NULL,
                            token TEXT NULL,
                            peer_url VARCHAR(1024) NULL,
                            created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                            updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                            KEY idx_backend_state (backend_id, state),
                            CONSTRAINT fk_ocpi_handshake_backend FOREIGN KEY (backend_id) REFERENCES op_ocpi_backends(backend_id) ON DELETE CASCADE
                        ) CHARACTER SET utf8mb4
                        """
                    )
                conn.commit()
            except Exception:
                # Previously a DDL failure escaped the constructor; log and
                # continue, matching the other repository classes in this file.
                logging.getLogger("ocpi_api").exception("Failed to initialize handshake table")
            finally:
                try:
                    conn.close()
                except Exception:
                    pass

    def record(
        self,
        backend_id: Optional[Any],
        *,
        state: str,
        status: str,
        detail: Optional[str] = None,
        token: Optional[str] = None,
        peer_url: Optional[str] = None,
    ) -> None:
        """Insert one handshake event row. No-op when backend_id is None; errors are logged."""
        if backend_id is None:
            return
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for handshake log")
            return
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO op_ocpi_handshakes (backend_id, state, status, detail, token, peer_url)
                    VALUES (%s, %s, %s, %s, %s, %s)
                    """,
                    (backend_id, state, status, detail, token, peer_url),
                )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to record handshake event")
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def list(self, backend_id: Optional[Any] = None, *, limit: int = 200) -> list[dict[str, Any]]:
        """Return the most recent handshake events (newest first), optionally for one backend.

        Returns an empty list on any DB error.
        """
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for handshake list")
            return []

        try:
            where = []
            params: list[Any] = []
            if backend_id is not None:
                where.append("backend_id=%s")
                params.append(backend_id)
            where_clause = f"WHERE {' AND '.join(where)}" if where else ""
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT id, backend_id, state, status, detail, token, peer_url, created_at, updated_at
                    FROM op_ocpi_handshakes
                    {where_clause}
                    ORDER BY updated_at DESC, id DESC
                    LIMIT %s
                    """,
                    params + [limit],
                )
                return cur.fetchall()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to load handshake list")
            return []
        finally:
            try:
                conn.close()
            except Exception:
                pass


class TariffRepository:
    """Read-only facade over TariffService whose accessors never raise to callers."""

    def __init__(
        self,
        ocpi_cfg: Mapping[str, Any],
        mysql_cfg: Optional[Mapping[str, Any]] = None,
        tariff_service: Optional[TariffService] = None,
    ):
        self.mysql_cfg = _mysql_config(mysql_cfg or {})
        fallback = ocpi_cfg.get("tariffs") or []
        if tariff_service is None:
            tariff_service = TariffService(self.mysql_cfg, fallback_tariffs=fallback)
        self.service = tariff_service
        self.service.ensure_schema()

    def list_tariffs(
        self,
        *,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None,
        offset: int = 0,
        limit: int = 50,
    ) -> tuple[list[dict[str, Any]], int]:
        """Return (tariffs, total) via the service; on failure log and return ([], 0)."""
        try:
            result = self.service.list_tariffs(
                date_from=date_from,
                date_to=date_to,
                offset=offset,
                limit=limit,
            )
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to read tariffs")
            return [], 0
        return result

    def get_tariff(self, tariff_id: str) -> Optional[dict[str, Any]]:
        """Return one tariff by id, or None when missing or on lookup failure."""
        try:
            tariff = self.service.get_tariff(tariff_id)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to load tariff %s", tariff_id)
            return None
        return tariff


class CommandResultRepository:
    """Stores the outcome of OCPI command invocations in ``op_ocpi_command_results``.

    Writes are best-effort: every DB failure is logged and swallowed.
    """

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)

        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for command table setup")
            return

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    CREATE TABLE IF NOT EXISTS op_ocpi_command_results (
                        id INT AUTO_INCREMENT PRIMARY KEY,
                        backend_id INT NULL,
                        command VARCHAR(64) NOT NULL,
                        module VARCHAR(64) NOT NULL,
                        ocpi_command_id VARCHAR(64) NULL,
                        success TINYINT(1) NOT NULL DEFAULT 0,
                        response_status VARCHAR(255) NULL,
                        response_body TEXT NULL,
                        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                        KEY idx_backend_module (backend_id, module)
                    ) CHARACTER SET utf8mb4
                    """
                )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to initialize command result table")
        finally:
            try:
                conn.close()
            except Exception:
                pass

    @staticmethod
    def _serialize_body(body: Any) -> Any:
        """Return dict/list bodies as JSON text (str() fallback); pass others through."""
        if not isinstance(body, (dict, list)):
            return body
        try:
            return json.dumps(body, ensure_ascii=False)
        except Exception:
            return str(body)

    def record(
        self,
        *,
        backend_id: Optional[Any],
        command: str,
        module: str,
        ocpi_command_id: str,
        success: bool,
        response_status: Optional[str],
        response_body: Any,
    ) -> None:
        """Insert one command-result row; DB errors are logged, never raised."""
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for command log")
            return

        try:
            values = (
                backend_id,
                command,
                module,
                ocpi_command_id,
                1 if success else 0,
                response_status,
                self._serialize_body(response_body),
            )
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO op_ocpi_command_results (backend_id, command, module, ocpi_command_id, success, response_status, response_body)
                    VALUES (%s, %s, %s, %s, %s, %s, %s)
                    """,
                    values,
                )
            conn.commit()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to record OCPI command result")
        finally:
            try:
                conn.close()
            except Exception:
                pass


class CommandQueueRepository:
    """Tracks asynchronous OCPI commands in ``op_ocpi_command_queue``.

    Rows move through queued -> in_progress -> succeeded/failed/cancelled.
    All DB operations are best-effort: failures are logged and a neutral
    default (None / empty list) is returned instead of raising.
    """

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)
        self._log = logging.getLogger("ocpi_api")

        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            self._log.exception("Failed to open DB connection for command queue setup")
            return

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    CREATE TABLE IF NOT EXISTS op_ocpi_command_queue (
                        id INT AUTO_INCREMENT PRIMARY KEY,
                        backend_id INT NULL,
                        ocpi_command_id VARCHAR(64) NOT NULL,
                        ocpi_version VARCHAR(16) NULL,
                        command VARCHAR(64) NOT NULL,
                        status ENUM('queued','in_progress','succeeded','failed','cancelled') NOT NULL DEFAULT 'queued',
                        failure_count INT NOT NULL DEFAULT 0,
                        attempt_count INT NOT NULL DEFAULT 0,
                        station_id VARCHAR(255) NULL,
                        evse_id VARCHAR(255) NULL,
                        connector_id INT NULL,
                        transaction_id VARCHAR(64) NULL,
                        reservation_id VARCHAR(64) NULL,
                        ocpp_status VARCHAR(255) NULL,
                        ocpp_message TEXT NULL,
                        payload TEXT NULL,
                        last_error TEXT NULL,
                        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                        KEY idx_status (status),
                        KEY idx_backend_command (backend_id, command)
                    ) CHARACTER SET utf8mb4
                    """
                )
                # Lightweight migration: older deployments lack ocpi_version.
                columns = _table_columns(conn, "op_ocpi_command_queue")
                if "ocpi_version" not in columns:
                    cur.execute(
                        "ALTER TABLE op_ocpi_command_queue ADD COLUMN ocpi_version VARCHAR(16) NULL AFTER ocpi_command_id"
                    )
            conn.commit()
        except Exception:
            self._log.exception("Failed to initialize OCPI command queue table")
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def enqueue(
        self,
        *,
        backend_id: Optional[Any],
        command: str,
        ocpi_command_id: str,
        ocpi_version: Optional[str],
        payload: Mapping[str, Any],
    ) -> Optional[int]:
        """Insert a new queued command and return its auto-increment id (None on failure)."""
        # Derive the indexed columns from the payload before touching the DB
        # so a misbehaving payload can never leak an open connection.
        station_id = payload.get("station_id") or payload.get("chargepoint_id")
        evse_id = payload.get("evse_id") or payload.get("evse_uid") or payload.get("location_id")
        connector_id = payload.get("connector_id") or payload.get("connectorId")
        transaction_id = payload.get("session_id") or payload.get("transaction_id")
        reservation_id = payload.get("reservation_id")
        try:
            connector_id = int(connector_id) if connector_id not in (None, "") else None
        except (TypeError, ValueError):
            connector_id = None

        try:
            serialized_payload = json.dumps(payload, ensure_ascii=False)
        except Exception:
            serialized_payload = str(payload)

        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            self._log.exception("Failed to open DB connection for command queue insert")
            return None

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO op_ocpi_command_queue
                        (backend_id, ocpi_command_id, ocpi_version, command, status, payload, station_id, evse_id, connector_id, transaction_id, reservation_id)
                    VALUES (%s, %s, %s, %s, 'queued', %s, %s, %s, %s, %s, %s)
                    """,
                    (
                        backend_id,
                        ocpi_command_id,
                        ocpi_version,
                        command.upper(),
                        serialized_payload,
                        station_id,
                        evse_id,
                        connector_id,
                        transaction_id,
                        reservation_id,
                    ),
                )
                # Capture while the cursor is still open; the previous code
                # read cur.lastrowid after the context manager had closed it.
                new_id = cur.lastrowid
            conn.commit()
            return new_id
        except Exception:
            self._log.exception("Failed to enqueue OCPI command")
            return None
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def _update_entry(
        self,
        identifier: Any,
        *,
        status: Optional[str] = None,
        failure_increment: bool = False,
        attempt_increment: bool = False,
        ocpp_status: Optional[str] = None,
        ocpp_message: Optional[str] = None,
        reservation_id: Optional[Any] = None,
        transaction_id: Optional[Any] = None,
        last_error: Optional[str] = None,
    ) -> None:
        """Apply the requested column updates to the row matching `identifier`.

        `identifier` may be the numeric row id or the OCPI command id; the
        WHERE clause matches either. A call with no updates is a no-op.
        """
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            self._log.exception("Failed to open DB connection for command queue update")
            return

        updates = []
        params: list[Any] = []
        if status:
            updates.append("status=%s")
            params.append(status)
        if failure_increment:
            updates.append("failure_count=failure_count+1")
        if attempt_increment:
            updates.append("attempt_count=attempt_count+1")
        if ocpp_status is not None:
            updates.append("ocpp_status=%s")
            params.append(ocpp_status)
        if ocpp_message is not None:
            updates.append("ocpp_message=%s")
            params.append(ocpp_message)
        if reservation_id is not None:
            updates.append("reservation_id=%s")
            params.append(reservation_id)
        if transaction_id is not None:
            updates.append("transaction_id=%s")
            params.append(transaction_id)
        if last_error is not None:
            updates.append("last_error=%s")
            params.append(last_error)
        if not updates:
            try:
                conn.close()
            except Exception:
                pass
            return

        # The WHERE clause has two placeholders (id and ocpi_command_id), so
        # the identifier is bound twice. Previously this happened via an
        # append plus a second list concatenation, which obscured the intent.
        params.extend([identifier, identifier])
        try:
            with conn.cursor() as cur:
                cur.execute(
                    f"UPDATE op_ocpi_command_queue SET {', '.join(updates)} WHERE id=%s OR ocpi_command_id=%s",
                    params,
                )
            conn.commit()
        except Exception:
            self._log.exception("Failed to update OCPI command queue entry")
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def mark_in_progress(self, identifier: Any) -> None:
        """Transition the entry to in_progress and count one attempt."""
        self._update_entry(identifier, status="in_progress", attempt_increment=True)

    def mark_success(
        self,
        identifier: Any,
        *,
        ocpp_status: Optional[str],
        ocpp_message: Optional[str],
        reservation_id: Optional[Any] = None,
        transaction_id: Optional[Any] = None,
    ) -> None:
        """Transition the entry to succeeded, recording the OCPP outcome."""
        self._update_entry(
            identifier,
            status="succeeded",
            ocpp_status=ocpp_status,
            ocpp_message=ocpp_message,
            reservation_id=reservation_id,
            transaction_id=transaction_id,
        )

    def mark_failure(
        self,
        identifier: Any,
        *,
        ocpp_status: Optional[str],
        ocpp_message: Optional[str],
        error: Optional[str] = None,
    ) -> None:
        """Transition the entry to failed, incrementing failure_count."""
        self._update_entry(
            identifier,
            status="failed",
            failure_increment=True,
            ocpp_status=ocpp_status,
            ocpp_message=ocpp_message,
            last_error=error,
        )

    def cancel(self, identifier: Any) -> None:
        """Transition the entry to cancelled."""
        self._update_entry(identifier, status="cancelled")

    def get(self, identifier: Any) -> Optional[dict[str, Any]]:
        """Return one queue row matched by row id or OCPI command id, or None."""
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            self._log.exception("Failed to open DB connection for command queue lookup")
            return None

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id, backend_id, ocpi_command_id, ocpi_version, command, status, failure_count, attempt_count,
                           station_id, evse_id, connector_id, transaction_id, reservation_id,
                           ocpp_status, ocpp_message, payload, last_error, created_at, updated_at
                    FROM op_ocpi_command_queue
                    WHERE id=%s OR ocpi_command_id=%s
                    LIMIT 1
                    """,
                    (identifier, identifier),
                )
                return cur.fetchone()
        except Exception:
            self._log.exception("Failed to load OCPI command queue entry")
            return None
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def list(
        self,
        *,
        backend_id: Optional[Any] = None,
        status: Optional[str] = None,
        limit: int = 200,
    ) -> list[dict[str, Any]]:
        """Return the most recently updated queue rows, optionally filtered by backend/status."""
        try:
            conn = _connect_db(self.mysql_cfg)
        except Exception:
            self._log.exception("Failed to open DB connection for command queue list")
            return []

        try:
            where: list[str] = []
            params: list[Any] = []
            if backend_id is not None:
                where.append("backend_id=%s")
                params.append(backend_id)
            if status:
                where.append("status=%s")
                params.append(status)
            where_clause = f"WHERE {' AND '.join(where)}" if where else ""
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT id, backend_id, ocpi_command_id, ocpi_version, command, status, failure_count, attempt_count,
                           station_id, evse_id, connector_id, transaction_id, reservation_id,
                           ocpp_status, ocpp_message, payload, last_error, created_at, updated_at
                    FROM op_ocpi_command_queue
                    {where_clause}
                    ORDER BY updated_at DESC, id DESC
                    LIMIT %s
                    """,
                    params + [limit],
                )
                return cur.fetchall()
        except Exception:
            self._log.exception("Failed to list OCPI command queue entries")
            return []
        finally:
            try:
                conn.close()
            except Exception:
                pass


class OcpiDataRepository:
    """Read/write access to exported OCPI objects and tokens.

    Export records live in ``op_ocpi_exports`` (JSON payload per row); token
    operations are delegated to TokenService. Accessors log DB failures and
    return a neutral default rather than raising.
    """

    def __init__(self, mysql_cfg: Mapping[str, Any]):
        self.mysql_cfg = _mysql_config(mysql_cfg)
        self.token_service = TokenService(lambda: _connect_db(self.mysql_cfg))

    def _connect(self):
        """Open a fresh DB connection using this repository's config."""
        return _connect_db(self.mysql_cfg)

    @staticmethod
    def _parse_payload(raw_payload: Any) -> dict[str, Any]:
        """Decode a stored payload (JSON string or mapping) into a plain dict.

        Returns {} for invalid JSON or non-mapping values.
        """
        if isinstance(raw_payload, str):
            try:
                raw_payload = json.loads(raw_payload)
            except Exception:
                raw_payload = {}
        if not isinstance(raw_payload, Mapping):
            return {}
        return dict(raw_payload)

    def _table_exists(self, conn, table: str) -> bool:
        """True when `table` exists in the connected schema."""
        with conn.cursor() as cur:
            cur.execute("SHOW TABLES LIKE %s", (table,))
            return cur.fetchone() is not None

    def _apply_time_filters(
        self,
        column: str,
        date_from: Optional[datetime],
        date_to: Optional[datetime],
        where: list[str],
        params: list[Any],
    ) -> None:
        """Append inclusive range conditions on `column` to the WHERE parts in place."""
        if date_from:
            where.append(f"{column} >= %s")
            params.append(date_from)
        if date_to:
            where.append(f"{column} <= %s")
            params.append(date_to)

    def list_exports(
        self,
        record_type: str,
        *,
        backend_id: Optional[Any] = None,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None,
        offset: int = 0,
        limit: int = 50,
    ) -> tuple[list[dict[str, Any]], int]:
        """Return (payloads, total_count) for one record type, newest first.

        Payloads get an ``id`` (from transaction_id/row id) and a
        ``last_updated`` timestamp filled in when missing.
        """
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB for %s list", record_type)
            return [], 0

        try:
            if not self._table_exists(conn, "op_ocpi_exports"):
                return [], 0

            where = ["record_type=%s"]
            params: list[Any] = [record_type]
            if backend_id is not None:
                where.append("backend_id=%s")
                params.append(backend_id)
            self._apply_time_filters("created_at", date_from, date_to, where, params)
            where_clause = " AND ".join(where)

            with conn.cursor() as cur:
                cur.execute(
                    f"SELECT COUNT(*) AS total FROM op_ocpi_exports WHERE {where_clause}",
                    params,
                )
                count_row = cur.fetchone() or {"total": 0}
                total = int(count_row.get("total") or 0)

                cur.execute(
                    f"""
                    SELECT id, transaction_id, payload, created_at
                    FROM op_ocpi_exports
                    WHERE {where_clause}
                    ORDER BY created_at DESC
                    LIMIT %s OFFSET %s
                    """,
                    params + [limit, offset],
                )
                rows = cur.fetchall()

            entries: list[dict[str, Any]] = []
            for row in rows:
                payload = self._parse_payload(row.get("payload"))
                identifier = payload.get("id") or payload.get("uid")
                if not identifier:
                    identifier = row.get("transaction_id") or row.get("id")
                    payload["id"] = identifier
                if row.get("created_at"):
                    payload.setdefault("last_updated", _timestamp_str(row.get("created_at")))
                entries.append(payload)
            return entries, total
        except Exception:
            # Previously query errors escaped to the caller; match the
            # log-and-return-default behavior used everywhere else here.
            logging.getLogger("ocpi_api").exception("Failed to query %s exports", record_type)
            return [], 0
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def _candidate_rows_for_item(
        self,
        conn,
        record_type: str,
        item_id: str,
        backend_id: Optional[Any],
    ) -> list[Mapping[str, Any]]:
        """Return recent export rows whose transaction_id or row id equals `item_id`."""
        where = ["record_type=%s"]
        params: list[Any] = [record_type]
        if backend_id is not None:
            where.append("backend_id=%s")
            params.append(backend_id)
        where_clause = " AND ".join(where)
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT id, transaction_id, payload, created_at
                FROM op_ocpi_exports
                WHERE {where_clause} AND (transaction_id=%s OR id=%s)
                ORDER BY created_at DESC
                LIMIT 50
                """,
                params + [item_id, item_id],
            )
            rows = cur.fetchall()
        return rows

    def get_export_item(
        self, record_type: str, item_id: str, *, backend_id: Optional[Any] = None
    ) -> Optional[dict[str, Any]]:
        """Return the payload for one exported item, or None if not found.

        First tries rows keyed by transaction/row id, then scans the 200 most
        recent rows for a matching payload id. DB errors are logged and yield
        None.
        """
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB for %s lookup", record_type)
            return None

        try:
            if not self._table_exists(conn, "op_ocpi_exports"):
                return None
            rows = self._candidate_rows_for_item(conn, record_type, item_id, backend_id)
            for row in rows:
                payload = self._parse_payload(row.get("payload"))
                identifier = payload.get("id") or payload.get("uid")
                if not identifier:
                    identifier = row.get("transaction_id") or row.get("id")
                if str(identifier) == str(item_id):
                    if row.get("created_at"):
                        payload.setdefault("last_updated", _timestamp_str(row.get("created_at")))
                    return payload

            # Fallback: scan most recent entries for matching payload id
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id, transaction_id, payload, created_at
                    FROM op_ocpi_exports
                    WHERE record_type=%s
                    ORDER BY created_at DESC
                    LIMIT 200
                    """,
                    (record_type,),
                )
                rows = cur.fetchall()
            for row in rows:
                payload = self._parse_payload(row.get("payload"))
                identifier = payload.get("id") or payload.get("uid")
                if str(identifier) == str(item_id):
                    if row.get("created_at"):
                        payload.setdefault("last_updated", _timestamp_str(row.get("created_at")))
                    return payload
            return None
        except Exception:
            # Previously query errors escaped to the caller; log and return
            # None like the other accessors in this class.
            logging.getLogger("ocpi_api").exception("Failed to look up %s export item", record_type)
            return None
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def list_tokens(
        self,
        *,
        date_from: Optional[datetime] = None,
        date_to: Optional[datetime] = None,
        offset: int = 0,
        limit: int = 50,
    ) -> tuple[list[dict[str, Any]], int]:
        """Return (OCPI-serialized tokens, total) via TokenService."""
        tokens, total = self.token_service.list_tokens(
            date_from=date_from,
            date_to=date_to,
            offset=offset,
            limit=limit,
        )
        return [TokenService.serialize_ocpi(t) for t in tokens], total

    def list_dead_letter_exports(
        self,
        *,
        record_types: Optional[Sequence[str]] = None,
        limit: int = 100,
    ) -> list[dict[str, Any]]:
        """Return recent failed/retryable export rows, optionally limited to record types."""
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB for dead-letter list")
            return []

        try:
            if not self._table_exists(conn, "op_ocpi_exports"):
                return []
            where = ["(success = 0 OR should_retry = 1)"]
            params: list[Any] = []
            if record_types:
                placeholders = ",".join(["%s"] * len(record_types))
                where.append(f"record_type IN ({placeholders})")
                params.extend(record_types)
            where_clause = " AND ".join(where)
            with conn.cursor() as cur:
                cur.execute(
                    f"""
                    SELECT id, record_type, station_id, backend_id, backend_name, transaction_id,
                           retry_count, should_retry, success, response_status, response_body, created_at
                    FROM op_ocpi_exports
                    WHERE {where_clause}
                    ORDER BY created_at DESC
                    LIMIT %s
                    """,
                    params + [limit],
                )
                rows = cur.fetchall()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to query dead-letter exports")
            return []
        finally:
            try:
                conn.close()
            except Exception:
                pass

        entries: list[dict[str, Any]] = []
        for row in rows or []:
            created = row.get("created_at")
            if isinstance(created, datetime):
                row["created_at"] = created.isoformat()
            entries.append(row)
        return entries

    def get_token(self, uid: str) -> Optional[dict[str, Any]]:
        """Return one OCPI-serialized token by uid, or None when unknown."""
        token = self.token_service.get_token(uid)
        return TokenService.serialize_ocpi(token) if token else None

    def upsert_token(self, payload: Mapping[str, Any]) -> Optional[dict[str, Any]]:
        """Normalize and store an incoming OCPI token; return the stored form or None."""
        if not isinstance(payload, Mapping):
            return None
        token_uid = payload.get("uid")
        normalized = TokenService.normalize_payload(payload) or {}
        if token_uid and not normalized.get("uid"):
            normalized["uid"] = token_uid
        # auth_id is required downstream; fall back to contract id or the uid.
        normalized.setdefault("auth_id", normalized.get("contract_id") or normalized.get("uid"))
        stored = self.token_service.upsert_tokens([normalized])
        if stored:
            return self.get_token(str(normalized.get("uid")))
        return None

    def delete_token(self, uid: str) -> bool:
        """Delete a token by uid; True when a row was removed."""
        return self.token_service.delete_token(uid)

    def authorize_token(self, uid: str) -> tuple[bool, Optional[dict[str, Any]]]:
        """Return (is_valid, serialized_token); (False, None) when the uid is unknown."""
        token = self.token_service.get_token(uid)
        if not token:
            return False, None
        return bool(token.get("valid")), TokenService.serialize_ocpi(token)


def _chunked(items: Sequence[Any], size: int) -> Iterable[Sequence[Any]]:
    for idx in range(0, len(items), max(size, 1)):
        yield items[idx : idx + max(size, 1)]


class LocationSyncJob:
    """Background job that PUTs location payloads to OCPI peer backends.

    Active only when the configured locations mode is "push".  A hash of the
    last successfully delivered payload is kept per (backend, location) so
    unchanged locations are skipped; the hash is recorded only after a
    successful push, so failed pushes are retried on the next cycle.
    """

    def __init__(
        self,
        repo: LocationRepository,
        ocpi_cfg: Mapping[str, Any],
        *,
        metrics: Optional[JobMetrics] = None,
        interval: Optional[int] = None,
        sync_run_repo: Optional["SyncRunRepository"] = None,
    ):
        self.repo = repo
        self.ocpi_cfg = ocpi_cfg
        self.metrics = metrics
        self.mode = str(ocpi_cfg.get("locations_mode") or DEFAULT_LOCATIONS_MODE).lower()
        self.interval = int(
            interval
            or ocpi_cfg.get("locations_sync_interval_seconds", DEFAULT_SYNC_INTERVAL_SECONDS)
        )
        self.sync_run_repo = sync_run_repo
        self._stop = threading.Event()
        self._thread: Optional[threading.Thread] = None
        # (backend_id, location_id) -> sha256 hex digest of last pushed payload.
        self._hash_cache: dict[Tuple[int, str], str] = {}

    def start(self) -> None:
        """Start the worker thread; no-op unless mode is "push" or already running."""
        if self.mode != "push":
            logging.getLogger("ocpi_api").info(
                "Skipping location sync job because mode is %s", self.mode
            )
            return
        if self._thread and self._thread.is_alive():
            return
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()
        logging.getLogger("ocpi_api").info(
            "Started location sync job with interval %s seconds", self.interval
        )

    def stop(self) -> None:
        """Signal the loop to stop and briefly join the worker thread."""
        self._stop.set()
        if self._thread:
            self._thread.join(timeout=1)

    def is_running(self) -> bool:
        """Return True while the worker thread is alive."""
        return bool(self._thread and self._thread.is_alive())

    def _run(self):
        # Event.wait doubles as the interval sleep and the stop signal: it
        # returns True (ending the loop) once stop() is called.
        while not self._stop.wait(self.interval):
            try:
                self.sync_once()
            except Exception:
                logging.getLogger("ocpi_api").exception(
                    "Unexpected error during location sync"
                )

    def _should_push(self, backend_row: Mapping[str, Any]) -> bool:
        """True when the backend is enabled and lists 'locations' in its modules."""
        modules_raw = backend_row.get("modules") or ""
        modules = [m.strip().lower() for m in modules_raw.split(",")]
        return "locations" in modules and _as_bool(backend_row.get("backend_enabled", 0))

    def _hash_payload(self, payload: Mapping[str, Any]) -> str:
        """Stable digest of *payload* (sorted-key JSON) used for change detection."""
        serialized = json.dumps(payload, sort_keys=True).encode("utf-8")
        return sha256(serialized).hexdigest()

    def _push_location(self, backend: Mapping[str, Any], payload: Mapping[str, Any]) -> bool:
        """PUT one location payload to *backend*; log, audit and return success."""
        backend_id = backend.get("backend_id")
        location_id = payload.get("id")
        start_time = time.perf_counter()
        status_code = None
        response_body = None
        success = False
        try:
            headers = {"Content-Type": "application/json"}
            token = _backend_auth_token(backend)
            if token:
                # Avoid double-prefixing tokens that already carry "Token ".
                headers["Authorization"] = f"Token {token}" if "Token " not in token else token
            url = (backend.get("url") or "").rstrip("/") + f"/locations/{location_id}"
            response = requests.put(url, json=payload, headers=headers, timeout=10)
            status_code = response.status_code
            response_body = response.text[:1000]
            success = response.status_code < 400
            self.repo.log_sync_result(
                backend_id,
                location_id,
                None,
                success,
                status_code=status_code,
                error_message=None if success else response.text,
            )
            if not success:
                logging.getLogger("ocpi_api").warning(
                    "Location push to backend %s failed with status %s", backend_id, status_code
                )
            elif self.metrics:
                self.metrics.location_pushes += 1
        except Exception as exc:
            logging.getLogger("ocpi_api").exception(
                "Location push to backend %s failed", backend_id
            )
            response_body = str(exc)
            self.repo.log_sync_result(
                backend_id,
                location_id,
                None,
                False,
                error_message=str(exc),
            )
        finally:
            duration_ms = int((time.perf_counter() - start_time) * 1000)
            if self.sync_run_repo:
                self.sync_run_repo.record_run(
                    "location_sync",
                    "locations",
                    "push",
                    backend_id=backend_id,
                    backend_name=backend.get("name"),
                    record_type="location",
                    duration_ms=duration_ms,
                    success=success,
                    status_code=status_code,
                    detail=response_body,
                )
        return success

    def sync_once(self) -> None:
        """Push every changed location to every eligible backend once."""
        if self.mode != "push":
            return
        for record in self.repo.location_records():
            payload = record.get("location") or {}
            location_id = payload.get("id")
            # Hash is identical for every backend; compute it once per location.
            payload_hash = self._hash_payload(payload)
            for backend in record.get("backends") or []:
                backend_id = backend.get("backend_id")
                if backend_id is None or not self._should_push(backend):
                    continue
                cache_key = (int(backend_id), str(location_id))
                if self._hash_cache.get(cache_key) == payload_hash:
                    continue
                # Bugfix: record the hash only after a successful push so a
                # failed push is retried on the next cycle (previously the
                # cache was updated unconditionally before pushing).
                if self._push_location(backend, payload):
                    self._hash_cache[cache_key] = payload_hash


class TariffSyncJob:
    """Background job that periodically pushes all known tariffs to OCPI backends.

    Tariffs are sent in batches of ``batch_size`` via HTTP PUT to every
    backend that has the ``tariffs`` module enabled.
    """

    def __init__(
        self,
        tariff_repo: TariffRepository,
        backend_repo: BackendRepository,
        *,
        interval: int = DEFAULT_TARIFF_SYNC_INTERVAL_SECONDS,
        batch_size: int = 50,
        metrics: Optional[JobMetrics] = None,
        sync_run_repo: Optional["SyncRunRepository"] = None,
    ):
        self.tariff_repo = tariff_repo
        self.backend_repo = backend_repo
        self.interval = interval
        self.batch_size = batch_size
        self.metrics = metrics
        self.sync_run_repo = sync_run_repo
        self._stop = threading.Event()
        self._thread: Optional[threading.Thread] = None

    def start(self) -> None:
        """Start the worker thread (no-op if already running)."""
        if self._thread and self._thread.is_alive():
            return
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()
        logging.getLogger("ocpi_api").info(
            "Started tariff sync job with interval %s seconds", self.interval
        )

    def stop(self) -> None:
        """Signal the loop to stop and briefly join the worker thread."""
        self._stop.set()
        if self._thread:
            self._thread.join(timeout=1)

    def is_running(self) -> bool:
        """Return True while the worker thread is alive."""
        return bool(self._thread and self._thread.is_alive())

    def _run(self) -> None:
        # Event.wait doubles as the interval sleep and the stop signal.
        while not self._stop.wait(self.interval):
            try:
                self.sync_once()
            except Exception:
                logging.getLogger("ocpi_api").exception(
                    "Unexpected error during tariff sync"
                )

    def _push_tariff(self, backend: Mapping[str, Any], payload: Sequence[Mapping[str, Any]]):
        """PUT one batch of tariffs to *backend* and record the outcome."""
        start_time = time.perf_counter()
        status_code = None
        response_body = None
        success = False
        try:
            headers = {"Content-Type": "application/json"}
            token = _backend_auth_token(backend)
            if token:
                # Avoid double-prefixing tokens that already carry "Token ".
                headers["Authorization"] = f"Token {token}" if "Token " not in token else token
            url = (backend.get("url") or "").rstrip("/") + "/tariffs"
            response = requests.put(url, json=payload, headers=headers, timeout=10)
            status_code = response.status_code
            response_body = response.text[:1000]
            success = response.status_code < 400
            if not success:
                logging.getLogger("ocpi_api").warning(
                    "Tariff push failed for backend %s with status %s", backend.get("backend_id"), status_code
                )
            elif self.metrics:
                self.metrics.tariff_pushes += 1
        except Exception as exc:
            logging.getLogger("ocpi_api").exception(
                "Tariff push failed for backend %s", backend.get("backend_id")
            )
            # Bugfix: record the actual exception text (previously a generic
            # placeholder), matching LocationSyncJob._push_location so the
            # sync-run detail column is diagnosable.
            response_body = str(exc)
        finally:
            duration_ms = int((time.perf_counter() - start_time) * 1000)
            if self.sync_run_repo:
                self.sync_run_repo.record_run(
                    "tariff_sync",
                    "tariffs",
                    "push",
                    backend_id=backend.get("backend_id"),
                    backend_name=backend.get("name"),
                    record_type="tariff",
                    duration_ms=duration_ms,
                    success=success,
                    status_code=status_code,
                    detail=response_body,
                )

    def sync_once(self) -> None:
        """Push the full tariff list, in batches, to every tariff-enabled backend."""
        tariffs, _ = self.tariff_repo.list_tariffs()
        if not tariffs:
            return

        backends = self.backend_repo.list_backends(module="tariffs")
        if not backends:
            logging.getLogger("ocpi_api").debug("No OCPI backends enabled for tariffs")
            return

        for backend in backends:
            for chunk in _chunked(tariffs, self.batch_size):
                self._push_tariff(backend, chunk)


class ExportRetryJob:
    """Background job that re-sends failed session/CDR exports to OCPI backends.

    Rows in ``op_ocpi_exports`` with ``success = 0 AND should_retry = 1`` are
    re-posted to their backend until they succeed or ``max_attempts`` is
    reached.  Individual rows can also be replayed on demand via
    :meth:`retry_export`.
    """

    def __init__(
        self,
        mysql_cfg: Mapping[str, Any],
        ocpi_cfg: Mapping[str, Any],
        *,
        interval: int = DEFAULT_RETRY_INTERVAL_SECONDS,
        batch_size: int = DEFAULT_RETRY_BATCH_SIZE,
        max_attempts: int = DEFAULT_RETRY_MAX_ATTEMPTS,
        metrics: Optional[JobMetrics] = None,
        sync_run_repo: Optional["SyncRunRepository"] = None,
    ):
        self.mysql_cfg = _mysql_config(mysql_cfg)
        self.ocpi_cfg = ocpi_cfg
        self.interval = interval
        self.batch_size = batch_size
        self.max_attempts = max_attempts
        self.metrics = metrics
        self.sync_run_repo = sync_run_repo
        self._stop = threading.Event()
        self._thread: Optional[threading.Thread] = None

    def start(self) -> None:
        """Start the background worker thread (no-op if already running)."""
        if self._thread and self._thread.is_alive():
            return
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()
        logging.getLogger("ocpi_api").info(
            "Started export retry job with interval %s seconds", self.interval
        )

    def stop(self) -> None:
        """Signal the worker loop to stop and briefly join the thread."""
        self._stop.set()
        if self._thread:
            self._thread.join(timeout=1)

    def _run(self):
        # Event.wait doubles as the sleep between cycles and the stop signal:
        # it returns True (ending the loop) once stop() is called.
        while not self._stop.wait(self.interval):
            try:
                self.retry_once()
            except Exception:
                logging.getLogger("ocpi_api").exception(
                    "Unexpected error during export retry"
                )

    def _connect(self):
        """Open a fresh DB connection using the job's MySQL settings."""
        return _connect_db(self.mysql_cfg)

    def _fetch_pending(self, conn):
        """Return the oldest batch of failed exports still eligible for retry."""
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, backend_id, backend_name, payload, retry_count, record_type
                FROM op_ocpi_exports
                WHERE success = 0 AND should_retry = 1 AND retry_count < %s
                ORDER BY created_at ASC
                LIMIT %s
                """,
                (self.max_attempts, self.batch_size),
            )
            return cur.fetchall()

    def _load_backend(self, conn, backend_id: Any) -> Optional[Mapping[str, Any]]:
        """Fetch the enabled backend row for *backend_id*, or None if absent/disabled."""
        if backend_id is None:
            return None
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT backend_id, name, url, remote_versions_url, peer_versions_url, active_version, token, peer_token, modules
                FROM op_ocpi_backends
                WHERE backend_id = %s AND enabled = 1
                """,
                (backend_id,),
            )
            row = cur.fetchone()
            return row

    def _update_result(
        self,
        conn,
        export_id: Any,
        *,
        success: bool,
        retry_count: int,
        response_status: Optional[int],
        response_body: Optional[str],
    ) -> None:
        """Persist the outcome of one attempt on the export row and commit."""
        # Stop retrying once the attempt succeeded or the budget is exhausted.
        should_retry = 1 if not success and retry_count < self.max_attempts else 0
        with conn.cursor() as cur:
            cur.execute(
                """
                UPDATE op_ocpi_exports
                SET success=%s, retry_count=%s, should_retry=%s, response_status=%s, response_body=%s
                WHERE id=%s
                """,
                (
                    1 if success else 0,
                    retry_count,
                    should_retry,
                    response_status,
                    response_body,
                    export_id,
                ),
            )
        conn.commit()

    @staticmethod
    def _build_ocpi_endpoint(
        backend: Mapping[str, Any],
        record_type: str,
        *,
        override: Optional[str] = None,
    ) -> str:
        """Derive the sessions/cdrs endpoint URL for *backend*.

        An explicit *override* wins outright.  Otherwise the URL is assembled
        from the backend's versions/base URL and its active OCPI version,
        avoiding duplicate path segments when the stored URL already ends in
        the module suffix or already contains an ``/ocpi/`` path.
        """
        if override:
            return str(override).rstrip("/")

        suffix = "sessions" if record_type == "session" else "cdrs"
        base_url = str(
            backend.get("remote_versions_url")
            or backend.get("peer_versions_url")
            or backend.get("url")
            or ""
        ).strip().rstrip("/")
        if not base_url:
            return base_url

        version = str(backend.get("active_version") or "").strip()
        # A versions-document URL is not an API base; trim the "/versions" tail.
        if base_url.endswith("/versions"):
            base_url = base_url[: -len("/versions")]
        if not version:
            return base_url if base_url.endswith(f"/{suffix}") else f"{base_url}/{suffix}"
        if base_url.endswith(f"/{suffix}"):
            return base_url
        if "/ocpi/" in base_url:
            return f"{base_url}/{suffix}"
        return f"{base_url}/ocpi/{version}/{suffix}"

    def _send_payload(self, backend: Mapping[str, Any], record_type: str, payload: Any):
        """POST *payload* to the backend's sessions/cdrs endpoint; returns the response."""
        headers = {"Content-Type": "application/json"}
        token = _backend_auth_token(backend)
        if token:
            # Avoid double-prefixing tokens that already carry "Token ".
            headers["Authorization"] = f"Token {token}" if "Token " not in token else token

        # Config-level endpoint overrides take precedence over derived URLs.
        endpoint_override = self.ocpi_cfg.get("sessions_endpoint") if record_type == "session" else None
        if record_type == "cdr":
            endpoint_override = self.ocpi_cfg.get("cdrs_endpoint")

        url = self._build_ocpi_endpoint(backend, record_type, override=endpoint_override)

        response = requests.post(url, json=payload, headers=headers, timeout=10)
        return response

    def _record_run(
        self,
        *,
        backend: Optional[Mapping[str, Any]],
        record_type: str,
        success: bool,
        status_code: Optional[int],
        detail: Any,
        duration_ms: int,
        direction: str = "retry",
    ) -> None:
        """Write a sync-run audit row for one attempt (no-op without a repo)."""
        if not self.sync_run_repo:
            return
        module = "sessions" if record_type == "session" else "cdrs"
        self.sync_run_repo.record_run(
            "export_retry",
            module,
            direction,
            backend_id=(backend or {}).get("backend_id"),
            backend_name=(backend or {}).get("name") or (backend or {}).get("backend_name"),
            record_type=record_type,
            duration_ms=duration_ms,
            success=success,
            status_code=status_code,
            detail=detail,
        )

    def _process_export(
        self,
        conn,
        export: Mapping[str, Any],
        *,
        direction: str = "retry",
    ) -> dict[str, Any]:
        """Attempt to re-send one export row; persist and audit the outcome.

        Returns a summary dict with ``success``, ``status_code``,
        ``response_body``, ``should_retry`` and ``record_type``.
        """
        export_id = export.get("id")
        retry_count = int(export.get("retry_count") or 0) + 1
        backend = self._load_backend(conn, export.get("backend_id"))
        record_type = (export.get("record_type") or "cdr").lower()
        response_status = None
        response_body = None
        success = False
        start_time = time.perf_counter()

        if backend is None:
            # Backend deleted or disabled: record the failure as a 404 while
            # still consuming one attempt from the retry budget.
            self._update_result(
                conn,
                export_id,
                success=False,
                retry_count=retry_count,
                response_status=404,
                response_body="backend missing",
            )
            if self.metrics:
                self.metrics.retry_failures += 1
            duration_ms = int((time.perf_counter() - start_time) * 1000)
            self._record_run(
                backend=None,
                record_type=record_type,
                success=False,
                status_code=404,
                detail="backend missing",
                duration_ms=duration_ms,
                direction=direction,
            )
            return {
                "success": False,
                "status_code": 404,
                "response_body": "backend missing",
                "should_retry": False,
                "record_type": record_type,
            }

        if self.metrics:
            self.metrics.retry_attempts += 1

        try:
            payload = json.loads(export.get("payload") or "{}")
        except Exception:
            # Non-JSON payloads are sent verbatim rather than dropped.
            payload = export.get("payload")

        try:
            response = self._send_payload(backend, record_type, payload)
            response_status = response.status_code
            response_body = response.text[:1000]
            success = response.ok
        except Exception as exc:
            response_body = str(exc)
            logging.getLogger("ocpi_api").exception(
                "Retry send failed for export_id=%s", export_id
            )

        self._update_result(
            conn,
            export_id,
            success=success,
            retry_count=retry_count,
            response_status=response_status,
            response_body=response_body,
        )

        if self.metrics:
            if success:
                self.metrics.retry_successes += 1
            else:
                self.metrics.retry_failures += 1

        duration_ms = int((time.perf_counter() - start_time) * 1000)
        self._record_run(
            backend=backend,
            record_type=record_type,
            success=success,
            status_code=response_status,
            detail=response_body,
            duration_ms=duration_ms,
            direction=direction,
        )
        should_retry = 1 if not success and retry_count < self.max_attempts else 0
        return {
            "success": success,
            "status_code": response_status,
            "response_body": response_body,
            "should_retry": bool(should_retry),
            "record_type": record_type,
        }

    def retry_once(self) -> None:
        """Process one batch of pending exports; DB connect errors are logged, not raised."""
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for retries")
            return

        try:
            exports = self._fetch_pending(conn)
            if not exports:
                return

            for export in exports:
                self._process_export(conn, export)
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def retry_export(self, export_id: Any) -> dict[str, Any]:
        """Replay a single export row on demand.

        Returns the :meth:`_process_export` summary extended with ``found``
        (and ``export_id`` when the row exists).
        """
        try:
            conn = self._connect()
        except Exception:
            logging.getLogger("ocpi_api").exception("Failed to open DB connection for retry export")
            return {"success": False, "status_code": 500, "response_body": "connection failed", "found": False}

        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id, backend_id, backend_name, payload, retry_count, record_type
                    FROM op_ocpi_exports
                    WHERE id=%s
                    LIMIT 1
                    """,
                    (export_id,),
                )
                row = cur.fetchone()
            if not row:
                return {"success": False, "status_code": 404, "response_body": "not found", "found": False}
            result = self._process_export(conn, row, direction="replay")
            result["found"] = True
            result["export_id"] = export_id
            return result
        finally:
            try:
                conn.close()
            except Exception:
                pass

    def is_running(self) -> bool:
        """Return True while the worker thread is alive."""
        return bool(self._thread and self._thread.is_alive())


class JobScheduler:
    """Minimal registry that starts and stops a set of background jobs.

    Failures in a single job's start/stop are logged and never propagate,
    so one broken job cannot prevent the others from running.
    """

    def __init__(self):
        self.jobs: list[Any] = []

    def add(self, job) -> None:
        """Register *job* for lifecycle management."""
        self.jobs.append(job)

    def start(self):
        """Start every registered job in registration order."""
        log = logging.getLogger("ocpi_api")
        for job in self.jobs:
            try:
                job.start()
            except Exception:
                log.exception("Failed to start job %s", job)

    def stop(self):
        """Stop every registered job in registration order."""
        log = logging.getLogger("ocpi_api")
        for job in self.jobs:
            try:
                job.stop()
            except Exception:
                log.exception("Failed to stop job %s", job)


class OCPPApiClient:
    """Thin JSON-over-HTTP client for the internal OCPP control API.

    Wraps ``requests.post`` with a fixed base URL, timeout and TLS-verify
    setting; every failure mode is surfaced as ``RuntimeError``.
    """

    def __init__(
        self,
        base_url: str,
        *,
        timeout: float = DEFAULT_OCPP_API_TIMEOUT,
        verify: bool | str = True,
    ) -> None:
        # Keep exactly one trailing slash so urljoin appends paths correctly.
        self.base_url = f"{base_url.rstrip('/')}/" if base_url else ""
        self.timeout = timeout
        self.verify = verify
        self._log = logging.getLogger(self.__class__.__name__)

    def _post(self, path: str, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """POST *payload* as JSON to *path* and return the decoded JSON response.

        Raises RuntimeError when the client is unconfigured, the request
        fails, the server answers with a 4xx/5xx status, or the body is not
        valid JSON.
        """
        if not self.base_url:
            raise RuntimeError("OCPP API base URL not configured")

        target = urljoin(self.base_url, path.lstrip("/"))

        self._log.info("POST %s", target)
        try:
            body_repr = json.dumps(payload, ensure_ascii=False)
        except Exception:
            # Payload may contain non-serializable values; log repr instead.
            body_repr = str(payload)
        self._log.debug("payload=%s", body_repr)

        try:
            response = requests.post(
                target,
                json=dict(payload),
                timeout=self.timeout,
                verify=self.verify,
                headers={"Content-Type": "application/json"},
            )
        except requests.RequestException as exc:
            raise RuntimeError(f"OCPP API request failed: {exc}") from exc

        self._log.debug("response_status=%s body=%s", response.status_code, response.text)
        if response.status_code >= 400:
            raise RuntimeError(
                f"OCPP API returned {response.status_code} for {target}: {response.text}"
            )

        try:
            return response.json()
        except ValueError as exc:
            raise RuntimeError("OCPP API did not return JSON") from exc

    def remote_start(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Trigger a remote session start."""
        return self._post("hubject/remote-start", payload)

    def remote_stop(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Trigger a remote session stop."""
        return self._post("hubject/remote-stop", payload)

    def unlock_connector(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Request a connector unlock."""
        return self._post("hubject/unlock-connector", payload)

    def reserve_now(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Request a reservation."""
        return self._post("hubject/reserve-now", payload)

    def set_charging_profile(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Apply a smart-charging profile."""
        return self._post("smart-charging/set-profile", payload)

    def clear_charging_profile(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
        """Clear a smart-charging profile."""
        return self._post("smart-charging/clear-profile", payload)


def create_module_blueprint(module: str, version: str, *, suffix: str = "") -> Blueprint:
    """Build a Flask blueprint exposing OCPI list/detail routes for *module*.

    Serves sessions/cdrs (export rows), tariffs and tokens in the payload
    shape of the given OCPI *version*.  Tokens additionally get upsert
    (POST/PUT) and authorize endpoints.  *suffix* disambiguates blueprint
    names when the same module is registered more than once.
    """
    bp = Blueprint(f"{module}{suffix}", __name__ + f".{module}{suffix}")

    @bp.before_request
    def _auth_guard():
        # Enforce backend token authentication for every route in this blueprint.
        return _guard_backend(module=module)

    @bp.route("/", methods=["GET"])
    def list_entries(backend_id: Optional[str] = None):
        """Paginated listing with optional date_from/date_to filters."""
        date_from = _parse_date_param(request.args.get("date_from"))
        date_to = _parse_date_param(request.args.get("date_to"))
        offset, limit = _pagination_params()
        profile = getattr(g, "backend_profile", None)
        # Prefer an explicit URL backend_id; fall back to the authenticated profile.
        backend_filter = backend_id or (profile.backend_id if profile else None)
        try:
            backend_filter = int(backend_filter) if backend_filter is not None else None
        except (TypeError, ValueError):
            # Non-numeric identifiers are passed through unchanged.
            backend_filter = backend_filter

        entries: list[dict[str, Any]] = []
        total = 0
        record_type_for_log: Optional[str] = None
        if module in {"sessions", "cdrs"}:
            record_type = "session" if module == "sessions" else "cdr"
            record_type_for_log = record_type
            entries, total = data_repo.list_exports(
                record_type,
                backend_id=backend_filter,
                date_from=date_from,
                date_to=date_to,
                offset=offset,
                limit=limit,
            )
        elif module == "tariffs":
            entries, total = tariff_repo.list_tariffs(
                date_from=date_from,
                date_to=date_to,
                offset=offset,
                limit=limit,
            )
        elif module == "tokens":
            entries, total = data_repo.list_tokens(
                date_from=date_from,
                date_to=date_to,
                offset=offset,
                limit=limit,
            )

        resp, status = ocpi_response(
            data=map_outgoing_payload(module, version, entries)
        )
        _apply_pagination_headers(resp, total, limit, offset)
        if sync_run_repo:
            sync_run_repo.record_run(
                f"{module}_{version}",
                module,
                "pull",
                backend_id=backend_filter,
                record_type=record_type_for_log or module.rstrip("s"),
                duration_ms=_request_duration_ms(),
                success=status < 400,
                status_code=resp.status_code,
            )
        return resp, status

    if module == "tokens":
        @bp.route("/<string:item_id>", methods=["GET", "POST", "PUT"])
        def token_detail(item_id: str, backend_id: Optional[str] = None):
            """Fetch (GET) or upsert (POST/PUT) a single token by UID."""
            profile = getattr(g, "backend_profile", None)
            backend_filter = backend_id or (profile.backend_id if profile else None)
            try:
                backend_filter = int(backend_filter) if backend_filter is not None else None
            except (TypeError, ValueError):
                # Non-numeric identifiers are passed through unchanged.
                backend_filter = backend_filter

            if request.method == "GET":
                entry = data_repo.get_token(item_id)
                if entry is None:
                    resp, http_status = ocpi_response(
                        data=None,
                        status_message=f"{module.capitalize()} '{item_id}' not found",
                        status_code=CLIENT_ERROR_CODE,
                        error_code="NOT_FOUND",
                        http_status=404,
                    )
                    if sync_run_repo:
                        sync_run_repo.record_run(
                            f"{module}_{version}",
                            module,
                            "pull",
                            backend_id=backend_filter,
                            record_type="token",
                            duration_ms=_request_duration_ms(),
                            success=False,
                            status_code=resp.status_code,
                            detail="not found",
                        )
                    return resp, http_status

                resp, status = ocpi_response(data=map_outgoing_payload(module, version, entry))
                if sync_run_repo:
                    sync_run_repo.record_run(
                        f"{module}_{version}",
                        module,
                        "pull",
                        backend_id=backend_filter,
                        record_type="token",
                        duration_ms=_request_duration_ms(),
                        success=status < 400,
                        status_code=resp.status_code,
                    )
                return resp, status

            # POST/PUT: validate the version-specific payload shape before storing.
            raw_payload = request.get_json(force=True, silent=True) or {}
            mapping = map_incoming_payload("tokens", version, raw_payload)
            if mapping.error or mapping.missing_fields:
                missing_field = mapping.missing_fields[0] if mapping.missing_fields else None
                status_message = mapping.error or f"Missing field: {missing_field}"
                error_code = "FORMAT_ERROR" if mapping.error else "MISSING_FIELD"
                resp, http_status = ocpi_response(
                    status_code=CLIENT_ERROR_CODE,
                    status_message=status_message,
                    error_code=error_code,
                    http_status=400,
                )
                return resp, http_status

            # The UID in the URL wins over any UID in the body.
            payload = {**mapping.payload, "uid": item_id}
            stored = data_repo.upsert_token(payload)
            resp, status = ocpi_response(
                data=map_outgoing_payload("tokens", version, stored or payload)
            )
            return resp, status

        @bp.route("/<string:item_id>/authorize", methods=["POST"])
        def authorize_token(item_id: str, backend_id: Optional[str] = None):
            """Authorize a token UID, optionally against a specific station."""
            raw_payload = request.get_json(force=True, silent=True) or {}
            allowed, token, error_message, error_status = _authorize_token_request(
                item_id,
                version,
                raw_payload,
            )
            if error_message:
                return ocpi_response(
                    status_code=CLIENT_ERROR_CODE,
                    status_message=error_message,
                    error_code="TIMEOUT" if error_status == 504 else "COMMUNICATION_ERROR",
                    http_status=error_status or 502,
                )
            if token is None:
                return ocpi_response(
                    status_code=CLIENT_ERROR_CODE,
                    status_message=f"{module.capitalize()} '{item_id}' not found",
                    error_code="NOT_FOUND",
                    http_status=404,
                )
            station_ref = _station_identifier(raw_payload)
            if allowed:
                # Remember positive decisions briefly (see _cache_authorization).
                _cache_authorization(item_id, station_ref)
            status_value = "ALLOWED" if allowed else "REJECTED"
            return ocpi_response(
                data={"allowed": status_value, "token": map_outgoing_payload("tokens", version, token)}
            )
    else:
        @bp.route("/<path:item_id>", methods=["GET"])
        def get_entry(item_id: str, backend_id: Optional[str] = None):
            """Fetch a single session/cdr/tariff entry by identifier."""
            profile = getattr(g, "backend_profile", None)
            backend_filter = backend_id or (profile.backend_id if profile else None)
            try:
                backend_filter = int(backend_filter) if backend_filter is not None else None
            except (TypeError, ValueError):
                # Non-numeric identifiers are passed through unchanged.
                backend_filter = backend_filter

            entry: Optional[dict[str, Any]] = None
            record_type_for_log: Optional[str] = None
            if module in {"sessions", "cdrs"}:
                record_type = "session" if module == "sessions" else "cdr"
                record_type_for_log = record_type
                entry = data_repo.get_export_item(record_type, item_id, backend_id=backend_filter)
            elif module == "tariffs":
                entry = tariff_repo.get_tariff(item_id)

            if entry is None:
                resp, http_status = ocpi_response(
                    data=None,
                    status_message=f"{module.capitalize()} '{item_id}' not found",
                    status_code=CLIENT_ERROR_CODE,
                    error_code="NOT_FOUND",
                    http_status=404,
                )
                if sync_run_repo:
                    sync_run_repo.record_run(
                        f"{module}_{version}",
                        module,
                        "pull",
                        backend_id=backend_filter,
                        record_type=record_type_for_log or module.rstrip("s"),
                        duration_ms=_request_duration_ms(),
                        success=False,
                        status_code=resp.status_code,
                        detail="not found",
                    )
                return resp, http_status

            resp, status = ocpi_response(data=map_outgoing_payload(module, version, entry))
            if sync_run_repo:
                sync_run_repo.record_run(
                    f"{module}_{version}",
                    module,
                    "pull",
                    backend_id=backend_filter,
                    record_type=record_type_for_log or module.rstrip("s"),
                    duration_ms=_request_duration_ms(),
                    success=status < 400,
                    status_code=resp.status_code,
                )
            return resp, status

    return bp


# ---------------------------------------------------------------------------
# Application bootstrap: config, logging, repositories and shared singletons.
# ---------------------------------------------------------------------------
app = Flask(__name__)
app_config = load_config()
setup_logging(app_config, logger_name="ocpi_api")
logger = logging.getLogger("ocpi_api")

# Merge the top-level "tariffs" list into the OCPI config section so
# downstream consumers only need the one mapping.
ocpi_cfg = app_config.get("ocpi_api", {}) if isinstance(app_config, dict) else {}
if isinstance(app_config, dict):
    ocpi_cfg = {**ocpi_cfg, "tariffs": app_config.get("tariffs", [])}
scheduler_cfg = app_config.get("scheduler", {}) if isinstance(app_config, dict) else {}
# Environment variables override the config-file listen address and port.
listen_host = os.environ.get("OCPI_API_HOST") or ocpi_cfg.get("host", DEFAULT_HOST)
listen_port = int(os.environ.get("OCPI_API_PORT") or ocpi_cfg.get("port", DEFAULT_PORT))
mysql_cfg = _mysql_config(app_config.get("mysql", {}) if isinstance(app_config, dict) else {})
active_base_url = _select_env_value(ocpi_cfg, "base_url")
fallback_token = os.environ.get("OCPI_API_TOKEN") or _select_env_value(ocpi_cfg, "token")
registry_cfg = dict(ocpi_cfg) if isinstance(ocpi_cfg, Mapping) else {}
if active_base_url:
    registry_cfg["base_url"] = active_base_url
backend_registry = BackendRegistry.from_config(registry_cfg, fallback_token=fallback_token)
metrics = JobMetrics()
# Alert settings: an "alerts" block inside the OCPI section overrides the
# top-level "alerts" keys.
alert_cfg: Mapping[str, Any] = {}
if isinstance(app_config, dict):
    alert_cfg = app_config.get("alerts", {}) or {}
    if isinstance(ocpi_cfg, dict) and isinstance(ocpi_cfg.get("alerts"), Mapping):
        alert_cfg = {**alert_cfg, **ocpi_cfg.get("alerts", {})}
alert_notifier = FailureNotifier.from_config(alert_cfg, logger=logger)
# Repository / service singletons shared by all routes and background jobs.
tariff_service = TariffService(mysql_cfg, fallback_tariffs=ocpi_cfg.get("tariffs") or [])
location_repo = LocationRepository(mysql_cfg, tariff_service=tariff_service)
backend_repo = BackendRepository(mysql_cfg)
tariff_repo = TariffRepository(ocpi_cfg, mysql_cfg, tariff_service=tariff_service)
data_repo = OcpiDataRepository(mysql_cfg)
command_repo = CommandResultRepository(mysql_cfg)
command_queue_repo = CommandQueueRepository(mysql_cfg)
handshake_repo = HandshakeRepository(mysql_cfg)
sync_run_repo = SyncRunRepository(mysql_cfg)
# HTTP client for the local OCPP control API (remote start/stop etc.).
ocpp_client = OCPPApiClient(
    ocpi_cfg.get("ocpp_api_base_url", DEFAULT_OCPP_API_BASE_URL),
    timeout=float(ocpi_cfg.get("ocpp_api_timeout", DEFAULT_OCPP_API_TIMEOUT)),
    verify=ocpi_cfg.get("ocpp_api_verify_tls", True),
)


def _business_details() -> dict[str, Any]:
    """Assemble the OCPI business_details object from configuration.

    Starts from the configured ``business_details`` mapping (when present)
    and backfills ``name``/``website`` from the flat config keys, with
    "Pipelet" as the final name fallback.
    """
    result: dict[str, Any] = {}
    configured = ocpi_cfg.get("business_details") if isinstance(ocpi_cfg, Mapping) else None
    if isinstance(configured, Mapping):
        result.update(configured)
    if not result.get("name"):
        result["name"] = ocpi_cfg.get("business_name") or "Pipelet"
    site = ocpi_cfg.get("business_website")
    if site and not result.get("website"):
        result["website"] = site
    return result


def _credentials_payload(base_url: str, backend_row: Optional[Mapping[str, Any]]) -> dict[str, Any]:
    """Build the OCPI credentials object advertised to a peer.

    Reuses a token already stored on the backend row (preference order:
    credentials_token, peer_token, token); otherwise generates a fresh one
    and persists it through the backend repository.
    """
    backend_id = backend_row.get("backend_id") if backend_row else None

    token = None
    if backend_row:
        for key in ("credentials_token", "peer_token", "token"):
            candidate = backend_row.get(key)
            if candidate:
                token = candidate
                break
    if not token:
        # NOTE(review): backend_id may be None here (no backend row) —
        # assumes save_credentials_token treats that as the default backend.
        token = _generate_token()
        backend_repo.save_credentials_token(backend_id, token)

    return {
        "token": token,
        "url": f"{base_url}/ocpi/versions",
        "roles": [
            {
                "role": "CPO",
                "party_id": ocpi_cfg.get("party_id") or "WAL",
                "country_code": ocpi_cfg.get("country_code") or "DE",
                "business_details": _business_details(),
            }
        ],
    }


def _handshake_payload(
    row: Mapping[str, Any],
    *,
    mtls_ready: bool,
    backend_row: Optional[Mapping[str, Any]] = None,
) -> dict[str, Any]:
    """Serialize a handshake log row for API consumption.

    Exposes only short token hints (last six characters) plus presence
    flags for the tokens on the associated backend row — never full values.
    """
    handshake_token = row.get("token")
    result: dict[str, Any] = {
        "id": row.get("id"),
        "backend_id": row.get("backend_id"),
        "state": row.get("state"),
        "status": row.get("status"),
        "detail": row.get("detail"),
        "token_hint": (handshake_token or "")[-6:] if handshake_token else None,
        "peer_url": row.get("peer_url"),
        "updated_at": _timestamp_str(row.get("updated_at")),
        "created_at": _timestamp_str(row.get("created_at")),
        "mtls_ready": bool(mtls_ready),
    }
    if backend_row:
        for key, label in (
            ("token", "backend_token"),
            ("peer_token", "peer_token"),
            ("credentials_token", "credentials_token"),
        ):
            value = backend_row.get(key)
            result[f"{label}_present"] = bool(value)
            result[f"{label}_hint"] = str(value)[-6:] if value else None
    return result


def _select_backend(module: Optional[str] = None):
    """Resolve the backend profile for the current request.

    Prefers a database-backed backend (looked up via the ``backend_id``
    URL segment) and falls back to the static registry. Returns
    ``(profile, None)`` on success or ``(None, error_response)`` on
    failure; the resolved profile is also stashed on ``g.backend_profile``.
    """
    def _not_supported():
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Module not supported",
            error_code="NOT_SUPPORTED",
            http_status=501,
        )

    backend_id = (request.view_args or {}).get("backend_id")
    backend_row = backend_repo.get_backend(backend_id)
    if backend_id and backend_row is None:
        return None, ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Backend not found",
            error_code="NOT_FOUND",
            http_status=404,
        )

    if backend_row:
        profile = _profile_from_backend_row(backend_row)
        if module and not profile.supports_module(module):
            return None, _not_supported()
        g.backend_profile = profile
        return profile, None

    profile = backend_registry.select_backend(request, backend_id, module=module)
    if profile is None:
        return None, _not_supported()
    g.backend_profile = profile
    return profile, None


def _guard_backend(module: Optional[str] = None):
    """Authenticate the current request against the resolved backend.

    Returns ``None`` when authentication succeeds, otherwise an OCPI error
    response. First checks the selected profile's token, then falls back to
    the tokens stored on the matching backend database row.
    """
    profile, error = _select_backend(module=module)
    if error:
        return error

    # Fast path: Authorization header matches the selected profile's token.
    if verify_token_header(request, profile.token):
        return None

    # Fallback: a database-backed backend may carry several valid tokens.
    # Find the row either via the URL's backend_id or by looking up the
    # token the caller actually presented.
    backend_row = None
    auth_header = request.headers.get("Authorization", "")
    # Header format is "<scheme> <token>"; None when no space-separated token.
    provided_token = auth_header.split(" ", 1)[1].strip() if " " in auth_header else None
    backend_id = None
    if request.view_args:
        backend_id = request.view_args.get("backend_id")
    if backend_id is not None:
        backend_row = backend_repo.get_backend(backend_id)
    if backend_row is None:
        backend_row = backend_repo.find_by_token(provided_token)

    if backend_row:
        # Accept any of the three stored token variants.
        for candidate in (
            backend_row.get("credentials_token"),
            backend_row.get("peer_token"),
            backend_row.get("token"),
        ):
            if verify_token_header(request, candidate):
                g.backend_profile = _profile_from_backend_row(backend_row)
                return None

    # All token checks failed: raise an alert and reject with 401.
    alert_notifier.record_failure(
        "OCPI token validation failed",
        {"backend_id": profile.backend_id, "path": request.path},
    )
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message="Unauthorized",
        error_code="UNAUTHORIZED",
        http_status=401,
    )


def _profile_from_backend_row(row: Mapping[str, Any]) -> BackendProfile:
    """Build a BackendProfile from a database backend row.

    The ``modules`` column is a comma- or semicolon-separated list; the
    token preference order is credentials_token, peer_token, token.
    """
    raw_modules = row.get("modules") or ""
    module_set = {
        part.strip().lower()
        for part in raw_modules.replace(";", ",").split(",")
        if part.strip()
    }
    active_token = row.get("credentials_token") or row.get("peer_token") or row.get("token")
    return BackendProfile(
        backend_id=str(row.get("backend_id", "default")),
        url=row.get("url"),
        token=active_token,
        country_code=ocpi_cfg.get("country_code"),
        party_id=ocpi_cfg.get("party_id"),
        modules=module_set,
        base_url=None,
    )


def _command_id() -> str:
    return secrets.token_hex(8)


def _dispatch_ocpp_command(
    command: str,
    payload: Mapping[str, Any],
    *,
    version: Optional[str] = None,
) -> Mapping[str, Any]:
    """Translate an OCPI command payload into the matching OCPP API call.

    Supports START_SESSION, STOP_SESSION, UNLOCK_CONNECTOR and RESERVE_NOW.
    Returns the OCPP client's response mapping.

    Raises:
        RuntimeError: when the command is unsupported or START_SESSION has
            no token; ``_ensure_token_authorized`` may also raise.
    """
    normalized_command = command.upper()
    if normalized_command == "START_SESSION":
        # The target EVSE may arrive under several OCPI field names.
        evse_id = payload.get("evse_uid") or payload.get("location_id") or payload.get("evse_id")
        ocpp_payload: dict[str, Any] = {"evse_id": evse_id}
        # Accept the token under any OCPI alias; first non-empty value wins.
        for key in ("token", "auth_id", "authorization_id"):
            token = payload.get(key)
            if token:
                ocpp_payload["token"] = token
                break
        if "token" not in ocpp_payload:
            raise RuntimeError("Token is required for remote start")
        if ocpp_payload.get("token"):
            # Verify authorization before dispatching the remote start.
            _ensure_token_authorized(
                str(ocpp_payload["token"]),
                station_id=str(evse_id or ""),
                version=version,
                payload=payload,
            )
        connector = payload.get("connector_id") or payload.get("connectorId")
        if connector not in (None, ""):
            # Prefer an int connector id; pass the raw value through otherwise.
            try:
                ocpp_payload["connector_id"] = int(str(connector))
            except (TypeError, ValueError):
                ocpp_payload["connector_id"] = connector
        if payload.get("charging_profile"):
            ocpp_payload["charging_profile"] = payload.get("charging_profile")
        return ocpp_client.remote_start(ocpp_payload)

    if normalized_command == "STOP_SESSION":
        transaction = payload.get("session_id") or payload.get("transaction_id")
        ocpp_payload = {"transaction_id": transaction}
        evse_id = payload.get("evse_uid") or payload.get("location_id")
        if evse_id:
            ocpp_payload["evse_id"] = evse_id
        return ocpp_client.remote_stop(ocpp_payload)

    if normalized_command == "UNLOCK_CONNECTOR":
        ocpp_payload = {
            "evse_id": payload.get("evse_uid") or payload.get("location_id") or payload.get("evse_id"),
            "connector_id": payload.get("connector_id") or payload.get("connectorId"),
        }
        return ocpp_client.unlock_connector(ocpp_payload)

    if normalized_command == "RESERVE_NOW":
        ocpp_payload = {
            "evse_id": payload.get("evse_uid") or payload.get("location_id") or payload.get("evse_id"),
            "connector_id": payload.get("connector_id") or payload.get("connectorId"),
            "id_tag": payload.get("token") or payload.get("auth_id") or payload.get("authorization_id"),
            "parent_id_tag": payload.get("parent_id_tag"),
            "reservation_id": payload.get("reservation_id"),
            "expiry_date": payload.get("expiry_date") or payload.get("expiryDate"),
        }
        return ocpp_client.reserve_now(ocpp_payload)

    raise RuntimeError(f"Unsupported command {command}")


def _serialize_command_body(body: Any) -> str:
    if isinstance(body, (dict, list)):
        try:
            return json.dumps(body, ensure_ascii=False)
        except Exception:
            pass
    return str(body)


def _extract_command_ids(payload: Mapping[str, Any], response: Any) -> tuple[Any, Any]:
    reservation_id = payload.get("reservation_id")
    transaction_id = payload.get("session_id") or payload.get("transaction_id")
    if isinstance(response, Mapping):
        reservation_id = response.get("reservationId") or response.get("reservation_id") or reservation_id
        transaction_id = (
            response.get("transactionId")
            or response.get("transaction_id")
            or response.get("session_id")
            or transaction_id
        )
    return reservation_id, transaction_id


def _perform_command_dispatch(
    command: str,
    payload: Mapping[str, Any],
    *,
    backend_id: Optional[Any],
    ocpi_command_id: str,
    version: Optional[str] = None,
    queue_identifier: Optional[Any] = None,
) -> tuple[bool, Optional[str], Any, Optional[Any], Optional[Any], Optional[str]]:
    """Dispatch an OCPI command via OCPP and persist the outcome.

    When ``queue_identifier`` is given, the command-queue row is marked
    in-progress, then success or failure. The result is always recorded via
    ``command_repo``; recording failures are logged but never propagate.

    Returns:
        ``(success, ocpp_status, ocpp_response, reservation_id,
        transaction_id, error_message)``.
    """
    success = True
    ocpp_status: str | None = None
    ocpp_response: Any = None
    error_message: str | None = None
    reservation_id: Any | None = None
    transaction_id: Any | None = None

    # Queue rows are tracked by their own identifier; otherwise the OCPI
    # command id serves as the bookkeeping key.
    identifier = queue_identifier or ocpi_command_id
    if queue_identifier is not None and command_queue_repo:
        command_queue_repo.mark_in_progress(identifier)

    try:
        ocpp_response = _dispatch_ocpp_command(command, payload, version=version)
        ocpp_status = ocpp_response.get("status") if isinstance(ocpp_response, Mapping) else None
        reservation_id, transaction_id = _extract_command_ids(payload, ocpp_response)
        if queue_identifier is not None and command_queue_repo:
            command_queue_repo.mark_success(
                identifier,
                ocpp_status=ocpp_status,
                ocpp_message=_serialize_command_body(ocpp_response),
                reservation_id=reservation_id,
                transaction_id=transaction_id,
            )
    except Exception as exc:  # pragma: no cover - defensive
        # Any dispatch failure is captured so the caller still receives a
        # complete result tuple; the error text doubles as the response body.
        success = False
        error_message = str(exc)
        ocpp_response = {"error": error_message}
        if queue_identifier is not None and command_queue_repo:
            command_queue_repo.mark_failure(
                identifier,
                ocpp_status=None,
                ocpp_message=_serialize_command_body(ocpp_response),
                error=error_message,
            )

    # Persist the result regardless of outcome; never let the audit trail
    # break the command flow.
    try:
        command_repo.record(
            backend_id=backend_id,
            command=command.upper(),
            module="commands",
            ocpi_command_id=ocpi_command_id,
            success=success,
            response_status=ocpp_status,
            response_body=ocpp_response,
        )
    except Exception:  # pragma: no cover - defensive
        logging.getLogger("ocpi_api").exception("Failed to record OCPI command result")

    return success, ocpp_status, ocpp_response, reservation_id, transaction_id, error_message


def _dispatch_charging_profile(payload: Mapping[str, Any]) -> Mapping[str, Any]:
    """Forward an OCPI SetChargingProfile request to the OCPP API."""
    ocpp_payload: dict[str, Any] = {
        "evse_id": payload.get("session_id") or payload.get("evse_uid") or payload.get("location_id"),
    }
    profile = payload.get("charging_profile") or payload.get("ChargingProfile")
    if profile:
        ocpp_payload["charging_profile"] = profile
    return ocpp_client.set_charging_profile(ocpp_payload)


def _clear_charging_profile(payload: Mapping[str, Any]) -> Mapping[str, Any]:
    """Forward an OCPI ClearChargingProfile request to the OCPP API.

    Uses the first non-empty identifier as the target EVSE and coerces
    stack_level to int when possible (passing the raw value otherwise).
    """
    ocpp_payload: dict[str, Any] = {}
    target = next(
        (payload.get(k) for k in ("session_id", "evse_uid", "location_id", "evse_id") if payload.get(k)),
        None,
    )
    if target:
        ocpp_payload["evse_id"] = target
    level = payload.get("stack_level")
    if level not in (None, ""):
        try:
            ocpp_payload["stack_level"] = int(str(level))
        except (TypeError, ValueError):
            ocpp_payload["stack_level"] = level
    return ocpp_client.clear_charging_profile(ocpp_payload)
# Background jobs: periodic location/tariff synchronisation and export retry.
location_sync_job = LocationSyncJob(
    location_repo,
    ocpi_cfg,
    metrics=metrics,
    interval=int(
        scheduler_cfg.get("location_interval_seconds", DEFAULT_SYNC_INTERVAL_SECONDS)
    ),
    sync_run_repo=sync_run_repo,
)
tariff_sync_job = TariffSyncJob(
    tariff_repo,
    backend_repo,
    interval=int(
        scheduler_cfg.get(
            "tariff_interval_seconds", DEFAULT_TARIFF_SYNC_INTERVAL_SECONDS
        )
    ),
    # NOTE(review): literal 50 default here (no named constant for the
    # tariff batch size, unlike DEFAULT_RETRY_BATCH_SIZE below).
    batch_size=int(scheduler_cfg.get("tariff_batch_size", 50)),
    metrics=metrics,
    sync_run_repo=sync_run_repo,
)
export_retry_job = ExportRetryJob(
    mysql_cfg,
    ocpi_cfg,
    interval=int(
        scheduler_cfg.get("retry_interval_seconds", DEFAULT_RETRY_INTERVAL_SECONDS)
    ),
    batch_size=int(scheduler_cfg.get("retry_batch_size", DEFAULT_RETRY_BATCH_SIZE)),
    max_attempts=int(
        scheduler_cfg.get("retry_max_attempts", DEFAULT_RETRY_MAX_ATTEMPTS)
    ),
    metrics=metrics,
    sync_run_repo=sync_run_repo,
)
# All jobs run on a single shared scheduler instance.
scheduler = JobScheduler()
scheduler.add(location_sync_job)
scheduler.add(tariff_sync_job)
scheduler.add(export_retry_job)


@app.before_request
def before_request_logging():
    """Stamp the request with a start time and log its arrival."""
    # perf_counter is monotonic, so it is safe for duration measurement.
    request.start_time = time.perf_counter()
    log_request(logger, request)


@app.after_request
def after_request_logging(response):
    """Log the response, record metrics, and feed the failure notifier."""
    # Fall back to "now" if before_request never ran (duration ~= 0).
    started = getattr(request, "start_time", time.perf_counter())
    duration = time.perf_counter() - started
    log_response(logger, request, response.status_code, duration)
    metrics.record_http_request(response.status_code, duration)
    if response.status_code < 400:
        alert_notifier.record_success()
    else:
        alert_notifier.record_failure(
            "OCPI API request failed",
            {
                "status": response.status_code,
                "path": request.path,
                "method": request.method,
            },
        )
    return response


def _version_endpoints(version: str, base_url: str) -> list[dict[str, str]]:
    """List the endpoint descriptors advertised for an OCPI version.

    Credentials is always present; the other modules appear only when
    supported by the given version.
    """
    modules = (
        "locations",
        "sessions",
        "cdrs",
        "tariffs",
        "tokens",
        "commands",
        "chargingprofiles",
    )
    endpoints = [{"identifier": "credentials", "url": f"{base_url}/ocpi/{version}/credentials"}]
    endpoints.extend(
        {"identifier": module, "url": f"{base_url}/ocpi/{version}/{module}"}
        for module in modules
        if module_is_supported(version, module)
    )
    return endpoints


def create_locations_blueprint(version: str, *, suffix: str = "") -> Blueprint:
    """Create the OCPI locations blueprint for the given protocol *version*.

    Every handler follows the same shape: map the payload to/from the wire
    format for *version*, delegate to the location repository, and record a
    sync-run entry. The repeated ``sync_run_repo.record_run`` and error
    response construction is factored into local helpers.

    Args:
        version: OCPI version string used for payload mapping and job names.
        suffix: Optional name suffix so the blueprint can be registered
            multiple times (e.g. per-backend URL prefixes).

    Returns:
        The configured Flask Blueprint.
    """
    bp = Blueprint(f"locations{suffix}", __name__ + f".locations{suffix}")

    def _record(
        direction: str,
        record_type: str,
        backend_id: Optional[str],
        resp,
        *,
        success: bool,
        detail: Optional[str] = None,
    ) -> None:
        # Best-effort audit trail; skipped when no sync-run repo is wired.
        if not sync_run_repo:
            return
        # Only pass `detail` when set, preserving record_run's own default.
        extra = {"detail": detail} if detail is not None else {}
        sync_run_repo.record_run(
            f"locations_{version}",
            "locations",
            direction,
            backend_id=backend_id,
            record_type=record_type,
            duration_ms=_request_duration_ms(),
            success=success,
            status_code=resp.status_code,
            **extra,
        )

    def _mapping_error(
        record_type: str,
        backend_id: Optional[str],
        message: str,
        error_code: str,
    ):
        # 400 envelope for payload mapping/validation failures (push paths).
        resp, http_status = ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message=message,
            error_code=error_code,
            http_status=400,
        )
        _record("push", record_type, backend_id, resp, success=False, detail=message)
        return resp, http_status

    def _not_found(record_type: str, backend_id: Optional[str], message: str):
        # 404 envelope for missing locations/EVSEs (pull paths).
        resp, http_status = ocpi_response(
            data=None,
            status_code=CLIENT_ERROR_CODE,
            status_message=message,
            error_code="NOT_FOUND",
            http_status=404,
        )
        _record("pull", record_type, backend_id, resp, success=False, detail=message)
        return resp, http_status

    def _missing_field_message(field: str) -> str:
        return "Location id is required" if field == "id" else f"Missing field: {field}"

    @bp.before_request
    def _locations_guard():
        return _guard_backend(module="locations")

    @bp.get("/")
    def list_locations(backend_id: Optional[str] = None):
        since = request.args.get("since")
        entries = location_repo.list_locations()
        if since:
            # String comparison works because last_updated is ISO-formatted.
            entries = [
                entry
                for entry in entries
                if str(entry.get("last_updated", "")) >= since
            ]
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, entries))
        _record("pull", "location", backend_id, resp, success=status < 400)
        return resp, status

    @bp.put("/")
    def upsert_location(backend_id: Optional[str] = None):
        raw_payload = request.get_json(force=True, silent=True)
        mapping = map_incoming_payload("locations", version, raw_payload)
        if mapping.error:
            return _mapping_error("location", backend_id, mapping.error, "FORMAT_ERROR")
        if mapping.missing_fields:
            message = _missing_field_message(mapping.missing_fields[0])
            return _mapping_error("location", backend_id, message, "MISSING_FIELD")
        payload = mapping.payload
        location_id = str(payload.get("id") or "").strip()
        location_repo.upsert_location_override(location_id, payload)
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, payload))
        _record("push", "location", backend_id, resp, success=status < 400)
        return resp, status

    @bp.get("/<string:location_id>/<string:evse_uid>")
    def get_evse(location_id: str, evse_uid: str, backend_id: Optional[str] = None):
        location = location_repo.get_location(location_id)
        if not location:
            return _not_found("location", backend_id, "Location not found")
        evse = next(
            (item for item in location.get("evses") or [] if str(item.get("uid")) == evse_uid),
            None,
        )
        if not evse:
            return _not_found("evse", backend_id, "EVSE not found")
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, evse))
        _record("pull", "evse", backend_id, resp, success=status < 400)
        return resp, status

    @bp.get("/<string:location_id>")
    def get_location(location_id: str, backend_id: Optional[str] = None):
        location = location_repo.get_location(location_id)
        if not location:
            return _not_found("location", backend_id, "Location not found")
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, location))
        _record("pull", "location", backend_id, resp, success=status < 400)
        return resp, status

    @bp.put("/<string:location_id>/<string:evse_uid>")
    def upsert_evse(
        location_id: str, evse_uid: str, backend_id: Optional[str] = None
    ):
        raw_payload = request.get_json(force=True, silent=True)
        mapping = map_incoming_payload(
            "locations", version, raw_payload, validate_required=False
        )
        if mapping.error:
            return _mapping_error(
                "evse", backend_id, "JSON body with an EVSE object is required", "FORMAT_ERROR"
            )
        # Force the path's EVSE uid onto the payload so they cannot diverge.
        payload = {**mapping.payload, "uid": evse_uid}
        location_repo.upsert_location_override(location_id, payload, evse_uid=evse_uid)
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, payload))
        _record("push", "evse", backend_id, resp, success=status < 400)
        return resp, status

    @bp.patch("/")
    def patch_location(backend_id: Optional[str] = None):
        raw_payload = request.get_json(force=True, silent=True)
        mapping = map_incoming_payload("locations", version, raw_payload)
        if mapping.error:
            return _mapping_error("location", backend_id, mapping.error, "FORMAT_ERROR")
        if mapping.missing_fields:
            message = _missing_field_message(mapping.missing_fields[0])
            return _mapping_error("location", backend_id, message, "MISSING_FIELD")
        payload = mapping.payload
        location_id = str(payload.get("id") or "").strip()
        merged = location_repo.merge_location_override(location_id, payload)
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, merged))
        _record("push", "location", backend_id, resp, success=status < 400)
        return resp, status

    @bp.patch("/<string:location_id>/<string:evse_uid>")
    def patch_evse(
        location_id: str, evse_uid: str, backend_id: Optional[str] = None
    ):
        raw_payload = request.get_json(force=True, silent=True)
        mapping = map_incoming_payload(
            "locations", version, raw_payload, validate_required=False
        )
        if mapping.error:
            return _mapping_error(
                "evse", backend_id, "JSON body with an EVSE object is required", "FORMAT_ERROR"
            )
        payload = {**mapping.payload, "uid": evse_uid}
        merged = location_repo.merge_location_override(location_id, payload, evse_uid=evse_uid)
        resp, status = ocpi_response(data=map_outgoing_payload("locations", version, merged))
        _record("push", "evse", backend_id, resp, success=status < 400)
        return resp, status

    return bp


@app.get("/health")
def health() -> Response:
    body = {
        "status": "ok",
        "jobs": {
            "location_sync_interval": location_sync_job.interval,
            "tariff_sync_interval": tariff_sync_job.interval,
            "retry_interval": export_retry_job.interval,
            "retry_batch_size": export_retry_job.batch_size,
            "retry_max_attempts": export_retry_job.max_attempts,
        },
    }
    return jsonify(body)


@app.get("/metrics")
def metrics_endpoint() -> Response:
    return Response(metrics.as_prometheus(), mimetype="text/plain")


@app.errorhandler(404)
def handle_not_found(error):
    """Return an OCPI-style envelope for unknown /ocpi/ paths.

    Non-OCPI paths keep Flask's default 404 handling.
    """
    if not request.path.startswith("/ocpi/"):
        return error
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message="Not supported",
        error_code="NOT_SUPPORTED",
        http_status=404,
    )


@app.get("/ocpi/versions")
@app.get("/b/<backend_id>/ocpi/versions")
def list_versions(backend_id: Optional[str] = None):
    auth_error = _guard_backend()
    if auth_error:
        return auth_error

    base_url = _build_base_url(request, ocpi_cfg)
    return ocpi_response(
        data=[{"version": version, "url": f"{base_url}/ocpi/{version}"} for version in SUPPORTED_VERSIONS],
    )


@app.get("/ocpi/<version>")
@app.get("/b/<backend_id>/ocpi/<version>")
def version_details(version: str, backend_id: Optional[str] = None):
    auth_error = _guard_backend()
    if auth_error:
        return auth_error

    version_error = _validate_version(version)
    if version_error:
        return version_error

    base_url = _build_base_url(request, ocpi_cfg)
    endpoints = _version_endpoints(version, base_url)
    return ocpi_response(data={"version": version, "endpoints": endpoints})


@app.route("/ocpi/<version>/credentials", methods=["GET", "POST", "PUT", "DELETE"])
@app.route("/b/<backend_id>/ocpi/<version>/credentials", methods=["GET", "POST", "PUT", "DELETE"])
def credentials(version: str, backend_id: Optional[str] = None):
    """OCPI credentials module handler.

    GET      -> return our current credentials payload for this backend.
    DELETE   -> clear the stored peer token/URL (registration revoked).
    POST/PUT -> persist the peer's token and URL from the request body
                (both methods are handled identically here).
    """
    auth_error = _guard_backend()
    if auth_error:
        return auth_error

    version_error = _validate_version(version)
    if version_error:
        return version_error

    backend_row = backend_repo.get_backend(backend_id)
    base_url = _build_base_url(request, ocpi_cfg)

    if request.method == "GET":
        return ocpi_response(data=_credentials_payload(base_url, backend_row))

    if request.method == "DELETE":
        # Revocation requires a known backend row; GET above tolerates None.
        if not backend_row:
            return ocpi_response(
                data=None,
                status_code=CLIENT_ERROR_CODE,
                status_message="Backend not found",
                error_code="NOT_FOUND",
                http_status=404,
            )
        # clear_missing=True wipes every stored peer credential field.
        backend_repo.update_credentials_exchange(
            backend_id,
            peer_token=None,
            peer_url=None,
            remote_versions_url=None,
            active_version=None,
            status="credentials revoked",
            clear_missing=True,
        )
        try:
            # Handshake history is best-effort; never fail the request over it.
            backend_key = backend_row.get("backend_id") if backend_row else backend_id
            handshake_repo.record(
                backend_key,
                state="open",
                status="credentials revoked",
                detail=f"version={version}",
                token=None,
                peer_url=None,
            )
        except Exception:
            logger.debug("Handshake logging failed", exc_info=True)
        # Re-read so the response reflects the cleared state.
        updated_backend = backend_repo.get_backend(backend_id)
        return ocpi_response(data=_credentials_payload(base_url, updated_backend))

    # POST/PUT: the peer registers (or re-registers) its credentials.
    payload = request.get_json(force=True, silent=True)
    # "roles" must be present per OCPI, but its value is not inspected here.
    valid, missing = validate_json_payload(payload, ["token", "url", "roles"])
    if not valid:
        status_message = missing if missing == "JSON body is required" else f"Missing field: {missing}"
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message=status_message,
            error_code="FORMAT_ERROR" if missing == "JSON body is required" else "MISSING_FIELD",
            http_status=400,
        )

    # Normalize blank strings to None so they are not persisted as credentials.
    peer_token = str(payload.get("token") or "").strip() or None
    peer_url = str(payload.get("url") or "").strip() or None
    backend_repo.update_credentials_exchange(
        backend_id,
        peer_token=peer_token,
        peer_url=peer_url,
        remote_versions_url=peer_url,
        active_version=version,
        status="credentials updated",
    )
    try:
        # Best-effort handshake log: "closed" once we hold a peer token.
        backend_key = backend_row.get("backend_id") if backend_row else backend_id
        handshake_repo.record(
            backend_key,
            state="closed" if peer_token else "open",
            status="credentials updated",
            detail=f"version={version}",
            token=peer_token,
            peer_url=peer_url,
        )
    except Exception:
        logger.debug("Handshake logging failed", exc_info=True)
    updated_backend = backend_repo.get_backend(backend_id)
    return ocpi_response(data=_credentials_payload(base_url, updated_backend))


def _trigger_rehandshake(backend_id: Optional[int]) -> tuple[bool, str, Optional[str]]:
    """Run the full OCPI registration handshake against a peer backend.

    Flow: GET the peer's versions endpoint -> pick a mutually supported
    version -> GET its details -> locate the peer's credentials endpoint ->
    POST our credentials (falling back to PUT on 404/405) -> persist the
    peer token returned.

    Returns:
        (success, human-readable status message, last URL contacted or None).
    """
    backend = backend_repo.get_backend(backend_id)
    if not backend:
        return False, "Backend not found", None

    # Prefer the discovered versions URL, then legacy/peer fields, then the
    # configured base URL.
    versions_url = (
        (backend.get("remote_versions_url") or "").strip()
        or (backend.get("peer_versions_url") or "").strip()
        or (backend.get("url") or "").strip()
    )
    token = (backend.get("token") or backend.get("peer_token") or "").strip()
    if not versions_url:
        return False, "versions url missing", None
    if not token:
        return False, "bootstrap token missing", None

    # Avoid double-prefixing when the stored token already carries "Token ".
    headers = {"Authorization": token if token.startswith("Token ") else f"Token {token}"}
    try:
        versions_resp = requests.get(versions_url, headers=headers, timeout=15)
        versions_payload = versions_resp.json()
    except Exception as exc:
        return False, f"versions failed: {exc}", versions_url
    versions_data = _extract_ocpi_data(versions_payload) or []
    # NOTE(review): status is checked after JSON parsing, so a non-JSON error
    # body reports "versions failed" rather than the HTTP status — confirm
    # this ordering is intended.
    if versions_resp.status_code >= 400:
        return False, f"versions status {versions_resp.status_code}", versions_url
    selected_version, version_url = _select_supported_version(versions_data)
    if not selected_version or not version_url:
        return False, "no compatible version", versions_url

    try:
        details_resp = requests.get(version_url, headers=headers, timeout=15)
        details_payload = details_resp.json()
    except Exception as exc:
        return False, f"details failed: {exc}", versions_url

    details_data = _extract_ocpi_data(details_payload) or {}
    if details_resp.status_code >= 400:
        return False, f"details status {details_resp.status_code}", versions_url

    # Scan the advertised endpoints for the peer's credentials module URL.
    endpoints = details_data.get("endpoints") or [] if isinstance(details_data, Mapping) else []
    credentials_url = None
    for endpoint in endpoints:
        if isinstance(endpoint, Mapping) and endpoint.get("identifier") == "credentials":
            credentials_url = endpoint.get("url")
            break
    if not credentials_url:
        return False, "credentials endpoint missing", versions_url

    base_url = _build_base_url(request, ocpi_cfg)
    payload = _credentials_payload(base_url, backend)
    try:
        cred_resp = requests.post(credentials_url, json=payload, headers=headers, timeout=15)
        # Some peers only accept PUT for re-registration.
        if cred_resp.status_code in (404, 405):
            cred_resp = requests.put(credentials_url, json=payload, headers=headers, timeout=15)
        cred_data = cred_resp.json()
    except Exception as exc:
        return False, f"credentials failed: {exc}", credentials_url

    response_data = _extract_ocpi_data(cred_data) or {}
    peer_token = response_data.get("token") if isinstance(response_data, Mapping) else None
    peer_url = response_data.get("url") if isinstance(response_data, Mapping) else None
    # Persist the exchange result even when no token was returned, so the
    # stored status reflects the last attempt.
    backend_repo.update_credentials_exchange(
        backend_id,
        peer_token=peer_token,
        peer_url=peer_url,
        remote_versions_url=versions_url,
        active_version=selected_version,
        status=f"credentials response {cred_resp.status_code}",
    )
    handshake_repo.record(
        backend_id,
        state="closed" if peer_token else "open",
        status=f"credentials response {cred_resp.status_code}",
        detail=f"version={selected_version}",
        token=peer_token,
        peer_url=peer_url,
    )
    if peer_token:
        return True, "credentials updated", credentials_url
    return False, "credentials finished without peer token", credentials_url


@app.get("/ocpi/handshakes")
@app.get("/b/<backend_id>/ocpi/handshakes")
def list_handshakes(backend_id: Optional[str] = None):
    """Return handshake history, optionally scoped to a single backend."""
    auth_error = _guard_backend()
    if auth_error:
        return auth_error

    mtls_ready = ocpi_cfg.get("ocpp_api_verify_tls", True)
    # Numeric IDs are coerced to int; anything else is passed through as-is.
    try:
        backend_filter = int(backend_id) if backend_id is not None else None
    except (TypeError, ValueError):
        backend_filter = backend_id

    backend_cache: dict[Any, Mapping[str, Any]] = {}
    entries = []
    for row in handshake_repo.list(backend_filter):
        b_id = row.get("backend_id")
        if b_id not in backend_cache:
            # Memoize backend rows so each backend is looked up once.
            backend_cache[b_id] = backend_repo.get_backend(b_id)
        entries.append(
            _handshake_payload(row, mtls_ready=mtls_ready, backend_row=backend_cache[b_id])
        )
    return ocpi_response(data={"handshakes": entries})


@app.post("/ocpi/handshakes/<int:backend_id>/retrigger")
def retrigger_handshake(backend_id: int):
    """Re-run the credentials handshake for a backend and log the outcome."""
    auth_error = _guard_backend()
    if auth_error:
        return auth_error

    success, status, target_url = _trigger_rehandshake(backend_id)
    handshake_repo.record(
        backend_id,
        state="closed" if success else "open",
        status=status,
        detail=target_url,
    )
    if success:
        return ocpi_response(data={"result": "ACCEPTED", "status": status})
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message=status,
        error_code="HANDSHAKE_FAILED",
        http_status=502,
    )


@app.post("/ocpi/<version>/commands/<command>")
@app.post("/b/<backend_id>/ocpi/<version>/commands/<command>")
def ocpi_commands(version: str, command: str, backend_id: Optional[str] = None):
    """Accept an OCPI command, enqueue it, and dispatch it immediately.

    The incoming payload is mapped to the internal shape for *version*,
    persisted in the command queue (for later retries), then dispatched
    synchronously; the response reports ACCEPTED/REJECTED plus any
    reservation/transaction identifiers returned by the dispatch.
    """
    auth_error = _guard_backend(module="commands")
    if auth_error:
        return auth_error

    version_error = _validate_version(version)
    if version_error:
        return version_error

    if not module_is_supported(version, "commands"):
        return _unsupported_module_response("commands", version)

    raw_payload = request.get_json(force=True, silent=True) or {}
    # Map the version-specific OCPI payload to the internal representation.
    mapping = map_incoming_payload(
        "commands", version, raw_payload, validate_required=False
    )
    if mapping.error:
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message=mapping.error,
            error_code="FORMAT_ERROR",
            http_status=400,
        )
    payload = mapping.payload
    command_id = _command_id()
    # NOTE(review): when the lookup succeeds but returns no row, backend_key
    # becomes None, while a lookup *exception* falls back to the raw
    # backend_id — confirm this asymmetry is intended.
    try:
        backend_row = backend_repo.get_backend(backend_id)
        backend_key = backend_row.get("backend_id") if backend_row else None
    except Exception:
        backend_key = backend_id

    # Persist first so a failed dispatch can still be retried from the queue.
    queue_id = command_queue_repo.enqueue(
        backend_id=backend_key,
        command=command,
        ocpi_command_id=command_id,
        ocpi_version=version,
        payload=payload,
    )

    # Fall back to the OCPI command id when the repo did not return a row id.
    queue_identifier: Any = queue_id if queue_id is not None else command_id
    success, response_status, response_body, reservation_id, transaction_id, error_message = _perform_command_dispatch(
        command,
        payload,
        backend_id=backend_key,
        ocpi_command_id=command_id,
        queue_identifier=queue_identifier,
        version=version,
    )

    result = "ACCEPTED" if success else "REJECTED"
    response_data: dict[str, Any] = {"result": result, "command_id": command_id}
    if response_status:
        response_data["status"] = response_status
    if reservation_id is not None:
        response_data["reservation_id"] = reservation_id
    if transaction_id is not None:
        response_data["transaction_id"] = transaction_id
    if error_message:
        response_data["error"] = error_message

    return ocpi_response(data=response_data)


@app.get("/api/command-queue")
def api_command_queue_list():
    """List queued commands with optional status/backend filters and a limit."""
    status_filter = request.args.get("status")
    raw_limit = request.args.get("limit")
    raw_backend = request.args.get("backend_id")

    # Non-numeric limits fall back to the default of 200.
    try:
        limit = int(raw_limit) if raw_limit else 200
    except (TypeError, ValueError):
        limit = 200

    # Non-numeric backend ids are forwarded as-is for the repo to interpret.
    try:
        backend_filter = int(raw_backend) if raw_backend else None
    except (TypeError, ValueError):
        backend_filter = raw_backend

    rows = command_queue_repo.list(backend_id=backend_filter, status=status_filter, limit=limit)
    return ocpi_response(data={"commands": [_serialize_queue_entry(row) for row in rows]})


@app.get("/api/command-queue/<command_ref>")
def api_command_queue_detail(command_ref: str):
    """Return a single queued command by its reference, or an OCPI 404."""
    entry = command_queue_repo.get(command_ref)
    if entry is not None:
        return ocpi_response(data=_serialize_queue_entry(entry))
    return ocpi_response(
        status_code=CLIENT_ERROR_CODE,
        status_message="Command not found",
        error_code="NOT_FOUND",
        http_status=404,
    )


def _retry_command_entry(entry: Mapping[str, Any]) -> tuple[bool, Optional[str], Any, Optional[Any], Optional[Any], Optional[str]]:
    """Re-dispatch a previously queued command entry.

    The stored payload may be a JSON string, a mapping, or missing/corrupt.
    Anything that does not decode to a JSON *object* is treated as an empty
    payload so the dispatch still proceeds with a well-typed mapping.

    Returns the 6-tuple produced by ``_perform_command_dispatch``:
    (success, status, response_body, reservation_id, transaction_id, error).
    """
    raw_payload = entry.get("payload")
    parsed_payload: Mapping[str, Any] = {}
    if isinstance(raw_payload, Mapping):
        parsed_payload = raw_payload
    elif isinstance(raw_payload, str):
        try:
            decoded = json.loads(raw_payload)
        except Exception:
            decoded = None
        # Bug fix: a stored string may decode to a list/str/number; only a
        # JSON object satisfies the Mapping contract expected by dispatch.
        if isinstance(decoded, Mapping):
            parsed_payload = decoded
    return _perform_command_dispatch(
        str(entry.get("command") or ""),
        parsed_payload,
        backend_id=entry.get("backend_id"),
        ocpi_command_id=str(entry.get("ocpi_command_id") or ""),
        queue_identifier=entry.get("id"),
        version=entry.get("ocpi_version"),
    )


@app.post("/api/command-queue/<command_ref>/retry")
def api_command_queue_retry(command_ref: str):
    """Re-dispatch a queued command unless it is missing or cancelled."""
    entry = command_queue_repo.get(command_ref)
    if entry is None:
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Command not found",
            error_code="NOT_FOUND",
            http_status=404,
        )
    if entry.get("status") == "cancelled":
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Command is cancelled",
            error_code="CANCELLED",
            http_status=409,
        )

    outcome = _retry_command_entry(entry)
    success, status_value, response_body, reservation_id, transaction_id, error_message = outcome
    body: dict[str, Any] = {
        "result": "ACCEPTED" if success else "REJECTED",
        "status": status_value,
        "command_id": entry.get("ocpi_command_id"),
        "reservation_id": reservation_id,
        "transaction_id": transaction_id,
        "response": response_body,
    }
    if error_message:
        body["error"] = error_message
    return ocpi_response(data=body)


def _serialize_queue_entry(entry: Mapping[str, Any]) -> dict[str, Any]:
    serialized = dict(entry)
    for key in ("created_at", "updated_at"):
        value = serialized.get(key)
        if isinstance(value, datetime):
            serialized[key] = value.isoformat()
    return serialized


@app.post("/api/command-queue/<command_ref>/cancel")
def api_command_queue_cancel(command_ref: str):
    """Cancel a pending queued command; already-succeeded commands cannot be."""
    entry = command_queue_repo.get(command_ref)
    if entry is None:
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Command not found",
            error_code="NOT_FOUND",
            http_status=404,
        )
    if entry.get("status") == "succeeded":
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Command already finished",
            error_code="ALREADY_DONE",
            http_status=409,
        )
    command_queue_repo.cancel(entry.get("id"))
    return ocpi_response(
        data={"result": "CANCELLED", "command_id": entry.get("ocpi_command_id")}
    )


@app.get("/api/sync-runs")
def api_sync_runs():
    """Summarize sync state: latest runs, run log, dead-letter exports, jobs."""
    raw_modules = request.args.get("modules") or ""
    module_filter = [m.strip().lower() for m in raw_modules.split(",") if m.strip()] or None

    runs = sync_run_repo.latest_runs(modules=module_filter)
    logs = sync_run_repo.list_runs(modules=module_filter, limit=200)
    dead_letter = data_repo.list_dead_letter_exports(record_types=["session", "cdr"], limit=200)
    jobs = [
        {
            "name": "location_sync",
            "interval": location_sync_job.interval,
            "mode": getattr(location_sync_job, "mode", None),
            "running": location_sync_job.is_running(),
        },
        {
            "name": "tariff_sync",
            "interval": tariff_sync_job.interval,
            "running": tariff_sync_job.is_running(),
        },
        {
            "name": "export_retry",
            "interval": export_retry_job.interval,
            "running": export_retry_job.is_running(),
            "max_attempts": export_retry_job.max_attempts,
        },
    ]
    return ocpi_response(
        data={"runs": runs, "dead_letter": dead_letter, "jobs": jobs, "logs": logs}
    )


@app.post("/api/exports/<int:export_id>/retry")
def api_retry_export(export_id: int):
    """Replay a failed export and report whether the peer accepted it."""
    result = export_retry_job.retry_export(export_id)
    if not result.get("found", False):
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message="Export not found",
            error_code="NOT_FOUND",
            http_status=404,
        )

    success = bool(result.get("success"))
    body = {
        "export_id": export_id,
        "success": success,
        "status_code": result.get("status_code"),
        "response": result.get("response_body"),
        "should_retry": result.get("should_retry"),
        "record_type": result.get("record_type"),
    }
    if success:
        return ocpi_response(
            data=body,
            status_code=SUCCESS_CODE,
            status_message="Replay accepted",
            http_status=200,
        )
    return ocpi_response(
        data=body,
        status_code=CLIENT_ERROR_CODE,
        status_message="Replay failed",
        http_status=502,
    )


@app.route("/ocpi/<version>/chargingprofiles/<session_id>", methods=["POST", "DELETE"])
@app.route("/b/<backend_id>/ocpi/<version>/chargingprofiles/<session_id>", methods=["POST", "DELETE"])
def ocpi_charging_profiles(
    version: str, session_id: str, backend_id: Optional[str] = None
):
    """OCPI chargingprofiles handler: POST sets a profile, DELETE clears it.

    The mapped payload is forwarded synchronously to the OCPP side and the
    outcome is recorded in the command history; the OCPI response only
    carries ACCEPTED/REJECTED plus a generated command id.
    """
    auth_error = _guard_backend(module="chargingprofiles")
    if auth_error:
        return auth_error

    version_error = _validate_version(version)
    if version_error:
        return version_error

    if not module_is_supported(version, "chargingprofiles"):
        return _unsupported_module_response("chargingprofiles", version)

    raw_payload = request.get_json(force=True, silent=True) or {}
    mapping = map_incoming_payload(
        "chargingprofiles", version, raw_payload, validate_required=False
    )
    if mapping.error:
        return ocpi_response(
            status_code=CLIENT_ERROR_CODE,
            status_message=mapping.error,
            error_code="FORMAT_ERROR",
            http_status=400,
        )
    payload = mapping.payload
    # The URL's session id wins only when the body did not supply one.
    payload.setdefault("session_id", session_id)
    command_id = _command_id()
    result = "ACCEPTED"
    success = True
    response_status = None
    response_body: Any = None
    try:
        if request.method == "DELETE":
            ocpp_response = _clear_charging_profile(payload)
        else:
            ocpp_response = _dispatch_charging_profile(payload)
        response_status = ocpp_response.get("status") if isinstance(ocpp_response, Mapping) else None
        response_body = ocpp_response
    except Exception as exc:  # pragma: no cover - defensive
        # Any dispatch failure is reported as REJECTED with the error text.
        success = False
        result = "REJECTED"
        response_body = {"error": str(exc)}

    # NOTE(review): as in ocpi_commands, a missing row yields backend_key=None
    # while a lookup exception falls back to the raw backend_id — confirm.
    try:
        backend_row = backend_repo.get_backend(backend_id)
        backend_key = backend_row.get("backend_id") if backend_row else None
    except Exception:
        backend_key = backend_id

    # Record the attempt regardless of outcome for auditing/retry tooling.
    command_repo.record(
        backend_id=backend_key,
        command="SET_PROFILE" if request.method == "POST" else "CLEAR_PROFILE",
        module="chargingprofiles",
        ocpi_command_id=command_id,
        success=success,
        response_status=response_status,
        response_body=response_body,
    )

    return ocpi_response(data={"result": result, "command_id": command_id})


def _register_versioned_blueprints():
    """Mount per-version locations/module blueprints under both URL prefixes.

    Each supported version gets its routes twice: once under /ocpi/<version>
    and once under the backend-scoped /b/<backend_id>/ocpi/<version> prefix,
    using distinct blueprint suffixes to keep Flask endpoint names unique.
    """
    for version in SUPPORTED_VERSIONS:
        suffix = f"_{version.replace('.', '_')}"
        mounts = (
            (f"/ocpi/{version}", suffix),
            (f"/b/<backend_id>/ocpi/{version}", f"{suffix}_backend"),
        )

        if module_is_supported(version, "locations"):
            for prefix, bp_suffix in mounts:
                bp = create_locations_blueprint(version, suffix=bp_suffix)
                app.register_blueprint(bp, url_prefix=f"{prefix}/locations")

        for module_name in ("sessions", "cdrs", "tariffs", "tokens"):
            if not module_is_supported(version, module_name):
                continue
            for prefix, bp_suffix in mounts:
                bp = create_module_blueprint(module_name, version, suffix=bp_suffix)
                app.register_blueprint(bp, url_prefix=f"{prefix}/{module_name}")


# Register all version-specific OCPI routes before the app starts serving.
_register_versioned_blueprints()

# Launch the background jobs (location sync, tariff sync, export retry).
scheduler.start()


if __name__ == "__main__":
    logger.info("Starting OCPI API on %s:%s", listen_host, listen_port)
    app.run(host=listen_host, port=listen_port)
