from flask import (
    Flask,
    render_template,
    request,
    redirect,
    url_for,
    Response,
    stream_with_context,
    jsonify,
    send_file,
    send_from_directory,
    abort,
)
import copy
import csv
import pymysql
import requests
import os
import datetime
import json
import re
import ftplib
from decimal import Decimal, InvalidOperation
from io import BytesIO, StringIO
import uuid
import logging
import ssl
from pathlib import Path
import threading
import time
import math
from collections import OrderedDict, defaultdict
from typing import Any, Dict, Iterable, List, Optional

try:
    from zoneinfo import ZoneInfo
except ImportError:  # pragma: no cover - Python < 3.9 fallback
    try:
        from backports.zoneinfo import ZoneInfo  # type: ignore
    except ImportError:  # pragma: no cover - best-effort fallback
        ZoneInfo = None  # type: ignore

# ---------------------------------------------------------------------------
# Station identifier normalization
# ---------------------------------------------------------------------------


def normalize_station_id(path: str) -> str:
    """Return a station identifier without leading or trailing slashes."""
    return path.strip("/")

# ---------------------------------------------------------------------------
# Helper functions for op_config storage
# ---------------------------------------------------------------------------


def get_config_value(key):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT config_value FROM op_config WHERE config_key=%s",
                (key,),
            )
            row = cur.fetchone()
            return row["config_value"] if row else None
    finally:
        conn.close()


def set_config_value(key, value):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                INSERT INTO op_config (config_key, config_value)
                VALUES (%s, %s)
                ON DUPLICATE KEY UPDATE config_value=VALUES(config_value)
                """,
                (key, value),
            )
            conn.commit()
    finally:
        conn.close()
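
# Illustrative round trip through op_config (key/value are hypothetical and assume
# config_value is a text column; not executed at import time):
#
#     set_config_value("ocpp_proxy_port", "7020")
#     get_config_value("ocpp_proxy_port")   # -> "7020"
#     get_config_value("missing_key")       # -> None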

# ---------------------------------------------------------------------------
# Configuration handling
# ---------------------------------------------------------------------------

# Optional global configuration file used by the other scripts as well
CONFIG_FILE = "config.json"

try:
    with open(CONFIG_FILE, "r", encoding="utf-8") as f:
        _config = json.load(f)
except FileNotFoundError:
    _config = {}

log_cfg = _config.get("log_levels", {})
level_name = log_cfg.get("dashboard", "INFO").upper()
LOG_LEVEL = getattr(logging, level_name, logging.INFO)
logging.basicConfig(
    level=LOG_LEVEL,
    format="%(asctime)s %(levelname)s:%(name)s:%(message)s",
)

logger = logging.getLogger(__name__)

# Database credentials can be provided via environment or config.json
_mysql_cfg = {
    "host": "82.165.76.205",
    "port": 3306,
    "user": "walle",
    "password": "zm0dem123",
    "db": "ocppproxy",
    "charset": "utf8mb4",
}
_mysql_cfg.update(_config.get("mysql", {}))

DB_HOST = os.getenv("DB_HOST", _mysql_cfg.get("host"))
DB_PORT = int(os.getenv("DB_PORT", _mysql_cfg.get("port", 3306)))
DB_USER = os.getenv("DB_USER", _mysql_cfg.get("user"))
DB_PASSWORD = os.getenv("DB_PASSWORD", _mysql_cfg.get("password"))
DB_NAME = os.getenv("DB_NAME", _mysql_cfg.get("db"))
DB_CHARSET = _mysql_cfg.get("charset", "utf8mb4")


# Only enforce a collation when one is explicitly configured.  Previous
# iterations tried to guess a sensible default for utf8mb4 deployments, but
# that still produced ``Illegal mix of collations`` errors whenever the actual
# database default differed from our guess.  Leaving the decision to MySQL keeps
# the dashboard aligned with the live schema unless the operator overrides it in
# config.json.
DB_COLLATION = _mysql_cfg.get("collation")

# The RFID comparisons inherit the configured override when present; otherwise the
# collation of the database columns is used, which avoids mismatches with the live
# schema.  Operators can explicitly request a different collation via the
# ``rfid_collation`` config entry.
_rfid_collation = _mysql_cfg.get("rfid_collation") or DB_COLLATION
RFID_COLLATE_CLAUSE = None

_chargepoint_collation = _mysql_cfg.get("chargepoint_collation") or DB_COLLATION
CHARGEPOINT_COLLATE_CLAUSE = ""

# ftp://diag:rem0tec0nnect@217.160.79.201/home/diag
FTP_HOST = os.getenv('FTP_HOST', '217.160.79.201')
FTP_USER = os.getenv('FTP_USER', 'diag')
FTP_PASSWORD = os.getenv('FTP_PASSWORD', 'rem0tec0nnect')
FTP_DIR = os.getenv('FTP_DIR', '/')

VIRTUAL_CHARGER_API = _config.get("virtual_charger_api", "http://127.0.0.1:9752")

# Time zone helpers
UTC = datetime.timezone.utc
if ZoneInfo is not None:
    EUROPE_BERLIN = ZoneInfo("Europe/Berlin")
else:  # pragma: no cover - degraded timezone handling without zoneinfo data
    EUROPE_BERLIN = datetime.timezone(datetime.timedelta(hours=1), name="Europe/Berlin")


# Utility to get a new DB connection
def get_db_conn():
    init_cmd = f"SET NAMES {DB_CHARSET}"
    if DB_COLLATION:
        init_cmd += f" COLLATE {DB_COLLATION}"

    return pymysql.connect(
        host=DB_HOST,
        port=DB_PORT,
        user=DB_USER,
        password=DB_PASSWORD,
        db=DB_NAME,
        charset=DB_CHARSET,
        init_command=init_cmd,
        cursorclass=pymysql.cursors.DictCursor,
    )
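
# Typical caller pattern used throughout this module (sketch only): open a connection,
# query through the DictCursor, and always close the connection in ``finally``.
#
#     conn = get_db_conn()
#     try:
#         with conn.cursor() as cur:
#             cur.execute("SELECT 1 AS ok")
#             row = cur.fetchone()  # DictCursor -> {"ok": 1}
#     finally:
#         conn.close()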


def _get_column_collation(table_name: str, column_name: str):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT collation_name
                FROM information_schema.columns
                WHERE table_schema = DATABASE()
                    AND table_name = %s
                    AND column_name = %s
                """,
                (table_name, column_name),
            )
            row = cur.fetchone()
            if not row:
                return None

            # Some MySQL drivers return column names in uppercase. Work with
            # whatever casing we get instead of assuming a specific one.
            for key, value in row.items():
                if key.lower() == "collation_name":
                    return value

            return None
    except pymysql.MySQLError as exc:
        logging.getLogger(__name__).warning(
            "Could not determine collation for %s.%s: %s",
            table_name,
            column_name,
            exc,
        )
        return None
    finally:
        conn.close()


def _get_connection_collation():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT @@collation_connection AS collation")
            row = cur.fetchone()
            if not row:
                return None

            # Support any casing PyMySQL decides to return.
            for key, value in row.items():
                if key.lower() == "collation":
                    return value

            return None
    except pymysql.MySQLError as exc:
        logging.getLogger(__name__).warning(
            "Could not determine connection collation: %s",
            exc,
        )
        return None
    finally:
        conn.close()


def _get_collation_charset(collation_name: str):
    """Return the character set associated with *collation_name*."""

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT character_set_name
                FROM information_schema.collations
                WHERE collation_name = %s
                """,
                (collation_name,),
            )
            row = cur.fetchone()
            if not row:
                return None

            for key, value in row.items():
                if key.lower() == "character_set_name":
                    return value

            return None
    except pymysql.MySQLError as exc:
        logging.getLogger(__name__).warning(
            "Could not determine charset for collation %s: %s",
            collation_name,
            exc,
        )
        return None
    finally:
        conn.close()


def _refresh_rfid_collation():
    """Attempt to determine the collation used for RFID comparisons."""

    global _rfid_collation

    if _rfid_collation:
        return _rfid_collation

    logger = logging.getLogger(__name__)

    try:
        session_collation = _get_column_collation("op_charging_sessions", "id_tag")
    except Exception as exc:
        logger.warning(
            "Unable to detect collation for op_charging_sessions.id_tag: %s", exc
        )
        session_collation = None

    try:
        vehicle_collation = _get_column_collation("bhi_vehicle_list", "id_tag")
    except Exception as exc:
        logger.warning(
            "Unable to detect collation for bhi_vehicle_list.id_tag: %s", exc
        )
        vehicle_collation = None

    detected = [c for c in (session_collation, vehicle_collation) if c]
    if detected:
        _rfid_collation = detected[0]
        unique = {c for c in detected if c}
        if len(unique) > 1:
            logger.warning(
                "RFID column collations differ (%s); using %s for comparisons",
                ", ".join(sorted(unique)),
                _rfid_collation,
            )
        return _rfid_collation

    try:
        _rfid_collation = _get_connection_collation()
    except Exception as exc:
        logger.warning("Unable to detect connection collation: %s", exc)
        _rfid_collation = None
    return _rfid_collation


def _ensure_rfid_collate_clause():
    """Populate :data:`RFID_COLLATE_CLAUSE` when it is missing."""

    global RFID_COLLATE_CLAUSE

    if RFID_COLLATE_CLAUSE is not None:
        return RFID_COLLATE_CLAUSE

    collation = _refresh_rfid_collation()
    if collation:
        charset = _get_collation_charset(collation)
        if charset and DB_CHARSET and charset.lower() != DB_CHARSET.lower():
            logging.getLogger(__name__).warning(
                "RFID collation %s uses charset %s incompatible with connection charset %s;"
                " falling back to binary comparisons",
                collation,
                charset,
                DB_CHARSET,
            )
            RFID_COLLATE_CLAUSE = ""
            return RFID_COLLATE_CLAUSE

        RFID_COLLATE_CLAUSE = f" COLLATE {collation}"
        return RFID_COLLATE_CLAUSE

    logging.getLogger(__name__).error(
        "Unable to determine RFID collation clause; ensure the database is reachable "
        "and the relevant tables are present. Falling back to binary comparisons.",
    )
    RFID_COLLATE_CLAUSE = ""
    return RFID_COLLATE_CLAUSE


def _build_rfid_match_condition(lhs: str, rhs: str) -> str:
    """Return the SQL snippet used to match RFID values across tables."""

    clause = _ensure_rfid_collate_clause()
    if clause:
        return f"{lhs}{clause} = {rhs}{clause}"

    # When the database does not expose a compatible collation we fall back to a
    # binary comparison so MySQL stops trying to implicitly reconcile incompatible
    # collations (the "Illegal mix of collations" failures seen in production).
    return f"BINARY {lhs} = BINARY {rhs}"


def _refresh_chargepoint_collation():
    """Determine the collation used for chargepoint identifiers."""

    global _chargepoint_collation

    if _chargepoint_collation:
        return _chargepoint_collation

    try:
        session_collation = _get_column_collation(
            "op_charging_sessions", "station_id"
        )
    except Exception as exc:  # pragma: no cover - defensive fallback
        logging.getLogger(__name__).warning(
            "Unable to detect collation for op_charging_sessions.station_id: %s",
            exc,
        )
        session_collation = None

    try:
        chargepoint_collation = _get_column_collation(
            "bhi_chargepoint_list", "chargepoint_id"
        )
    except Exception as exc:  # pragma: no cover - defensive fallback
        logging.getLogger(__name__).warning(
            "Unable to detect collation for bhi_chargepoint_list.chargepoint_id: %s",
            exc,
        )
        chargepoint_collation = None

    detected = [c for c in (session_collation, chargepoint_collation) if c]
    if detected:
        unique = sorted(set(detected))
        if len(unique) == 1:
            _chargepoint_collation = unique[0]
            return _chargepoint_collation

        logging.getLogger(__name__).warning(
            "Chargepoint column collations differ (%s); falling back to binary comparisons",
            ", ".join(unique),
        )
        _chargepoint_collation = None
        return None

    try:
        _chargepoint_collation = _get_connection_collation()
    except Exception as exc:  # pragma: no cover - defensive fallback
        logging.getLogger(__name__).warning(
            "Unable to detect connection collation for chargepoint matching: %s",
            exc,
        )
        _chargepoint_collation = None

    return _chargepoint_collation


def _ensure_chargepoint_collate_clause():
    """Populate :data:`CHARGEPOINT_COLLATE_CLAUSE` when necessary."""

    global CHARGEPOINT_COLLATE_CLAUSE

    if CHARGEPOINT_COLLATE_CLAUSE:
        return CHARGEPOINT_COLLATE_CLAUSE

    collation = _refresh_chargepoint_collation()
    if collation:
        charset = _get_collation_charset(collation)
        if charset and DB_CHARSET and charset.lower() != DB_CHARSET.lower():
            logging.getLogger(__name__).warning(
                "Chargepoint collation %s uses charset %s incompatible with connection charset %s;"
                " falling back to binary comparisons",
                collation,
                charset,
                DB_CHARSET,
            )
            CHARGEPOINT_COLLATE_CLAUSE = ""
        else:
            CHARGEPOINT_COLLATE_CLAUSE = f" COLLATE {collation}"
    else:
        CHARGEPOINT_COLLATE_CLAUSE = ""

    return CHARGEPOINT_COLLATE_CLAUSE


def _build_chargepoint_match_condition(lhs: str, rhs: str) -> str:
    """Return the SQL snippet used to match chargepoint identifiers."""

    clause = _ensure_chargepoint_collate_clause()
    if clause:
        return f"{lhs}{clause} = {rhs}{clause}"

    return f"BINARY {lhs} = BINARY {rhs}"


def _build_session_validity_clause(alias: str = "s") -> str:
    """Return the SQL snippet used to ensure sessions are marked as valid."""

    return f"COALESCE({alias}.is_valid, 1) <> 0"


try:
    _ensure_rfid_collate_clause()
except Exception:  # pragma: no cover - defensive startup guard
    # The application can continue starting up; _build_rfid_match_condition retries the
    # helper the next time a query needs the clause.
    logger.warning("Deferred RFID collation detection during startup", exc_info=True)

def load_runtime_config():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT config_key, config_value FROM op_config")
            rows = cur.fetchall()
    finally:
        conn.close()
    return {r["config_key"]: r["config_value"] for r in rows}

_runtime_cfg = load_runtime_config()
_proxy_ip = _runtime_cfg.get("ocpp_proxy_ip", "82.165.103.236")
_proxy_port = int(_runtime_cfg.get("ocpp_proxy_port", 7020))
PROXY_BASE_URL = f"http://{_proxy_ip}:{_proxy_port}"
CONNECTED_ENDPOINT = PROXY_BASE_URL + "/getConnecteEVSE"
STATS_ENDPOINT = PROXY_BASE_URL + "/connectedWallboxes"

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_FOLDER = os.path.join(BASE_DIR, "html", "static")

app = Flask(
    __name__,
    static_url_path="/cf/static",
    static_folder=STATIC_FOLDER,
    template_folder=os.path.join(BASE_DIR, "templates"),
)

COOKIE_NAME = "auth_token"
PASSWORD_KEY = "dashboard_password"
TOKEN_KEY = "dashboard_token"
BASIC_AUTH_USERNAME = "charge"
BASIC_AUTH_PASSWORD = "athome"


DEMO_TEMPLATES = {
    "dashboard": "demo_bhi_dashboard.html",
    "degradation-tool": "demo_bhi_degradation_tool.html",
    "manual-odometer-entry": "demo_bhi_manual_odometer_entry.html",
    "manual-soh-entry": "demo_bhi_manual_soh_entry.html",
    "methodology": "demo_bhi_methodology.html",
}

VEHICLE_DATA_SOURCES = [
    "Wallbox only",
    "FleetLite",
    "Tesla Fleet API"
]
VEHICLE_DATA_SOURCE_SET = set(VEHICLE_DATA_SOURCES)

CHARGEPOINT_POWER_TYPES = [
    "AC",
    "DC",
]
CHARGEPOINT_POWER_TYPE_SET = set(CHARGEPOINT_POWER_TYPES)

CHARGEPOINT_INSTALL_LOCATIONS = [
    "indoor",
    "outdoor",
]
CHARGEPOINT_INSTALL_LOCATION_SET = set(CHARGEPOINT_INSTALL_LOCATIONS)

CHARGEPOINT_COUNTRY_CODES = [
    "DE",
    "FR",
    "IT",
    "NL",
]
CHARGEPOINT_COUNTRY_CODE_SET = set(CHARGEPOINT_COUNTRY_CODES)

CHARGEPOINT_BOOLEAN_OPTIONS = [
    "no",
    "yes",
]
CHARGEPOINT_BOOLEAN_OPTION_SET = set(CHARGEPOINT_BOOLEAN_OPTIONS)


SESSION_HIGHLIGHT_TABLE = "bhi_charging_session_highlights"
SESSION_CUSTOM_VALUES_TABLE = "bhi_session_custom_values"
OBD_MEASUREMENTS_TABLE = "bhi_obd_measurements"


def _detect_session_id_column_type():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT column_type
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'op_charging_sessions'
                  AND column_name = 'id'
                """
            )
            row = cur.fetchone()
            if row:
                for key, value in row.items():
                    if key.lower() == "column_type":
                        return value
    finally:
        conn.close()

    logging.getLogger(__name__).warning(
        "Could not determine type of op_charging_sessions.id; defaulting to BIGINT",
    )
    return "BIGINT"


def ensure_charging_session_highlights_table():
    """Ensure the highlight helper table exists with the expected columns."""

    session_id_column_type = _detect_session_id_column_type()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {SESSION_HIGHLIGHT_TABLE} (
                    session_id {session_id_column_type} NOT NULL,
                    highlighted TINYINT(1) NOT NULL DEFAULT 0,
                    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (session_id),
                    CONSTRAINT fk_{SESSION_HIGHLIGHT_TABLE}_session
                        FOREIGN KEY (session_id) REFERENCES op_charging_sessions (id)
                        ON DELETE CASCADE
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )

            cur.execute(
                """
                SELECT column_name
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = %s
                  AND column_name = 'highlighted'
                """,
                (SESSION_HIGHLIGHT_TABLE,),
            )
            has_highlighted = cur.fetchone()
            if not has_highlighted:
                cur.execute(
                    f"ALTER TABLE {SESSION_HIGHLIGHT_TABLE} ADD COLUMN highlighted TINYINT(1) NOT NULL DEFAULT 0 AFTER session_id"
                )

            cur.execute(
                """
                SELECT column_name
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = %s
                  AND column_name = 'updated_at'
                """,
                (SESSION_HIGHLIGHT_TABLE,),
            )
            has_updated_at = cur.fetchone()
            if not has_updated_at:
                cur.execute(
                    f"""
                    ALTER TABLE {SESSION_HIGHLIGHT_TABLE}
                        ADD COLUMN updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
                        ON UPDATE CURRENT_TIMESTAMP
                """
                )

            cur.execute(
                """
                SELECT column_type
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = %s
                  AND column_name = 'session_id'
                """,
                (SESSION_HIGHLIGHT_TABLE,),
            )
            row = cur.fetchone()
            highlight_session_column_type = None
            if row:
                for key, value in row.items():
                    if key.lower() == "column_type":
                        highlight_session_column_type = value
                        break

            if highlight_session_column_type:
                def _normalize_column_type(value):
                    return value.replace(" ", "").lower()

                if _normalize_column_type(highlight_session_column_type) != _normalize_column_type(session_id_column_type):
                    cur.execute(
                        f"ALTER TABLE {SESSION_HIGHLIGHT_TABLE} MODIFY COLUMN session_id {session_id_column_type} NOT NULL"
                    )

            constraint_name = f"fk_{SESSION_HIGHLIGHT_TABLE}_session"
            cur.execute(
                """
                SELECT constraint_name
                FROM information_schema.TABLE_CONSTRAINTS
                WHERE table_schema = DATABASE()
                  AND table_name = %s
                  AND constraint_name = %s
                  AND constraint_type = 'FOREIGN KEY'
                """,
                (SESSION_HIGHLIGHT_TABLE, constraint_name),
            )
            has_fk = cur.fetchone()
            if not has_fk:
                cur.execute(
                    f"""
                    ALTER TABLE {SESSION_HIGHLIGHT_TABLE}
                        ADD CONSTRAINT {constraint_name}
                            FOREIGN KEY (session_id) REFERENCES op_charging_sessions (id)
                            ON DELETE CASCADE
                    """
                )

        conn.commit()
    finally:
        conn.close()


def ensure_session_custom_values_table():
    session_id_column_type = _detect_session_id_column_type()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {SESSION_CUSTOM_VALUES_TABLE} (
                    session_id {session_id_column_type} NOT NULL,
                    soc_analysis_start DECIMAL(5,2) DEFAULT NULL,
                    soc_analysis_stop DECIMAL(5,2) DEFAULT NULL,
                    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
                        ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (session_id),
                    CONSTRAINT fk_{SESSION_CUSTOM_VALUES_TABLE}_session
                        FOREIGN KEY (session_id) REFERENCES op_charging_sessions (id)
                        ON DELETE CASCADE
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )

            column_checks = {
                "soc_analysis_start": (
                    f"ALTER TABLE {SESSION_CUSTOM_VALUES_TABLE} ADD COLUMN soc_analysis_start DECIMAL(5,2) DEFAULT NULL AFTER session_id",
                ),
                "soc_analysis_stop": (
                    f"ALTER TABLE {SESSION_CUSTOM_VALUES_TABLE} ADD COLUMN soc_analysis_stop DECIMAL(5,2) DEFAULT NULL AFTER soc_analysis_start",
                ),
                "updated_at": (
                    f"""
                        ALTER TABLE {SESSION_CUSTOM_VALUES_TABLE}
                            ADD COLUMN updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
                            ON UPDATE CURRENT_TIMESTAMP
                    """,
                ),
            }

            for column_name, statements in column_checks.items():
                cur.execute(
                    """
                    SELECT column_name
                    FROM information_schema.COLUMNS
                    WHERE table_schema = DATABASE()
                      AND table_name = %s
                      AND column_name = %s
                    """,
                    (SESSION_CUSTOM_VALUES_TABLE, column_name),
                )
                if not cur.fetchone():
                    for statement in statements:
                        cur.execute(statement)

        conn.commit()
    finally:
        conn.close()


def _fetch_session_custom_values(session_id):
    ensure_session_custom_values_table()

    result = {
        "soc_analysis_start": None,
        "soc_analysis_stop": None,
    }

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT soc_analysis_start, soc_analysis_stop
                FROM {SESSION_CUSTOM_VALUES_TABLE}
                WHERE session_id = %s
                """,
                (session_id,),
            )
            row = cur.fetchone()
            if row:
                result["soc_analysis_start"] = _normalize_numeric_value(row.get("soc_analysis_start"))
                result["soc_analysis_stop"] = _normalize_numeric_value(row.get("soc_analysis_stop"))
    finally:
        conn.close()

    result["has_values"] = (
        result["soc_analysis_start"] is not None and result["soc_analysis_stop"] is not None
    )
    return result


def _save_session_custom_values(session_id, soc_start, soc_stop):
    ensure_session_custom_values_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                INSERT INTO {SESSION_CUSTOM_VALUES_TABLE} (session_id, soc_analysis_start, soc_analysis_stop)
                VALUES (%s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    soc_analysis_start = VALUES(soc_analysis_start),
                    soc_analysis_stop = VALUES(soc_analysis_stop)
                """,
                (session_id, soc_start, soc_stop),
            )
        conn.commit()
    finally:
        conn.close()


def _delete_session_custom_values(session_id):
    ensure_session_custom_values_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"DELETE FROM {SESSION_CUSTOM_VALUES_TABLE} WHERE session_id = %s",
                (session_id,),
            )
        conn.commit()
    finally:
        conn.close()


def ensure_obd_measurements_table():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {OBD_MEASUREMENTS_TABLE} (
                    transaction_id VARCHAR(191) NOT NULL,
                    csv_data LONGBLOB NOT NULL,
                    row_count INT NOT NULL,
                    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
                        ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (transaction_id)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )
        conn.commit()
    finally:
        conn.close()


def _fetch_obd_measurement(transaction_id):
    ensure_obd_measurements_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT transaction_id, row_count
                FROM {OBD_MEASUREMENTS_TABLE}
                WHERE transaction_id = %s
                """,
                (str(transaction_id),),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _fetch_obd_measurement_file(transaction_id):
    ensure_obd_measurements_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT csv_data, row_count
                FROM {OBD_MEASUREMENTS_TABLE}
                WHERE transaction_id = %s
                """,
                (str(transaction_id),),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _save_obd_measurement(transaction_id, csv_bytes, row_count):
    ensure_obd_measurements_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                INSERT INTO {OBD_MEASUREMENTS_TABLE} (transaction_id, csv_data, row_count)
                VALUES (%s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    csv_data = VALUES(csv_data),
                    row_count = VALUES(row_count)
                """,
                (str(transaction_id), csv_bytes, row_count),
            )
        conn.commit()
    finally:
        conn.close()


def _delete_obd_measurement(transaction_id):
    ensure_obd_measurements_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"DELETE FROM {OBD_MEASUREMENTS_TABLE} WHERE transaction_id = %s",
                (str(transaction_id),),
            )
        conn.commit()
    finally:
        conn.close()


def _count_csv_rows(csv_bytes: bytes) -> int:
    if not csv_bytes:
        return 0

    try:
        text = csv_bytes.decode("utf-8-sig")
    except UnicodeDecodeError:
        text = csv_bytes.decode("utf-8", errors="ignore")

    reader = csv.reader(StringIO(text))
    count = 0
    for row in reader:
        if any((str(cell).strip() for cell in row)):
            count += 1

    return count
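
# Worked example: only rows with at least one non-blank cell are counted, and a UTF-8
# BOM is tolerated.
#
#     _count_csv_rows(b"\xef\xbb\xbftime,soc\n1,80\n,,\n")  # -> 2 (header + one data row)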


def ensure_vehicle_list_table():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_vehicle_list (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    name VARCHAR(255) NOT NULL,
                    manufacturer VARCHAR(255) NOT NULL,
                    model VARCHAR(255) NOT NULL,
                    vehicle_model_id INT NULL,
                    build_year SMALLINT NULL,
                    id_tag VARCHAR(255) NOT NULL,
                    autocharge_id VARCHAR(25) NULL,
                    battery_capacity_net_kwh DECIMAL(6,2) NULL,
                    battery_capacity_gross_kwh DECIMAL(6,2) NULL,
                    ac_charging_losses_percent DECIMAL(5,2) NOT NULL DEFAULT 8,
                    warmup_soc_percent DECIMAL(5,2) NOT NULL DEFAULT 3.00,
                    warmup_minutes DECIMAL(5,2) NOT NULL DEFAULT 20.00,
                    vin VARCHAR(64) NOT NULL,
                    license_plate VARCHAR(64) NULL,
                    data_source VARCHAR(64) NOT NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'license_plate'
                """
            )
            has_license_plate = cur.fetchone()
            if not has_license_plate:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN license_plate VARCHAR(64) NULL AFTER vin"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'id_tag'
                """
            )
            has_id_tag = cur.fetchone()
            if not has_id_tag:
                cur.execute(
                    """
                    SELECT COLUMN_NAME
                    FROM information_schema.COLUMNS
                    WHERE table_schema = DATABASE()
                      AND table_name = 'bhi_vehicle_list'
                      AND column_name = 'rfid_card'
                    """
                )
                has_rfid_card = cur.fetchone()
                if has_rfid_card:
                    cur.execute(
                        "ALTER TABLE bhi_vehicle_list CHANGE COLUMN rfid_card id_tag VARCHAR(255) NOT NULL AFTER model"
                    )
                else:
                    cur.execute(
                        "ALTER TABLE bhi_vehicle_list ADD COLUMN id_tag VARCHAR(255) NOT NULL AFTER model"
                    )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'vehicle_model_id'
                """
            )
            has_vehicle_model_id = cur.fetchone()
            if not has_vehicle_model_id:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN vehicle_model_id INT NULL AFTER model"
                )
            cur.execute(
                """
                SELECT 1
                FROM information_schema.STATISTICS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND index_name = 'idx_bhi_vehicle_list_vehicle_model_id'
                """
            )
            has_vehicle_model_index = cur.fetchone()
            if not has_vehicle_model_index:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD INDEX idx_bhi_vehicle_list_vehicle_model_id (vehicle_model_id)"
                )
            cur.execute(
                """
                SELECT COUNT(*) AS table_count
                FROM information_schema.TABLES
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_models'
                """
            )
            models_table = cur.fetchone() or {}
            models_table_exists = False
            for key, value in models_table.items():
                if key.lower() == 'table_count':
                    models_table_exists = bool(value)
                    break
                if isinstance(value, (int, float)):
                    models_table_exists = bool(value)
                    break
            if models_table_exists:
                cur.execute(
                    """
                    SELECT constraint_name
                    FROM information_schema.TABLE_CONSTRAINTS
                    WHERE table_schema = DATABASE()
                      AND table_name = 'bhi_vehicle_list'
                      AND constraint_name = 'fk_bhi_vehicle_list_vehicle_model'
                      AND constraint_type = 'FOREIGN KEY'
                    """
                )
                has_vehicle_model_fk = cur.fetchone()
                if not has_vehicle_model_fk:
                    cur.execute(
                        """
                        ALTER TABLE bhi_vehicle_list
                            ADD CONSTRAINT fk_bhi_vehicle_list_vehicle_model
                                FOREIGN KEY (vehicle_model_id)
                                REFERENCES bhi_vehicle_models (model_id)
                                ON DELETE SET NULL
                        """
                    )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'autocharge_id'
                """
            )
            has_autocharge_id = cur.fetchone()
            if not has_autocharge_id:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN autocharge_id VARCHAR(25) NULL AFTER id_tag"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'build_year'
                """
            )
            has_build_year = cur.fetchone()
            if not has_build_year:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN build_year SMALLINT NULL AFTER vehicle_model_id"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'battery_capacity_net_kwh'
                """
            )
            has_battery_capacity = cur.fetchone()
            if not has_battery_capacity:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN battery_capacity_net_kwh DECIMAL(6,2) NULL AFTER autocharge_id"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'battery_capacity_gross_kwh'
                """
            )
            has_battery_capacity_gross = cur.fetchone()
            if not has_battery_capacity_gross:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN battery_capacity_gross_kwh DECIMAL(6,2) NULL AFTER battery_capacity_net_kwh"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'ac_charging_losses_percent'
                """
            )
            has_ac_losses = cur.fetchone()
            if not has_ac_losses:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN ac_charging_losses_percent DECIMAL(5,2) NOT NULL DEFAULT 8 AFTER battery_capacity_gross_kwh"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'warmup_soc_percent'
                """
            )
            has_warmup_soc = cur.fetchone()
            if not has_warmup_soc:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN warmup_soc_percent DECIMAL(5,2) NOT NULL DEFAULT 3.00 AFTER ac_charging_losses_percent"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'warmup_minutes'
                """
            )
            has_warmup_minutes = cur.fetchone()
            if not has_warmup_minutes:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN warmup_minutes DECIMAL(5,2) NOT NULL DEFAULT 20.00 AFTER warmup_soc_percent"
                )
            cur.execute(
                """
                SELECT COLUMN_TYPE
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'data_source'
                """
            )
            data_source_column = cur.fetchone()

            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'charge_factor'
                """
            )
            has_charge_factor = cur.fetchone()
            if not has_charge_factor:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN charge_factor DECIMAL(5,2) NULL AFTER data_source"
                )
            cur.execute(
                """
                SELECT COLUMN_NAME
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_vehicle_list'
                  AND column_name = 'confidence'
                """
            )
            has_confidence = cur.fetchone()
            if not has_confidence:
                cur.execute(
                    "ALTER TABLE bhi_vehicle_list ADD COLUMN confidence DECIMAL(5,2) NULL AFTER charge_factor"
                )
        conn.commit()
    finally:
        conn.close()


ensure_vehicle_list_table()


def ensure_vehicle_catalog_tables():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_vehicle_manufacturers (
                    manufacturer_id INT NOT NULL AUTO_INCREMENT,
                    name VARCHAR(255) NOT NULL,
                    created_at TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (manufacturer_id),
                    UNIQUE KEY uq_bhi_vehicle_manufacturers_name (name)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
                """
            )
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_vehicle_models (
                    model_id INT NOT NULL AUTO_INCREMENT,
                    manufacturer_id INT NOT NULL,
                    model_name VARCHAR(255) NOT NULL,
                    model_year_from SMALLINT NULL,
                    model_year_to SMALLINT NULL,
                    battery_capacity_gross_kwh DECIMAL(6,2) DEFAULT NULL,
                    battery_capacity_net_kwh DECIMAL(6,2) DEFAULT NULL,
                    ac_charging_losses_percent DECIMAL(5,2) NOT NULL DEFAULT 8.00,
                    warmup_soc_percent DECIMAL(5,2) NOT NULL DEFAULT 3.00,
                    warmup_minutes DECIMAL(5,2) NOT NULL DEFAULT 20.00,
                    created_at TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (model_id),
                    CONSTRAINT fk_bhi_vehicle_models_manufacturer
                        FOREIGN KEY (manufacturer_id)
                        REFERENCES bhi_vehicle_manufacturers (manufacturer_id)
                        ON DELETE CASCADE
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
                """
            )
        conn.commit()
    finally:
        conn.close()


ensure_vehicle_catalog_tables()


def ensure_chargepoint_list_table():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_chargepoint_list (
                    id INT AUTO_INCREMENT PRIMARY KEY,
                    name VARCHAR(255) NOT NULL,
                    chargepoint_id VARCHAR(255) NOT NULL,
                    latitude DECIMAL(10, 7) NULL,
                    longitude DECIMAL(10, 7) NULL,
                    max_power_kw DECIMAL(10, 2) NULL,
                    power_type ENUM('AC','DC') NOT NULL,
                    installation_location ENUM('indoor','outdoor') NOT NULL DEFAULT 'indoor',
                    country_code CHAR(2) NOT NULL DEFAULT 'DE',
                    supports_autocharge ENUM('no','yes') NOT NULL DEFAULT 'no',
                    can_deliver_soc ENUM('no','yes') NOT NULL DEFAULT 'no',
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )
            cur.execute(
                """
                SELECT COUNT(*) AS column_exists
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_chargepoint_list'
                  AND column_name = 'installation_location'
                """
            )
            if not cur.fetchone()["column_exists"]:
                cur.execute(
                    """
                    ALTER TABLE bhi_chargepoint_list
                    ADD COLUMN installation_location ENUM('indoor','outdoor')
                        NOT NULL DEFAULT 'indoor'
                        AFTER power_type
                    """
                )
            cur.execute(
                """
                SELECT COUNT(*) AS column_exists
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_chargepoint_list'
                  AND column_name = 'country_code'
                """
            )
            if not cur.fetchone()["column_exists"]:
                cur.execute(
                    """
                    ALTER TABLE bhi_chargepoint_list
                    ADD COLUMN country_code CHAR(2)
                        NOT NULL DEFAULT 'DE'
                        AFTER installation_location
                    """
                )
            cur.execute(
                """
                SELECT COUNT(*) AS column_exists
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_chargepoint_list'
                  AND column_name = 'supports_autocharge'
                """
            )
            if not cur.fetchone()["column_exists"]:
                cur.execute(
                    """
                    ALTER TABLE bhi_chargepoint_list
                    ADD COLUMN supports_autocharge ENUM('no','yes')
                        NOT NULL DEFAULT 'no'
                        AFTER country_code
                    """
                )
            cur.execute(
                """
                SELECT COUNT(*) AS column_exists
                FROM information_schema.COLUMNS
                WHERE table_schema = DATABASE()
                  AND table_name = 'bhi_chargepoint_list'
                  AND column_name = 'can_deliver_soc'
                """
            )
            if not cur.fetchone()["column_exists"]:
                cur.execute(
                    """
                    ALTER TABLE bhi_chargepoint_list
                    ADD COLUMN can_deliver_soc ENUM('no','yes')
                        NOT NULL DEFAULT 'no'
                        AFTER supports_autocharge
                    """
                )
        conn.commit()
    finally:
        conn.close()


ensure_chargepoint_list_table()


def ensure_timeseries_weather_table():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_timeseries_weather (
                    id BIGINT AUTO_INCREMENT PRIMARY KEY,
                    chargepoint_id VARCHAR(255) NOT NULL,
                    observation_time DATETIME NOT NULL,
                    raw_payload JSON NOT NULL,
                    temperature DECIMAL(6, 2) NULL,
                    cloud_cover DECIMAL(6, 2) NULL,
                    sunshine DECIMAL(6, 2) NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    UNIQUE KEY uniq_chargepoint_observation (chargepoint_id, observation_time)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """,
            )
        conn.commit()
    finally:
        conn.close()


ensure_timeseries_weather_table()

# ---------------------------------------------------------------------------
# Weather polling (Bright Sky current_weather & Météo-France)
# ---------------------------------------------------------------------------

BRIGHT_SKY_API_URL = "https://api.brightsky.dev/current_weather"
METEO_FRANCE_API_URL = "https://api.open-meteo.com/v1/meteofrance"
WEATHER_POLL_INTERVAL = int(os.getenv("BHI_WEATHER_POLL_INTERVAL", 900))
WEATHER_STORAGE_INTERVAL_MINUTES = int(
    os.getenv("BHI_WEATHER_STORAGE_INTERVAL_MINUTES", 15)
)
logging.info(
    "Weather polling interval configured for %s seconds (%.1f minutes)",
    WEATHER_POLL_INTERVAL,
    WEATHER_POLL_INTERVAL / 60,
)
_weather_thread = None
_weather_thread_lock = threading.Lock()


def _fetch_chargepoints_with_coordinates(country_code):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT chargepoint_id, latitude, longitude
                FROM bhi_chargepoint_list
                WHERE latitude IS NOT NULL
                  AND longitude IS NOT NULL
                  AND country_code = %s
                """,
                (country_code,),
            )
            return cur.fetchall()
    finally:
        conn.close()


def _floor_datetime_to_interval(dt, minutes):
    if minutes <= 0:
        raise ValueError("Interval must be a positive integer")
    normalized = dt.replace(second=0, microsecond=0)
    minute_bucket = (normalized.minute // minutes) * minutes
    return normalized.replace(minute=minute_bucket)
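
# Worked example (timestamps are arbitrary): with the default 15 minute interval,
# 12:37:44 floors to 12:30:00.
#
#     _floor_datetime_to_interval(datetime.datetime(2024, 5, 1, 12, 37, 44), 15)
#     # -> datetime.datetime(2024, 5, 1, 12, 30)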


def _prepare_observation_timestamp(cursor, chargepoint_id, observation_time):
    """Return the normalized observation timestamp for storage.

    Weather APIs sometimes return timestamps that do not align with our
    15-minute storage cadence or repeatedly send the same value. Normalize the
    provided timestamp, ensure we at least cover the current bucket (important
    after restarts), and advance in 15-minute steps relative to the most recent
    stored record for the given chargepoint.
    """

    now = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
    base_time = observation_time or now
    normalized = _floor_datetime_to_interval(
        base_time, WEATHER_STORAGE_INTERVAL_MINUTES
    )

    current_bucket = _floor_datetime_to_interval(now, WEATHER_STORAGE_INTERVAL_MINUTES)
    if normalized < current_bucket:
        normalized = current_bucket

    cursor.execute(
        """
        SELECT observation_time
        FROM bhi_timeseries_weather
        WHERE chargepoint_id = %s
        ORDER BY observation_time DESC
        LIMIT 1
        """,
        (chargepoint_id,)
    )
    last_row = cursor.fetchone()
    last_time = None
    if last_row and last_row.get("observation_time"):
        last_time = last_row["observation_time"].replace(second=0, microsecond=0)
        last_time = _floor_datetime_to_interval(
            last_time, WEATHER_STORAGE_INTERVAL_MINUTES
        )
        if normalized <= last_time:
            normalized = last_time + datetime.timedelta(
                minutes=WEATHER_STORAGE_INTERVAL_MINUTES
            )
            normalized = normalized.replace(second=0, microsecond=0)
    return normalized, last_time
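
# Illustrative bucket advancement (timestamps are hypothetical): if the most recent
# stored observation for a chargepoint is 12:30 and the API reports 12:31 again, the
# helper floors 12:31 back to 12:30, sees that it does not advance past the stored
# bucket, and returns (12:45, 12:30) so consecutive samples stay one storage interval
# apart. _store_weather_sample then decides whether 12:45 is still close enough to
# "now" to be written.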


def _store_weather_sample(chargepoint_id, observation_time, payload, temperature, cloud_cover, sunshine):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            normalized_observation, previous_observation = _prepare_observation_timestamp(
                cur, chargepoint_id, observation_time
            )
            now = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
            max_delta = datetime.timedelta(minutes=WEATHER_STORAGE_INTERVAL_MINUTES)
            if (
                normalized_observation < now - max_delta
                or normalized_observation > now + max_delta
            ):
                logging.info(
                    "Skipping weather sample for %s at %s; outside the %s minute storage window",
                    chargepoint_id,
                    normalized_observation,
                    WEATHER_STORAGE_INTERVAL_MINUTES,
                )
                return normalized_observation, previous_observation, False, False
            cur.execute(
                """
                INSERT INTO bhi_timeseries_weather (
                    chargepoint_id,
                    observation_time,
                    raw_payload,
                    temperature,
                    cloud_cover,
                    sunshine
                ) VALUES (%s, %s, %s, %s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    raw_payload = VALUES(raw_payload),
                    temperature = VALUES(temperature),
                    cloud_cover = VALUES(cloud_cover),
                    sunshine = VALUES(sunshine),
                    updated_at = CURRENT_TIMESTAMP
                """,
                (
                    chargepoint_id,
                    normalized_observation,
                    json.dumps(payload, ensure_ascii=False),
                    temperature,
                    cloud_cover,
                    sunshine,
                ),
            )
            affected_rows = cur.rowcount
        conn.commit()
        inserted = affected_rows == 1
        updated = affected_rows > 1
        return normalized_observation, previous_observation, inserted, updated
    finally:
        conn.close()
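
# Note on the rowcount check above: with MySQL's INSERT ... ON DUPLICATE KEY UPDATE
# (and PyMySQL's default client flags) the affected-row count is 1 for a fresh insert,
# 2 when an existing row was changed, and 0 when the duplicate row already held
# identical values, which is how ``inserted`` and ``updated`` are derived.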


def _parse_iso_datetime(value):
    if not value:
        return None
    try:
        if isinstance(value, (int, float)):
            # Equivalent to the deprecated utcfromtimestamp, kept naive-UTC to match
            # the rest of this module.
            return datetime.datetime.fromtimestamp(
                value, tz=datetime.timezone.utc
            ).replace(tzinfo=None)
        timestamp = str(value)
        if timestamp.endswith("Z"):
            timestamp = timestamp[:-1] + "+00:00"
        dt = datetime.datetime.fromisoformat(timestamp)
        if dt.tzinfo is not None:
            dt = dt.astimezone(datetime.timezone.utc).replace(tzinfo=None)
        return dt
    except ValueError:
        logging.exception("Failed to parse observation timestamp: %s", value)
        return None
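
# Examples (inputs are illustrative): ISO strings with a trailing "Z" or an explicit
# offset are converted to naive UTC, matching the convention used elsewhere here.
#
#     _parse_iso_datetime("2024-05-01T10:00:00Z")       # -> datetime(2024, 5, 1, 10, 0)
#     _parse_iso_datetime("2024-05-01T12:00:00+02:00")  # -> datetime(2024, 5, 1, 10, 0)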


def _select_sunshine(weather_payload):
    if weather_payload is None:
        return None
    for key in ("sunshine", "sunshine_60", "sunshine_30", "sunshine_10"):
        value = weather_payload.get(key)
        if value is not None:
            return value
    return None
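
# Example: Bright Sky may expose sunshine under several keys; the first non-None value
# wins, in the order sunshine, sunshine_60, sunshine_30, sunshine_10.
#
#     _select_sunshine({"sunshine": None, "sunshine_60": 42.0})  # -> 42.0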


def _poll_weather_once():
    logging.info("Starting weather data polling cycle")
    brightsky_stats = _poll_weather_brightsky()
    meteo_stats = _poll_weather_meteofrance()

    total_polled = brightsky_stats["polled"] + meteo_stats["polled"]
    total_inserted = brightsky_stats["inserted"] + meteo_stats["inserted"]
    total_updated = brightsky_stats["updated"] + meteo_stats["updated"]

    logging.info(
        "Weather polling cycle processed %s weather samples (%s inserted, %s updated)",
        total_polled,
        total_inserted,
        total_updated,
    )
    return total_polled, total_inserted, total_updated


def _poll_weather_brightsky():
    chargepoints = _fetch_chargepoints_with_coordinates("DE")
    if not chargepoints:
        return {"polled": 0, "inserted": 0, "updated": 0}

    polled = inserted = updated = 0
    for cp in chargepoints:
        cp_id = cp.get("chargepoint_id")
        lat = cp.get("latitude")
        lon = cp.get("longitude")
        if cp_id is None or lat is None or lon is None:
            continue

        try:
            logging.info(
                "Fetching Bright Sky weather data for chargepoint %s", cp_id
            )
            response = requests.get(
                BRIGHT_SKY_API_URL,
                params={"lat": float(lat), "lon": float(lon)},
                timeout=10,
            )
            response.raise_for_status()
        except Exception:
            logging.exception("Failed to fetch weather data for chargepoint %s", cp_id)
            continue

        try:
            payload = response.json()
        except ValueError:
            logging.exception("Invalid JSON payload for chargepoint %s", cp_id)
            continue

        weather_payload = payload.get("weather")
        if not weather_payload:
            logging.warning("No weather data returned for chargepoint %s", cp_id)
            continue

        observation_time = _parse_iso_datetime(weather_payload.get("timestamp"))
        if observation_time is None:
            logging.warning(
                "Skipping weather record for %s due to missing observation timestamp", cp_id
            )
            continue

        temperature = weather_payload.get("temperature")
        cloud_cover = weather_payload.get("cloud_cover")
        sunshine = _select_sunshine(weather_payload)

        try:
            polled += 1
            (
                normalized_observation,
                previous_observation,
                was_inserted,
                was_updated,
            ) = _store_weather_sample(
                cp_id,
                observation_time,
                payload,
                temperature,
                cloud_cover,
                sunshine,
            )
            if was_inserted:
                inserted += 1
                if previous_observation is None:
                    logging.info(
                        "Inserted initial weather sample for chargepoint %s at %s",
                        cp_id,
                        normalized_observation,
                    )
                else:
                    gap_minutes = max(
                        0,
                        int(
                            (normalized_observation - previous_observation).total_seconds()
                            / 60
                        ),
                    )
                    logging.info(
                        "Inserted weather sample for chargepoint %s at %s (gap %s minutes)",
                        cp_id,
                        normalized_observation,
                        gap_minutes,
                    )
            elif was_updated:
                updated += 1
        except Exception:
            logging.exception("Failed to store weather data for chargepoint %s", cp_id)

    return {"polled": polled, "inserted": inserted, "updated": updated}


def _extract_minutely_value(minutely_payload, target_time, key):
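    """Return the value of ``key`` whose minutely_15 timestamp matches ``target_time``."""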
    if not minutely_payload:
        return None

    times = minutely_payload.get("time")
    values = minutely_payload.get(key)
    if not times or not values:
        return None

    try:
        idx = times.index(target_time)
    except ValueError:
        return None

    if idx >= len(values):
        return None

    return values[idx]


def _poll_weather_meteofrance():
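    """Poll Météo-France weather for French chargepoints and store samples."""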
    chargepoints = _fetch_chargepoints_with_coordinates("FR")
    if not chargepoints:
        return {"polled": 0, "inserted": 0, "updated": 0}

    polled = inserted = updated = 0
    for cp in chargepoints:
        cp_id = cp.get("chargepoint_id")
        lat = cp.get("latitude")
        lon = cp.get("longitude")
        if cp_id is None or lat is None or lon is None:
            continue

        try:
            logging.info(
                "Fetching Météo-France weather data for chargepoint %s", cp_id
            )
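            # The minutely_15 series supplies cloud cover and shortwave
            # radiation, which fill the cloud_cover and sunshine columns.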
            response = requests.get(
                METEO_FRANCE_API_URL,
                params={
                    "latitude": float(lat),
                    "longitude": float(lon),
                    "current_weather": True,
                    "timezone": "UTC",
                    "minutely_15": "cloudcover,shortwave_radiation",
                },
                timeout=10,
            )
            response.raise_for_status()
        except Exception:
            logging.exception(
                "Failed to fetch Météo-France data for chargepoint %s", cp_id
            )
            continue

        try:
            payload = response.json()
        except ValueError:
            logging.exception(
                "Invalid JSON payload from Météo-France for chargepoint %s", cp_id
            )
            continue

        current_weather = payload.get("current_weather")
        if not current_weather:
            logging.warning(
                "No current weather data returned for chargepoint %s", cp_id
            )
            continue

        observation_time = _parse_iso_datetime(current_weather.get("time"))
        if observation_time is None:
            logging.warning(
                "Skipping Météo-France record for %s due to missing observation timestamp",
                cp_id,
            )
            continue

        observation_time_str = current_weather.get("time")
        minutely_payload = payload.get("minutely_15")
        cloud_cover = _extract_minutely_value(
            minutely_payload, observation_time_str, "cloudcover"
        )
        sunshine = _extract_minutely_value(
            minutely_payload, observation_time_str, "shortwave_radiation"
        )
        temperature = current_weather.get("temperature")

        try:
            polled += 1
            (
                normalized_observation,
                previous_observation,
                was_inserted,
                was_updated,
            ) = _store_weather_sample(
                cp_id,
                observation_time,
                payload,
                temperature,
                cloud_cover,
                sunshine,
            )
            if was_inserted:
                inserted += 1
                if previous_observation is None:
                    logging.info(
                        "Inserted initial Météo-France weather sample for chargepoint %s at %s",
                        cp_id,
                        normalized_observation,
                    )
                else:
                    gap_minutes = max(
                        0,
                        int(
                            (normalized_observation - previous_observation).total_seconds()
                            / 60
                        ),
                    )
                    logging.info(
                        "Inserted Météo-France weather sample for chargepoint %s at %s (gap %s minutes)",
                        cp_id,
                        normalized_observation,
                        gap_minutes,
                    )
            elif was_updated:
                updated += 1
        except Exception:
            logging.exception(
                "Failed to store Météo-France weather data for chargepoint %s", cp_id
            )

    return {"polled": polled, "inserted": inserted, "updated": updated}


def _weather_polling_loop():
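    """Run a polling cycle after each configured sleep interval, forever."""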
    sleep_seconds = max(WEATHER_POLL_INTERVAL, 60)
    while True:
        logging.info(
            "Weather polling thread sleeping for %s seconds (%.1f minutes) before next cycle",
            sleep_seconds,
            sleep_seconds / 60,
        )
        time.sleep(sleep_seconds)
        try:
            _poll_weather_once()
        except Exception:
            logging.exception("Unexpected error while polling weather data")


def start_weather_polling():
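    """Run an initial polling cycle and start the daemon thread exactly once."""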
    global _weather_thread
    with _weather_thread_lock:
        if _weather_thread and _weather_thread.is_alive():
            return
        try:
            logging.info("Running initial weather polling cycle at startup")
            _poll_weather_once()
        except Exception:
            logging.exception("Initial weather polling cycle failed")
        _weather_thread = threading.Thread(
            target=_weather_polling_loop,
            name="WeatherPollingThread",
            daemon=True,
        )
        _weather_thread.start()

# ---------------------------------------------------------------------------
# HTTPS configuration
# ---------------------------------------------------------------------------

_ssl_cfg = _config.get("ssl", {})
SSL_CERT_FILE = os.getenv("SSL_CERT_FILE", _ssl_cfg.get("cert"))
SSL_KEY_FILE = os.getenv("SSL_KEY_FILE", _ssl_cfg.get("key"))


def _build_ssl_context():
    """Create an SSL context if certificate and key files are provided."""

    if not (SSL_CERT_FILE and SSL_KEY_FILE):
        return None

    cert_path = Path(SSL_CERT_FILE)
    key_path = Path(SSL_KEY_FILE)

    missing = [str(p) for p in (cert_path, key_path) if not p.exists()]
    if missing:
        logging.error(
            "SSL configuration is enabled but the following files are missing: %s",
            ", ".join(missing),
        )
        return None

    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    try:
        context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
    except ssl.SSLError as exc:  # pragma: no cover - depends on external files
        logging.error("Could not load SSL certificate/key: %s", exc)
        return None
    return context


def get_password():
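    """Return the dashboard password, seeding the default on first use."""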
    pw = get_config_value(PASSWORD_KEY)
    if pw is None:
        pw = "admin"
        set_config_value(PASSWORD_KEY, pw)
    return pw


def get_token():
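    """Return the persistent login token, generating one on first use."""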
    token = get_config_value(TOKEN_KEY)
    if not token:
        token = uuid.uuid4().hex
        set_config_value(TOKEN_KEY, token)
    return token


@app.before_request
def require_login():
    if request.path.startswith('/api/'):
        return
    if request.path.startswith(app.static_url_path) or request.path.startswith('/static'):
        return
    if request.endpoint in ('login', 'direct_login', 'robots_txt'):
        return
    # Accept the login cookie issued by the login and direct-login views.
    if request.cookies.get(COOKIE_NAME) == get_token():
        return
    auth = request.authorization
    if auth and auth.type and auth.type.lower() == "basic":
        if (
            (auth.username or "") == BASIC_AUTH_USERNAME
            and (auth.password or "") == BASIC_AUTH_PASSWORD
        ):
            return
    return Response(
        "Unauthorized",
        401,
        {"WWW-Authenticate": 'Basic realm="BHI Dashboard"'},
    )


# Flask 3 removed the before_first_request hook, while before_serving only
# exists on ASGI-style apps such as Quart. Register the background task with
# the best hook the app object actually provides so the code keeps working
# across releases and frameworks.
if hasattr(app, "before_serving"):

    @app.before_serving
    def start_background_tasks():
        start_weather_polling()

elif hasattr(app, "before_first_request"):

    @app.before_first_request
    def start_background_tasks():
        # before_first_request is invoked once per process so this matches the
        # intent of before_serving on older Flask versions.
        start_weather_polling()

else:
    _background_tasks_started = False

    @app.before_request
    def start_background_tasks():
        global _background_tasks_started
        if not _background_tasks_started:
            start_weather_polling()
            _background_tasks_started = True


@app.get('/robots.txt')
def robots_txt():
    return Response('User-agent: *\nDisallow: /\n', mimetype='text/plain')


@app.route('/cf', methods=['GET', 'POST'])
def login():
    error = None
    if request.method == 'POST':
        username = request.form.get('username', '')
        password = request.form.get('password', '')
        if username == 'admin' and password == get_password():
            token = get_token()
            resp = redirect(url_for('dashboard'))
            resp.set_cookie(
                COOKIE_NAME,
                token,
                max_age=10 * 365 * 24 * 3600,
                httponly=True,
            )
            return resp
        error = 'Login fehlgeschlagen'
    return render_template('bhi_login.html', error=error)


@app.route('/cf/op_directlogin')
def direct_login():
    """Authenticate user via token provided in query string."""
    provided = request.args.get('token', '')
    token = get_token()
    if provided == token:
        resp = redirect(url_for('dashboard'))
        resp.set_cookie(
            COOKIE_NAME,
            token,
            max_age=10 * 365 * 24 * 3600,
            httponly=True,
        )
        return resp
    return redirect(url_for('login'))


@app.route('/cf/static/<path:filename>')
def serve_static(filename):
    """Serve static assets from the html/static directory."""
    return send_from_directory(STATIC_FOLDER, filename)


@app.route('/static/<path:filename>')
def serve_legacy_static(filename):
    """Serve static files for legacy links without the /cf prefix."""
    return send_from_directory(app.static_folder, filename)


@app.route('/logout')
def logout():
    """Remove authentication cookie and redirect to login page."""
    resp = redirect(url_for('login'))
    resp.delete_cookie(COOKIE_NAME)
    return resp


@app.route('/cf/demo/<slug>')
def render_demo(slug: str):
    """Render one of the BHI demo pages inside the dashboard frame."""

    template_name = DEMO_TEMPLATES.get(slug)
    if not template_name:
        abort(404)

    return render_template(template_name, aside='demoMenu')


def _fetch_all_vehicles():
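    """Return all vehicles ordered by name, flagging those that have OEM data."""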
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    v.vehicle_id,
                    v.name,
                    v.manufacturer,
                    v.model,
                    v.vehicle_model_id,
                    v.build_year,
                    v.id_tag,
                    v.autocharge_id,
                    v.battery_capacity_net_kwh,
                    v.battery_capacity_gross_kwh,
                    v.ac_charging_losses_percent,
                    v.warmup_soc_percent,
                    v.warmup_minutes,
                    v.vin,
                    v.license_plate,
                    v.data_source,
                    v.charge_factor,
                    v.confidence,
                    EXISTS (
                        SELECT 1
                        FROM bhi_oem_data AS d
                        WHERE d.vehicle_id = v.vehicle_id
                        LIMIT 1
                    ) AS has_oem_data
                FROM bhi_vehicle_list AS v
                ORDER BY v.name
                """
            )
            return cur.fetchall()
    finally:
        conn.close()


def _fetch_vehicle_select_options():
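    """Return vehicles as select options with a human-readable display label."""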
    vehicles = []
    for vehicle in _fetch_all_vehicles():
        name = (vehicle.get('name') or '').strip()
        model = (vehicle.get('model') or '').strip()

        label_parts = [part for part in (name, model) if part]
        if label_parts:
            display_label = ', '.join(label_parts)
        else:
            display_label = 'Unnamed vehicle'

        vehicles.append(
            {
                'vehicle_id': vehicle['vehicle_id'],
                'name': name or None,
                'model': model or None,
                'manufacturer': vehicle.get('manufacturer'),
                'license_plate': vehicle.get('license_plate'),
                'id_tag': vehicle.get('id_tag'),
                'display_label': display_label,
            }
        )

    return vehicles


def _get_vehicle(vehicle_id: int):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    vehicle_id, name, manufacturer, model, vehicle_model_id,
                    build_year, id_tag, autocharge_id, battery_capacity_net_kwh,
                    battery_capacity_gross_kwh, ac_charging_losses_percent,
                    warmup_soc_percent, warmup_minutes, vin, license_plate,
                    data_source, charge_factor, confidence
                FROM bhi_vehicle_list
                WHERE vehicle_id=%s
                """,
                (vehicle_id,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _get_vehicle_by_vin(vin: str):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT vehicle_id, name, manufacturer, model, vin, license_plate
                FROM bhi_vehicle_list
                WHERE vin = %s
                LIMIT 1
                """,
                (vin,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _build_vehicle_data_filters(
    vehicle_id: int,
    search_term: Optional[str] = None,
    month_start: Optional[datetime.date] = None,
    month_end: Optional[datetime.date] = None,
):
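    """Build the WHERE clause and parameter list for bhi_oem_data queries."""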
    clauses = ["vehicle_id = %s"]
    params: list = [vehicle_id]
    if search_term:
        clauses.append("LOWER(msg_key) LIKE %s")
        params.append(f"%{search_term.lower()}%")
    if month_start:
        clauses.append("ts >= %s")
        params.append(month_start)
    if month_end:
        clauses.append("ts < %s")
        params.append(month_end)
    return " AND ".join(clauses), params


def _fetch_vehicle_data_entries(
    vehicle_id: int,
    limit: Optional[int],
    offset: int,
    search_term: Optional[str] = None,
    month_start: Optional[datetime.date] = None,
    month_end: Optional[datetime.date] = None,
):
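    """Return filtered OEM rows, the total row count and the effective offset."""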
    where_clause, params = _build_vehicle_data_filters(
        vehicle_id, search_term, month_start, month_end
    )
    count_query = f"SELECT COUNT(*) AS total FROM bhi_oem_data WHERE {where_clause}"
    if limit is not None:
        data_query = f"""
            SELECT id, msg_key, msg_value, ts
            FROM bhi_oem_data
            WHERE {where_clause}
            ORDER BY ts DESC
            LIMIT %s OFFSET %s
        """
    else:
        data_query = f"""
            SELECT id, msg_key, msg_value, ts
            FROM bhi_oem_data
            WHERE {where_clause}
            ORDER BY ts DESC
        """

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(count_query, params)
            result = cur.fetchone() or {}
            total = result.get('total', 0) or 0

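            # Clamp the offset to the last available page when the requested
            # page lies beyond the end of the result set.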
            if limit is not None and total and offset >= total:
                last_page_offset = max((math.ceil(total / limit) - 1) * limit, 0)
                offset = last_page_offset

            query_params = list(params)
            if limit is not None:
                query_params.extend([limit, offset])
            cur.execute(data_query, query_params)
            rows = cur.fetchall()
            return rows, total, offset
    finally:
        conn.close()


def _fetch_vehicle_data_daily_counts_for_month(
    vehicle_id: int,
    reference_date: Optional[datetime.date] = None,
):
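    """Return month bounds and per-day OEM entry counts for the reference month."""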
    if reference_date is None:
        reference_date = datetime.date.today()

    # datetime.datetime also implements .replace(), so normalise datetimes to
    # plain dates explicitly before computing the month start.
    if isinstance(reference_date, datetime.datetime):
        reference_date = reference_date.date()
    month_start = reference_date.replace(day=1)

    if month_start.month == 12:
        next_month = datetime.date(month_start.year + 1, 1, 1)
    else:
        next_month = datetime.date(month_start.year, month_start.month + 1, 1)

    conn = get_db_conn()
    rows = []
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT DATE(ts) AS day, COUNT(*) AS entry_count
                FROM bhi_oem_data
                WHERE vehicle_id = %s
                    AND ts >= %s
                    AND ts < %s
                GROUP BY day
                ORDER BY day
                """,
                (vehicle_id, month_start, next_month),
            )
            rows = cur.fetchall() or []
    finally:
        conn.close()

    counts_by_date: Dict[datetime.date, int] = {}
    for row in rows:
        day_value = row.get("day")
        if isinstance(day_value, datetime.datetime):
            day_value = day_value.date()
        elif isinstance(day_value, str):
            try:
                day_value = datetime.datetime.strptime(day_value, "%Y-%m-%d").date()
            except ValueError:
                day_value = None

        if day_value is None:
            continue

        try:
            counts_by_date[day_value] = int(row.get("entry_count", 0))
        except (TypeError, ValueError):
            counts_by_date[day_value] = 0

    return month_start, next_month, counts_by_date


def _fetch_vehicle_data_available_months(vehicle_id: int) -> List[datetime.date]:
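    """Return the first day of every month that has OEM data, newest first."""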
    conn = get_db_conn()
    rows: List[Dict[str, Any]] = []
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT DATE_FORMAT(ts, '%%Y-%%m-01') AS month_start
                FROM bhi_oem_data
                WHERE vehicle_id = %s
                GROUP BY month_start
                ORDER BY month_start DESC
                """,
                (vehicle_id,),
            )
            rows = cur.fetchall() or []
    finally:
        conn.close()

    months: List[datetime.date] = []
    for row in rows:
        value = row.get("month_start")
        if isinstance(value, datetime.date):
            months.append(value)
            continue
        if isinstance(value, datetime.datetime):
            months.append(value.date())
            continue
        if isinstance(value, str):
            try:
                parsed = datetime.datetime.strptime(value, "%Y-%m-%d").date()
            except ValueError:
                continue
            else:
                months.append(parsed)

    return months


def _format_decimal_for_form(value):
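    """Format a numeric value as a plain decimal string without trailing zeros."""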
    if value is None or value == "":
        return ""
    if not isinstance(value, Decimal):
        try:
            value = Decimal(str(value))
        except (InvalidOperation, ValueError):
            return str(value)
    normalized = value.normalize()
    formatted = format(normalized, 'f')
    return formatted


def _render_vehicle_list(errors=None, create_form_data=None, edit_vehicle=None):
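    """Render the vehicle list, normalising create/edit form values for display."""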
    vehicles = _fetch_all_vehicles()
    base_form = {
        "name": "",
        "manufacturer": "",
        "model": "",
        "vehicle_model_id": "",
        "build_year": "",
        "autocharge_id": "",
        "battery_capacity_net_kwh": "",
        "battery_capacity_gross_kwh": "",
        "ac_charging_losses_percent": "8",
        "warmup_soc_percent": "3.0",
        "warmup_minutes": "20.0",
        "vin": "",
        "license_plate": "",
        "data_source": VEHICLE_DATA_SOURCES[0],
    }
    if create_form_data:
        base_form.update(create_form_data)
        capacity_value = base_form.pop('battery_capacity_net_kwh_value', None)
        if capacity_value is not None and not base_form.get('battery_capacity_net_kwh'):
            base_form['battery_capacity_net_kwh'] = _format_decimal_for_form(capacity_value)
        gross_capacity_value = base_form.pop('battery_capacity_gross_kwh_value', None)
        if gross_capacity_value is not None and not base_form.get('battery_capacity_gross_kwh'):
            base_form['battery_capacity_gross_kwh'] = _format_decimal_for_form(gross_capacity_value)
        losses_value = base_form.pop('ac_charging_losses_percent_value', None)
        if losses_value is not None and base_form.get('ac_charging_losses_percent') in (None, ''):
            base_form['ac_charging_losses_percent'] = _format_decimal_for_form(losses_value)
        warmup_soc_value = base_form.pop('warmup_soc_percent_value', None)
        if warmup_soc_value is not None and base_form.get('warmup_soc_percent') in (None, ''):
            base_form['warmup_soc_percent'] = _format_decimal_for_form(warmup_soc_value)
        warmup_minutes_value = base_form.pop('warmup_minutes_value', None)
        if warmup_minutes_value is not None and base_form.get('warmup_minutes') in (None, ''):
            base_form['warmup_minutes'] = _format_decimal_for_form(warmup_minutes_value)
        vehicle_model_id_value = base_form.pop('vehicle_model_id_value', None)
        if vehicle_model_id_value is not None and not base_form.get('vehicle_model_id'):
            base_form['vehicle_model_id'] = str(vehicle_model_id_value)
        build_year_value = base_form.pop('build_year_value', None)
        if build_year_value is not None and not base_form.get('build_year'):
            base_form['build_year'] = str(build_year_value)
    else:
        base_form.pop('battery_capacity_net_kwh_value', None)
        base_form.pop('battery_capacity_gross_kwh_value', None)
        base_form.pop('ac_charging_losses_percent_value', None)
        base_form.pop('warmup_soc_percent_value', None)
        base_form.pop('warmup_minutes_value', None)
        base_form.pop('vehicle_model_id_value', None)
        base_form.pop('build_year_value', None)
    if base_form.get('ac_charging_losses_percent') in (None, ''):
        base_form['ac_charging_losses_percent'] = '8'
    if base_form.get('warmup_soc_percent') in (None, ''):
        base_form['warmup_soc_percent'] = '3.0'
    if base_form.get('warmup_minutes') in (None, ''):
        base_form['warmup_minutes'] = '20.0'
    if edit_vehicle:
        capacity_value = edit_vehicle.pop('battery_capacity_net_kwh_value', None)
        if not edit_vehicle.get('battery_capacity_net_kwh') and capacity_value is not None:
            edit_vehicle['battery_capacity_net_kwh'] = _format_decimal_for_form(capacity_value)
        elif isinstance(edit_vehicle.get('battery_capacity_net_kwh'), Decimal):
            edit_vehicle['battery_capacity_net_kwh'] = _format_decimal_for_form(edit_vehicle['battery_capacity_net_kwh'])
        gross_capacity_value = edit_vehicle.pop('battery_capacity_gross_kwh_value', None)
        if not edit_vehicle.get('battery_capacity_gross_kwh') and gross_capacity_value is not None:
            edit_vehicle['battery_capacity_gross_kwh'] = _format_decimal_for_form(gross_capacity_value)
        elif isinstance(edit_vehicle.get('battery_capacity_gross_kwh'), Decimal):
            edit_vehicle['battery_capacity_gross_kwh'] = _format_decimal_for_form(edit_vehicle['battery_capacity_gross_kwh'])
        edit_vehicle.pop('ac_charging_losses_percent_value', None)
        percent_value = edit_vehicle.get('ac_charging_losses_percent')
        if isinstance(percent_value, Decimal):
            edit_vehicle['ac_charging_losses_percent'] = _format_decimal_for_form(percent_value)
        elif percent_value in (None, ''):
            edit_vehicle['ac_charging_losses_percent'] = '8'
        warmup_soc_value = edit_vehicle.pop('warmup_soc_percent_value', None)
        if not edit_vehicle.get('warmup_soc_percent') and warmup_soc_value is not None:
            edit_vehicle['warmup_soc_percent'] = _format_decimal_for_form(warmup_soc_value)
        elif isinstance(edit_vehicle.get('warmup_soc_percent'), Decimal):
            edit_vehicle['warmup_soc_percent'] = _format_decimal_for_form(edit_vehicle['warmup_soc_percent'])
        elif edit_vehicle.get('warmup_soc_percent') in (None, ''):
            edit_vehicle['warmup_soc_percent'] = '3.0'
        warmup_minutes_value = edit_vehicle.pop('warmup_minutes_value', None)
        if not edit_vehicle.get('warmup_minutes') and warmup_minutes_value is not None:
            edit_vehicle['warmup_minutes'] = _format_decimal_for_form(warmup_minutes_value)
        elif isinstance(edit_vehicle.get('warmup_minutes'), Decimal):
            edit_vehicle['warmup_minutes'] = _format_decimal_for_form(edit_vehicle['warmup_minutes'])
        elif edit_vehicle.get('warmup_minutes') in (None, ''):
            edit_vehicle['warmup_minutes'] = '20.0'
        vehicle_model_id_value = edit_vehicle.pop('vehicle_model_id_value', None)
        if not edit_vehicle.get('vehicle_model_id') and vehicle_model_id_value is not None:
            edit_vehicle['vehicle_model_id'] = str(vehicle_model_id_value)
        elif edit_vehicle.get('vehicle_model_id') not in (None, ''):
            edit_vehicle['vehicle_model_id'] = str(edit_vehicle['vehicle_model_id'])
        else:
            edit_vehicle['vehicle_model_id'] = ''
        build_year_value = edit_vehicle.pop('build_year_value', None)
        build_year_field = edit_vehicle.get('build_year')
        if not build_year_field and build_year_value is not None:
            edit_vehicle['build_year'] = str(build_year_value)
        elif isinstance(build_year_field, int):
            edit_vehicle['build_year'] = str(build_year_field)
        elif build_year_field is None:
            edit_vehicle['build_year'] = ''
        vin_field = edit_vehicle.get('vin')
        if isinstance(vin_field, str) and not vin_field.strip():
            edit_vehicle['vin'] = ''
    vehicle_model_options = _fetch_vehicle_model_options()
    return render_template(
        'bhi_vehicle_list.html',
        vehicles=vehicles,
        data_sources=VEHICLE_DATA_SOURCES,
        errors=errors or [],
        create_form=base_form,
        edit_vehicle=edit_vehicle,
        vehicle_model_options=vehicle_model_options,
        aside='vehicleMenu',
    )


def _format_decimal_for_display(value):
    formatted = _format_decimal_for_form(value)
    return formatted if formatted not in (None, "") else ""


def _fetch_vehicle_model_options():
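    """Return vehicle models joined with manufacturers as labelled select options."""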
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    vm.model_id,
                    vm.model_name,
                    vm.model_year_from,
                    vm.model_year_to,
                    vm.manufacturer_id,
                    m.name AS manufacturer_name
                FROM bhi_vehicle_models vm
                JOIN bhi_vehicle_manufacturers m ON vm.manufacturer_id = m.manufacturer_id
                ORDER BY m.name, vm.model_name, vm.model_year_from
                """
            )
            options = []
            for row in cur.fetchall():
                label = f"{row['manufacturer_name']} {row['model_name']}"
                year_from = row.get('model_year_from')
                year_to = row.get('model_year_to')
                year_text = ""
                if year_from is not None and year_to is not None:
                    if year_from == year_to:
                        year_text = str(year_from)
                    else:
                        year_text = f"{year_from}\u2013{year_to}"
                elif year_from is not None:
                    year_text = f"{year_from}\u2013"
                elif year_to is not None:
                    year_text = f"\u2013{year_to}"
                if year_text:
                    label = f"{label} ({year_text})"
                options.append(
                    {
                        'model_id': row['model_id'],
                        'model_id_str': str(row['model_id']),
                        'label': label,
                        'manufacturer_id': row['manufacturer_id'],
                        'manufacturer_name': row['manufacturer_name'],
                        'model_name': row['model_name'],
                    }
                )
            return options
    finally:
        conn.close()


def _fetch_all_manufacturers_with_models():
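    """Return all manufacturers with their models and display-formatted figures."""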
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT manufacturer_id, name
                FROM bhi_vehicle_manufacturers
                ORDER BY name
                """
            )
            manufacturers = cur.fetchall()
            manufacturer_map = {
                row["manufacturer_id"]: {
                    "manufacturer_id": row["manufacturer_id"],
                    "manufacturer_id_str": str(row["manufacturer_id"]),
                    "name": row["name"],
                    "models": [],
                }
                for row in manufacturers
            }
            if manufacturer_map:
                cur.execute(
                    """
                    SELECT
                        model_id,
                        manufacturer_id,
                        model_name,
                        model_year_from,
                        model_year_to,
                        battery_capacity_gross_kwh,
                        battery_capacity_net_kwh,
                        ac_charging_losses_percent,
                        warmup_soc_percent,
                        warmup_minutes
                    FROM bhi_vehicle_models
                    ORDER BY model_name, model_year_from
                    """
                )
                for row in cur.fetchall():
                    manufacturer = manufacturer_map.get(row["manufacturer_id"])
                    if not manufacturer:
                        continue
                    model = dict(row)
                    model["manufacturer_id_str"] = manufacturer["manufacturer_id_str"]
                    for field in (
                        "battery_capacity_gross_kwh",
                        "battery_capacity_net_kwh",
                        "ac_charging_losses_percent",
                        "warmup_soc_percent",
                        "warmup_minutes",
                    ):
                        model[f"{field}_display"] = _format_decimal_for_display(model.get(field))
                        model[field] = model.get(field)
                    manufacturer["models"].append(model)
        return list(manufacturer_map.values())
    finally:
        conn.close()


def _get_manufacturer(manufacturer_id: int):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT manufacturer_id, name FROM bhi_vehicle_manufacturers WHERE manufacturer_id=%s",
                (manufacturer_id,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _get_vehicle_model(model_id: int):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    vm.model_id,
                    vm.manufacturer_id,
                    vm.model_name,
                    vm.model_year_from,
                    vm.model_year_to,
                    vm.battery_capacity_gross_kwh,
                    vm.battery_capacity_net_kwh,
                    vm.ac_charging_losses_percent,
                    vm.warmup_soc_percent,
                    vm.warmup_minutes,
                    m.name AS manufacturer_name
                FROM bhi_vehicle_models vm
                JOIN bhi_vehicle_manufacturers m ON vm.manufacturer_id = m.manufacturer_id
                WHERE vm.model_id=%s
                """,
                (model_id,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _validate_manufacturer_form(form):
    name = (form.get("name") or "").strip()
    data = {"name": name}
    errors = []
    if not name:
        errors.append("Manufacturer name is required.")
    return data, errors


def _validate_vehicle_model_form(form, *, existing=None):
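    """Validate the vehicle model form and return parsed data plus error messages."""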
    errors = []
    manufacturer_raw = (form.get("manufacturer_id") or "").strip()
    model_name = (form.get("model_name") or "").strip()
    year_from_raw = (form.get("model_year_from") or "").strip()
    year_to_raw = (form.get("model_year_to") or "").strip()
    gross_raw = (form.get("battery_capacity_gross_kwh") or "").strip()
    net_raw = (form.get("battery_capacity_net_kwh") or "").strip()
    losses_raw = (form.get("ac_charging_losses_percent") or "").strip()
    warmup_soc_raw = (form.get("warmup_soc_percent") or "").strip()
    warmup_minutes_raw = (form.get("warmup_minutes") or "").strip()

    manufacturer_id = None
    if manufacturer_raw:
        try:
            manufacturer_id = int(manufacturer_raw)
        except ValueError:
            errors.append("Invalid manufacturer selection.")
    else:
        errors.append("Please select a manufacturer.")

    if not model_name:
        errors.append("Model name is required.")

    def parse_year(value, field_name):
        if value == "":
            return None
        try:
            return int(value)
        except ValueError:
            errors.append(f"{field_name} must be a whole number year.")
            return None

    year_from = parse_year(year_from_raw, "Model year (from)")
    year_to = parse_year(year_to_raw, "Model year (to)")

    def parse_decimal(value, field_name, *, minimum=None, maximum=None, required=False):
        if value == "":
            if required:
                errors.append(f"{field_name} is required.")
            return None
        try:
            decimal_value = Decimal(value)
        except (InvalidOperation, ValueError):
            errors.append(f"{field_name} must be a valid number.")
            return None
        if minimum is not None and decimal_value < Decimal(str(minimum)):
            errors.append(f"{field_name} must be at least {minimum}.")
        if maximum is not None and decimal_value > Decimal(str(maximum)):
            errors.append(f"{field_name} must be at most {maximum}.")
        return decimal_value

    battery_gross = parse_decimal(gross_raw, "Battery size (gross)", minimum=0)
    battery_net = parse_decimal(net_raw, "Battery size (net)", minimum=0)
    losses = parse_decimal(
        losses_raw,
        "Default AC charging losses",
        minimum=0,
        maximum=100,
        required=True,
    )
    warmup_soc = parse_decimal(
        warmup_soc_raw,
        "Warm-up SoC trim",
        minimum=0,
        maximum=100,
        required=True,
    )
    warmup_minutes = parse_decimal(
        warmup_minutes_raw,
        "Warm-up time trim",
        minimum=0,
        required=True,
    )

    data = {
        "manufacturer_id": manufacturer_id,
        "model_name": model_name,
        "model_year_from": year_from,
        "model_year_to": year_to,
        "battery_capacity_gross_kwh": battery_gross,
        "battery_capacity_net_kwh": battery_net,
        "ac_charging_losses_percent": losses,
        "warmup_soc_percent": warmup_soc,
        "warmup_minutes": warmup_minutes,
        "form_values": {
            "manufacturer_id": manufacturer_raw,
            "model_name": model_name,
            "model_year_from": year_from_raw,
            "model_year_to": year_to_raw,
            "battery_capacity_gross_kwh": gross_raw,
            "battery_capacity_net_kwh": net_raw,
            "ac_charging_losses_percent": losses_raw,
            "warmup_soc_percent": warmup_soc_raw,
            "warmup_minutes": warmup_minutes_raw,
        },
    }

    return data, errors


def _render_vehicle_manufacturers(
    *,
    manufacturer_errors=None,
    create_manufacturer_form=None,
    edit_manufacturer=None,
):
    manufacturers = _fetch_all_manufacturers_with_models()

    if create_manufacturer_form is None:
        create_manufacturer_form = {"name": ""}

    return render_template(
        "bhi_vehicle_manufacturers.html",
        manufacturers=manufacturers,
        manufacturer_errors=manufacturer_errors or [],
        create_manufacturer_form=create_manufacturer_form,
        edit_manufacturer=edit_manufacturer,
        aside="vehicleCatalog",
    )


def _render_vehicle_models(
    *,
    model_errors=None,
    create_model_form=None,
    edit_model=None,
    active_manufacturer_id=None,
):
    manufacturers = _fetch_all_manufacturers_with_models()
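    # Open the accordion panel for the active manufacturer, or the first one
    # when no manufacturer is selected.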
    for idx, manufacturer in enumerate(manufacturers):
        is_active = manufacturer["manufacturer_id"] == active_manufacturer_id
        is_first = active_manufacturer_id is None and idx == 0
        manufacturer["is_open"] = is_active or is_first

    if create_model_form is None:
        create_model_form = {
            "manufacturer_id": "",
            "model_name": "",
            "model_year_from": "",
            "model_year_to": "",
            "battery_capacity_gross_kwh": "",
            "battery_capacity_net_kwh": "",
            "ac_charging_losses_percent": "8.0",
            "warmup_soc_percent": "3.0",
            "warmup_minutes": "20.0",
        }
    if active_manufacturer_id and (create_model_form.get("manufacturer_id") in ("", None)):
        create_model_form["manufacturer_id"] = str(active_manufacturer_id)

    if edit_model:
        for key in (
            "battery_capacity_gross_kwh",
            "battery_capacity_net_kwh",
            "ac_charging_losses_percent",
            "warmup_soc_percent",
            "warmup_minutes",
        ):
            edit_model[key] = _format_decimal_for_form(edit_model.get(key))
        if edit_model.get("model_year_from") is not None:
            edit_model["model_year_from"] = str(edit_model["model_year_from"])
        if edit_model.get("model_year_to") is not None:
            edit_model["model_year_to"] = str(edit_model["model_year_to"])
        if edit_model.get("manufacturer_id") is not None:
            edit_model["manufacturer_id"] = str(edit_model["manufacturer_id"])

    return render_template(
        "bhi_vehicle_models.html",
        manufacturers=manufacturers,
        model_errors=model_errors or [],
        create_model_form=create_model_form,
        edit_model=edit_model,
        active_manufacturer_id=active_manufacturer_id,
        aside="vehicleCatalog",
    )


def _validate_vehicle_form(form):
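    """Validate the vehicle form, normalising numbers and resolving the selected model."""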
    data = {
        "name": (form.get('name') or '').strip(),
        "manufacturer": (form.get('manufacturer') or '').strip(),
        "model": (form.get('model') or '').strip(),
        "vehicle_model_id": (form.get('vehicle_model_id') or '').strip(),
        "build_year": (form.get('build_year') or '').strip(),
        "autocharge_id": (form.get('autocharge_id') or '').strip(),
        "battery_capacity_net_kwh": (form.get('battery_capacity_net_kwh') or '').strip(),
        "battery_capacity_gross_kwh": (form.get('battery_capacity_gross_kwh') or '').strip(),
        "ac_charging_losses_percent": (form.get('ac_charging_losses_percent') or '').strip(),
        "warmup_soc_percent": (form.get('warmup_soc_percent') or '').strip(),
        "warmup_minutes": (form.get('warmup_minutes') or '').strip(),
        "vin": (form.get('vin') or '').strip(),
        "license_plate": (form.get('license_plate') or '').strip(),
        "data_source": (form.get('data_source') or '').strip(),
    }
    data["battery_capacity_net_kwh_value"] = None
    data["ac_charging_losses_percent_value"] = None
    data["vehicle_model_id_value"] = None
    data["build_year_value"] = None
    vin_raw = form.get('vin')
    vin_trimmed = (vin_raw or '').strip()
    data['vin'] = vin_trimmed
    data['vin_db_value'] = vin_trimmed if vin_trimmed else ' '
    errors = []
    for field, label in (
        ("name", "Name"),
    ):
        if not data[field]:
            errors.append(f"{label} is required.")
    vehicle_model_raw = data["vehicle_model_id"]
    if not vehicle_model_raw:
        errors.append("Vehicle model is required.")
    else:
        try:
            model_id_value = int(vehicle_model_raw)
        except (TypeError, ValueError):
            errors.append("Vehicle model selection is invalid.")
        else:
            model = _get_vehicle_model(model_id_value)
            if not model:
                errors.append("Selected vehicle model does not exist.")
            else:
                manufacturer_name = model.get('manufacturer_name')
                if not manufacturer_name:
                    manufacturer = _get_manufacturer(model.get('manufacturer_id')) if model.get('manufacturer_id') else None
                    manufacturer_name = manufacturer.get('name') if manufacturer else ''
                model_name = model.get('model_name', '')
                if not manufacturer_name:
                    errors.append("Selected vehicle model is missing a manufacturer.")
                else:
                    data['manufacturer'] = manufacturer_name
                if not model_name:
                    errors.append("Selected vehicle model is missing a model name.")
                else:
                    data['model'] = model_name
                data['vehicle_model_id_value'] = model_id_value
    if len(data["autocharge_id"]) > 25:
        errors.append("AutoCharge ID must be 25 characters or fewer.")
    build_year_raw = data.get('build_year')
    if build_year_raw:
        try:
            build_year_value = int(build_year_raw)
        except (TypeError, ValueError):
            errors.append("Build year must be a whole number.")
        else:
            if build_year_value < 1900 or build_year_value > 2100:
                errors.append("Build year must be between 1900 and 2100.")
            else:
                data['build_year_value'] = build_year_value
    raw_capacity = data["battery_capacity_net_kwh"]
    if raw_capacity:
        normalized_capacity = raw_capacity.replace(",", ".")
        try:
            capacity_value = Decimal(normalized_capacity)
        except (InvalidOperation, ValueError):
            errors.append("Battery capacity must be a number.")
        else:
            if capacity_value < 0:
                errors.append("Battery capacity cannot be negative.")
            else:
                data["battery_capacity_net_kwh_value"] = capacity_value
    raw_gross_capacity = data["battery_capacity_gross_kwh"]
    if raw_gross_capacity:
        normalized_gross_capacity = raw_gross_capacity.replace(",", ".")
        try:
            gross_capacity_value = Decimal(normalized_gross_capacity)
        except (InvalidOperation, ValueError):
            errors.append("Battery capacity (gross) must be a number.")
        else:
            if gross_capacity_value < 0:
                errors.append("Battery capacity (gross) cannot be negative.")
            else:
                data["battery_capacity_gross_kwh_value"] = gross_capacity_value
    raw_losses = data["ac_charging_losses_percent"]
    if not raw_losses:
        raw_losses = "8"
        data["ac_charging_losses_percent"] = "8"
    normalized_losses = raw_losses.replace(",", ".")
    try:
        losses_value = Decimal(normalized_losses)
    except (InvalidOperation, ValueError):
        errors.append("AC charging losses must be a number between 0 and 100.")
    else:
        if losses_value < 0 or losses_value > 100:
            errors.append("AC charging losses must be between 0 and 100.")
        else:
            data["ac_charging_losses_percent_value"] = losses_value
    raw_warmup_soc = data["warmup_soc_percent"] or "3.0"
    data["warmup_soc_percent"] = raw_warmup_soc
    normalized_warmup_soc = raw_warmup_soc.replace(",", ".")
    try:
        warmup_soc_value = Decimal(normalized_warmup_soc)
    except (InvalidOperation, ValueError):
        errors.append("Warm-up SoC trim must be a number.")
    else:
        if warmup_soc_value < 0:
            errors.append("Warm-up SoC trim cannot be negative.")
        else:
            data["warmup_soc_percent_value"] = warmup_soc_value
    raw_warmup_minutes = data["warmup_minutes"] or "20.0"
    data["warmup_minutes"] = raw_warmup_minutes
    normalized_warmup_minutes = raw_warmup_minutes.replace(",", ".")
    try:
        warmup_minutes_value = Decimal(normalized_warmup_minutes)
    except (InvalidOperation, ValueError):
        errors.append("Warm-up time trim must be a number.")
    else:
        if warmup_minutes_value < 0:
            errors.append("Warm-up time trim cannot be negative.")
        else:
            data["warmup_minutes_value"] = warmup_minutes_value
    if data["data_source"] not in VEHICLE_DATA_SOURCE_SET:
        errors.append("Please select a valid vehicle data source option.")
    return data, errors


@app.route('/cf/bhi_vehicle_list')
def bhi_vehicle_list():
    edit_id = request.args.get('edit_id')
    edit_vehicle = None
    if edit_id:
        try:
            edit_vehicle = _get_vehicle(int(edit_id))
        except (TypeError, ValueError):
            edit_vehicle = None
    return _render_vehicle_list(edit_vehicle=edit_vehicle)


def _parse_oem_soc_timestamp(raw: str) -> datetime.datetime:
    """Parse OEM timestamp strings with microsecond precision."""

    raw_text = str(raw)
    value = raw_text
    if raw_text.endswith("Z"):
        value = raw_text[:-1] + "+00:00"

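    # A sign character after index 9 is past the "YYYY-MM-DD" prefix, so the
    # date hyphens are not mistaken for a negative UTC offset.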
    tz_sep = max(value.rfind("+"), value.rfind("-"))
    if tz_sep > 9:
        base = value[:tz_sep]
        tz = value[tz_sep:]
    else:
        base = value
        tz = ""

    if "." in base:
        prefix, frac = base.split(".", 1)
        frac = frac[:6].ljust(6, "0")
        base = f"{prefix}.{frac}"

    return datetime.datetime.fromisoformat(base + tz)


def _iter_oem_soc_entries(payload: Iterable[Dict[str, Any]]) -> Iterable[Dict[str, Any]]:
    """Yield SOC entries from an OEM JSON payload."""

    for item in payload:
        if isinstance(item, dict) and item.get("key") == "soc_hv_header":
            if "value" in item and "timestamp" in item:
                yield item
            else:
                logger.debug("Skipping incomplete OEM SOC entry: %s", item)


def _oem_soc_entry_exists(conn, vehicle_id: int, timestamp: datetime.datetime) -> bool:
    with conn.cursor() as cur:
        cur.execute(
            """
            SELECT 1
            FROM bhi_oem_data
            WHERE vehicle_id = %s AND msg_key = 'soc' AND ts = %s
            LIMIT 1
            """,
            (vehicle_id, timestamp),
        )
        return cur.fetchone() is not None


def _insert_oem_soc_entry(
    conn,
    vehicle_id: int,
    value: str,
    timestamp: datetime.datetime,
) -> None:
    with conn.cursor() as cur:
        cur.execute(
            """
            INSERT INTO bhi_oem_data (vehicle_id, msg_key, msg_value, ts)
            VALUES (%s, 'soc', %s, %s)
            """,
            (vehicle_id, value, timestamp),
        )


def _import_oem_soc_payload(vehicle_id: int, payload: Any) -> int:
    """Import SOC entries from a decoded OEM JSON payload."""

    if not isinstance(payload, list):
        logger.warning(
            "OEM upload payload for vehicle_id %s is not a list; skipping import.",
            vehicle_id,
        )
        return 0

    conn = get_db_conn()
    inserted = 0
    try:
        for entry in _iter_oem_soc_entries(payload):
            try:
                timestamp = _parse_oem_soc_timestamp(str(entry["timestamp"]))
            except (TypeError, ValueError) as exc:
                logger.warning(
                    "Skipping OEM SOC entry with invalid timestamp for vehicle_id %s: %s",
                    vehicle_id,
                    exc,
                )
                continue

            value = str(entry.get("value", ""))

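            # Skip timestamps that already exist so re-uploads stay idempotent.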
            if _oem_soc_entry_exists(conn, vehicle_id, timestamp):
                continue

            _insert_oem_soc_entry(conn, vehicle_id, value, timestamp)
            inserted += 1

        conn.commit()
        return inserted
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()


@app.route('/cf/bhi_vehicle_data/<int:vehicle_id>')
def bhi_vehicle_data_view(vehicle_id: int):
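    """Show paginated OEM data for a vehicle with month filtering and CSV export."""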
    vehicle = _get_vehicle(vehicle_id)
    if not vehicle:
        abort(404)

    upload_feedback = None
    upload_status = request.args.get('upload_status')
    inserted_raw = request.args.get('inserted')
    try:
        inserted_count = int(inserted_raw) if inserted_raw is not None else None
    except (TypeError, ValueError):
        inserted_count = None

    if upload_status == 'success':
        if inserted_count and inserted_count > 0:
            message = f"Upload successful. Inserted {inserted_count} new SOC record(s)."
        else:
            message = "Upload successful. No new SOC records were required."
        upload_feedback = {"category": "success", "message": message}
    elif upload_status == 'noop':
        upload_feedback = {
            "category": "info",
            "message": "No new SOC records were inserted; all timestamps already exist.",
        }
    elif upload_status == 'missing':
        upload_feedback = {
            "category": "warning",
            "message": "Please choose a JSON file before uploading.",
        }
    elif upload_status == 'invalid_type':
        upload_feedback = {
            "category": "warning",
            "message": "Only JSON files can be uploaded.",
        }
    elif upload_status == 'invalid_json':
        upload_feedback = {
            "category": "danger",
            "message": "The uploaded file could not be parsed as JSON.",
        }
    elif upload_status == 'error':
        upload_feedback = {
            "category": "danger",
            "message": "An unexpected error occurred while importing the JSON file.",
        }

    search_term_raw = request.args.get('q')
    search_term = (search_term_raw or '').strip()

    available_months = _fetch_vehicle_data_available_months(vehicle_id)
    month_param = (request.args.get('month') or '').strip()
    selected_month: Optional[datetime.date] = None
    if month_param:
        try:
            parsed_month = datetime.datetime.strptime(month_param, '%Y-%m').date()
        except ValueError:
            parsed_month = None
        if parsed_month is not None:
            selected_month = parsed_month.replace(day=1)

    if selected_month is None and available_months:
        selected_month = available_months[0]
    elif selected_month and selected_month not in available_months and available_months:
        selected_month = available_months[0]

    if selected_month and selected_month.month == 12:
        selected_month_end = datetime.date(selected_month.year + 1, 1, 1)
    elif selected_month:
        selected_month_end = datetime.date(selected_month.year, selected_month.month + 1, 1)
    else:
        selected_month_end = None

    month_options = [
        {
            'value': month.strftime('%Y-%m'),
            'label': month.strftime('%B %Y'),
        }
        for month in available_months
    ]
    selected_month_value = selected_month.strftime('%Y-%m') if selected_month else ''

    page_raw = request.args.get('page', '1')
    try:
        page = int(page_raw)
    except (TypeError, ValueError):
        page = 1
    if page < 1:
        page = 1

    per_page = 100
    offset = (page - 1) * per_page

    export_requested = (request.args.get('export') or '').lower() == 'csv'

    if export_requested:
        export_rows, _export_total, _ = _fetch_vehicle_data_entries(
            vehicle_id,
            limit=None,
            offset=0,
            search_term=search_term or None,
            month_start=selected_month,
            month_end=selected_month_end,
        )

        csv_buffer = StringIO()
        writer = csv.writer(csv_buffer)
        writer.writerow(['ID', 'Timestamp', 'Message Key', 'Message Value'])
        for row in export_rows:
            ts_value = row.get('ts')
            if hasattr(ts_value, 'strftime'):
                ts_display = ts_value.strftime('%Y-%m-%d %H:%M:%S')
            else:
                ts_display = str(ts_value)
            writer.writerow(
                [
                    row.get('id'),
                    ts_display,
                    row.get('msg_key'),
                    row.get('msg_value'),
                ]
            )

        filename_suffix = selected_month_value or datetime.datetime.now(
            datetime.timezone.utc
        ).strftime('%Y-%m')
        csv_response = Response(csv_buffer.getvalue(), mimetype='text/csv')
        csv_response.headers['Content-Disposition'] = (
            f'attachment; filename="vehicle_{vehicle_id}_bhi_data_{filename_suffix}.csv"'
        )
        return csv_response

    records, total_count, effective_offset = _fetch_vehicle_data_entries(
        vehicle_id,
        per_page,
        offset,
        search_term or None,
        selected_month,
        selected_month_end,
    )

    if total_count:
        total_pages = max(1, math.ceil(total_count / per_page))
        page = effective_offset // per_page + 1
    else:
        total_pages = 1
        page = 1

    entries = []
    for record in records:
        ts_value = record.get('ts')
        if hasattr(ts_value, 'strftime'):
            ts_display = ts_value.strftime('%Y-%m-%d %H:%M:%S')
        else:
            ts_display = str(ts_value)

        entries.append(
            {
                'id': record.get('id'),
                'ts': ts_display,
                'msg_key': record.get('msg_key'),
                'msg_value': record.get('msg_value'),
            }
        )

    month_start, next_month, counts_by_date = _fetch_vehicle_data_daily_counts_for_month(
        vehicle_id, selected_month
    )
    try:
        days_in_month = (next_month - month_start).days
    except TypeError:
        days_in_month = 0

    chart_labels: List[str] = []
    chart_values: List[int] = []
    if days_in_month > 0:
        for day_offset in range(days_in_month):
            current_day = month_start + datetime.timedelta(days=day_offset)
            chart_labels.append(str(current_day.day))
            chart_values.append(counts_by_date.get(current_day, 0))

    if hasattr(month_start, 'strftime'):
        chart_month_label = month_start.strftime('%B %Y')
    else:
        chart_month_label = ''

    if not chart_month_label:
        chart_month_label = 'Current Month'

    chart_data = {
        'labels': chart_labels,
        'values': chart_values,
        'monthLabel': chart_month_label,
    }

    vin = vehicle.get('vin')

    return render_template(
        'bhi_vehicle_data.html',
        vehicle=vehicle,
        vehicle_id=vehicle_id,
        vin=vin,
        entries=entries,
        page=page,
        total_pages=total_pages,
        total_count=total_count,
        search_term=search_term,
        per_page=per_page,
        upload_feedback=upload_feedback,
        chart_data=chart_data,
        month_options=month_options,
        selected_month=selected_month_value,
        aside='vehicleMenu',
    )


@app.post('/cf/bhi_vehicle_data/<int:vehicle_id>/upload')
def bhi_vehicle_data_upload(vehicle_id: int):
    vehicle = _get_vehicle(vehicle_id)
    if not vehicle:
        abort(404)

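    # upload_status values surfaced back to the data view:
    # missing, invalid_type, invalid_json, error, success and noop.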
    redirect_params: Dict[str, Any] = {}

    upload = request.files.get('oem_json')
    if upload is None or not upload.filename:
        redirect_params['upload_status'] = 'missing'
        return redirect(url_for('bhi_vehicle_data_view', vehicle_id=vehicle_id, **redirect_params))

    filename = upload.filename or ''
    if not filename.lower().endswith('.json'):
        redirect_params['upload_status'] = 'invalid_type'
        return redirect(url_for('bhi_vehicle_data_view', vehicle_id=vehicle_id, **redirect_params))

    file_bytes = upload.read()
    try:
        payload = json.loads(file_bytes.decode('utf-8'))
    except (UnicodeDecodeError, json.JSONDecodeError):
        redirect_params['upload_status'] = 'invalid_json'
        return redirect(url_for('bhi_vehicle_data_view', vehicle_id=vehicle_id, **redirect_params))

    try:
        inserted = _import_oem_soc_payload(vehicle_id, payload)
    except Exception:  # pragma: no cover - defensive logging for unexpected errors
        logger.exception("Failed to import OEM JSON upload for vehicle_id %s", vehicle_id)
        redirect_params['upload_status'] = 'error'
    else:
        if inserted:
            redirect_params['upload_status'] = 'success'
            redirect_params['inserted'] = inserted
        else:
            redirect_params['upload_status'] = 'noop'

    return redirect(url_for('bhi_vehicle_data_view', vehicle_id=vehicle_id, **redirect_params))


@app.route('/cf/bhi_vehicle_catalog', endpoint='bhi_vehicle_catalog')
@app.route('/cf/bhi_vehicle_models')
def bhi_vehicle_models():
    manufacturer_id_raw = request.args.get('manufacturer_id')
    model_id_raw = request.args.get('model_id')

    edit_model = None
    active_manufacturer_id = None

    if manufacturer_id_raw:
        try:
            active_manufacturer_id = int(manufacturer_id_raw)
            if not _get_manufacturer(active_manufacturer_id):
                active_manufacturer_id = None
        except ValueError:
            active_manufacturer_id = None

    if model_id_raw:
        try:
            model_id = int(model_id_raw)
        except ValueError:
            model_id = None
        if model_id is not None:
            edit_model = _get_vehicle_model(model_id)
            if edit_model:
                active_manufacturer_id = edit_model.get('manufacturer_id')

    return _render_vehicle_models(
        edit_model=edit_model,
        active_manufacturer_id=active_manufacturer_id,
    )


@app.route('/cf/bhi_vehicle_manufacturers')
def bhi_vehicle_manufacturers():
    manufacturer_id_raw = request.args.get('manufacturer_id')
    manufacturer = None

    if manufacturer_id_raw:
        try:
            manufacturer_id = int(manufacturer_id_raw)
        except ValueError:
            manufacturer_id = None
        if manufacturer_id is not None:
            manufacturer = _get_manufacturer(manufacturer_id)
            if manufacturer:
                manufacturer = dict(manufacturer)

    return _render_vehicle_manufacturers(edit_manufacturer=manufacturer)


@app.post('/cf/bhi_vehicle_catalog/manufacturer/create')
def bhi_vehicle_manufacturer_create():
    data, errors = _validate_manufacturer_form(request.form)
    if errors:
        return _render_vehicle_manufacturers(
            manufacturer_errors=errors,
            create_manufacturer_form=data,
        )

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            try:
                cur.execute(
                    "INSERT INTO bhi_vehicle_manufacturers (name) VALUES (%s)",
                    (data['name'],),
                )
                conn.commit()
            except pymysql.err.IntegrityError:
                conn.rollback()
                errors = ["A manufacturer with this name already exists."]
                return _render_vehicle_manufacturers(
                    manufacturer_errors=errors,
                    create_manufacturer_form=data,
                )
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_manufacturers'))


@app.post('/cf/bhi_vehicle_catalog/manufacturer/<int:manufacturer_id>/update')
def bhi_vehicle_manufacturer_update(manufacturer_id: int):
    manufacturer = _get_manufacturer(manufacturer_id)
    if not manufacturer:
        abort(404)

    data, errors = _validate_manufacturer_form(request.form)
    manufacturer['name'] = data.get('name', manufacturer['name'])
    if errors:
        return _render_vehicle_manufacturers(
            manufacturer_errors=errors,
            edit_manufacturer=manufacturer,
        )

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            try:
                cur.execute(
                    "UPDATE bhi_vehicle_manufacturers SET name=%s WHERE manufacturer_id=%s",
                    (data['name'], manufacturer_id),
                )
                conn.commit()
            except pymysql.err.IntegrityError:
                conn.rollback()
                errors = ["A manufacturer with this name already exists."]
                return _render_vehicle_manufacturers(
                    manufacturer_errors=errors,
                    edit_manufacturer=manufacturer,
                )
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_manufacturers', manufacturer_id=manufacturer_id))


@app.post('/cf/bhi_vehicle_catalog/manufacturer/<int:manufacturer_id>/delete')
def bhi_vehicle_manufacturer_delete(manufacturer_id: int):
    manufacturer = _get_manufacturer(manufacturer_id)
    if not manufacturer:
        abort(404)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "DELETE FROM bhi_vehicle_manufacturers WHERE manufacturer_id=%s",
                (manufacturer_id,),
            )
            conn.commit()
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_manufacturers'))


@app.post('/cf/bhi_vehicle_catalog/model/create')
def bhi_vehicle_model_create():
    data, errors = _validate_vehicle_model_form(request.form)
    form_values = data.get('form_values', {})
    active_manufacturer_id = data.get('manufacturer_id')
    if errors:
        return _render_vehicle_models(
            model_errors=errors,
            create_model_form=form_values,
            active_manufacturer_id=active_manufacturer_id,
        )

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                INSERT INTO bhi_vehicle_models (
                    manufacturer_id,
                    model_name,
                    model_year_from,
                    model_year_to,
                    battery_capacity_gross_kwh,
                    battery_capacity_net_kwh,
                    ac_charging_losses_percent,
                    warmup_soc_percent,
                    warmup_minutes
                ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
                """,
                (
                    data['manufacturer_id'],
                    data['model_name'],
                    data['model_year_from'],
                    data['model_year_to'],
                    data['battery_capacity_gross_kwh'],
                    data['battery_capacity_net_kwh'],
                    data['ac_charging_losses_percent'],
                    data['warmup_soc_percent'],
                    data['warmup_minutes'],
                ),
            )
            conn.commit()
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_catalog', manufacturer_id=data['manufacturer_id']))


@app.post('/cf/bhi_vehicle_catalog/model/<int:model_id>/update')
def bhi_vehicle_model_update(model_id: int):
    existing = _get_vehicle_model(model_id)
    if not existing:
        abort(404)

    data, errors = _validate_vehicle_model_form(request.form, existing=existing)
    form_values = data.get('form_values', {})
    active_manufacturer_id = data.get('manufacturer_id') or existing.get('manufacturer_id')

    if errors:
        edit_model = dict(existing)
        edit_model.update(
            {
                'manufacturer_id': form_values.get('manufacturer_id', str(existing.get('manufacturer_id'))),
                'model_name': form_values.get('model_name', existing.get('model_name')),
                'model_year_from': form_values.get('model_year_from', existing.get('model_year_from')),
                'model_year_to': form_values.get('model_year_to', existing.get('model_year_to')),
                'battery_capacity_gross_kwh': form_values.get('battery_capacity_gross_kwh', existing.get('battery_capacity_gross_kwh')),
                'battery_capacity_net_kwh': form_values.get('battery_capacity_net_kwh', existing.get('battery_capacity_net_kwh')),
                'ac_charging_losses_percent': form_values.get('ac_charging_losses_percent', existing.get('ac_charging_losses_percent')),
                'warmup_soc_percent': form_values.get('warmup_soc_percent', existing.get('warmup_soc_percent')),
                'warmup_minutes': form_values.get('warmup_minutes', existing.get('warmup_minutes')),
            }
        )
        return _render_vehicle_models(
            model_errors=errors,
            edit_model=edit_model,
            active_manufacturer_id=active_manufacturer_id,
        )

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                UPDATE bhi_vehicle_models
                SET
                    manufacturer_id=%s,
                    model_name=%s,
                    model_year_from=%s,
                    model_year_to=%s,
                    battery_capacity_gross_kwh=%s,
                    battery_capacity_net_kwh=%s,
                    ac_charging_losses_percent=%s,
                    warmup_soc_percent=%s,
                    warmup_minutes=%s
                WHERE model_id=%s
                """,
                (
                    data['manufacturer_id'],
                    data['model_name'],
                    data['model_year_from'],
                    data['model_year_to'],
                    data['battery_capacity_gross_kwh'],
                    data['battery_capacity_net_kwh'],
                    data['ac_charging_losses_percent'],
                    data['warmup_soc_percent'],
                    data['warmup_minutes'],
                    model_id,
                ),
            )
            conn.commit()
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_catalog', manufacturer_id=data['manufacturer_id']))


@app.post('/cf/bhi_vehicle_catalog/model/<int:model_id>/delete')
def bhi_vehicle_model_delete(model_id: int):
    existing = _get_vehicle_model(model_id)
    if not existing:
        abort(404)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "DELETE FROM bhi_vehicle_models WHERE model_id=%s",
                (model_id,),
            )
            conn.commit()
    finally:
        conn.close()

    return redirect(url_for('bhi_vehicle_catalog', manufacturer_id=existing.get('manufacturer_id')))


@app.post('/cf/bhi_vehicle_list/create')
def bhi_vehicle_create():
    data, errors = _validate_vehicle_form(request.form)
    if errors:
        return _render_vehicle_list(errors=errors, create_form_data=data)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                INSERT INTO bhi_vehicle_list
                    (name, manufacturer, model, vehicle_model_id, build_year, autocharge_id,
                     battery_capacity_net_kwh, battery_capacity_gross_kwh, ac_charging_losses_percent,
                     warmup_soc_percent, warmup_minutes, vin, license_plate, data_source)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """,
                (
                    data['name'],
                    data['manufacturer'],
                    data['model'],
                    data['vehicle_model_id_value'],
                    data.get('build_year_value'),
                    data['autocharge_id'] or None,
                    data['battery_capacity_net_kwh_value'],
                    data.get('battery_capacity_gross_kwh_value'),
                    data['ac_charging_losses_percent_value'],
                    data['warmup_soc_percent_value'],
                    data['warmup_minutes_value'],
                    data['vin_db_value'],
                    data['license_plate'] or None,
                    data['data_source'],
                ),
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_vehicle_list'))


@app.post('/cf/bhi_vehicle_list/update/<int:vehicle_id>')
def bhi_vehicle_update(vehicle_id):
    existing = _get_vehicle(vehicle_id)
    if not existing:
        abort(404)

    data, errors = _validate_vehicle_form(request.form)
    if errors:
        existing.update(data)
        return _render_vehicle_list(errors=errors, edit_vehicle=existing)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                UPDATE bhi_vehicle_list
                SET name=%s, manufacturer=%s, model=%s, vehicle_model_id=%s, build_year=%s,
                    autocharge_id=%s, battery_capacity_net_kwh=%s, battery_capacity_gross_kwh=%s,
                    ac_charging_losses_percent=%s, warmup_soc_percent=%s, warmup_minutes=%s,
                    vin=%s, license_plate=%s, data_source=%s
                WHERE vehicle_id=%s
                """,
                (
                    data['name'],
                    data['manufacturer'],
                    data['model'],
                    data['vehicle_model_id_value'],
                    data.get('build_year_value'),
                    data['autocharge_id'] or None,
                    data['battery_capacity_net_kwh_value'],
                    data.get('battery_capacity_gross_kwh_value'),
                    data['ac_charging_losses_percent_value'],
                    data['warmup_soc_percent_value'],
                    data['warmup_minutes_value'],
                    data['vin_db_value'],
                    data['license_plate'] or None,
                    data['data_source'],
                    vehicle_id,
                ),
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_vehicle_list'))


def _count_chargepoints() -> int:
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT COUNT(*) AS chargepoint_count FROM bhi_chargepoint_list"
            )
            row = cur.fetchone() or {}
            count = row.get('chargepoint_count')
    finally:
        conn.close()

    try:
        return int(count)
    except (TypeError, ValueError):
        return 0


def _normalize_chargepoint_identifier(value):
    """Return a normalized identifier used to match chargepoints."""

    if value is None:
        return None

    if isinstance(value, (bytes, bytearray)):
        try:
            value = value.decode("utf-8", "ignore")
        except Exception:  # pragma: no cover - best effort decoding
            value = bytes(value)

    normalized = normalize_station_id(str(value))
    if not normalized:
        return None

    return normalized.lower()


def _format_duration_seconds(value):
    """Format a duration value expressed in seconds."""

    try:
        seconds = int(value)
    except (TypeError, ValueError):
        return None

    if seconds < 0:
        seconds = 0

    return str(datetime.timedelta(seconds=seconds))


def _fetch_connected_evse_entries():
    """Return the list of connected EVSE entries from the proxy endpoint."""

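    # The endpoint may return either a bare list of entries or an object that
    # wraps the list under "connected", "data" or "items"; non-dict entries
    # are dropped.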
    try:
        response = requests.get(CONNECTED_ENDPOINT, timeout=5)
        response.raise_for_status()
    except requests.RequestException as exc:
        logger.warning("Unable to fetch connected EVSE data: %s", exc)
        return [], "Unable to retrieve live chargepoint data."

    try:
        payload = response.json()
    except ValueError as exc:
        logger.warning("Invalid JSON from connected EVSE endpoint: %s", exc)
        return [], "Received invalid data from the live chargepoint endpoint."

    if isinstance(payload, dict):
        entries = payload.get("connected") or payload.get("data") or payload.get("items")
    else:
        entries = payload

    if not isinstance(entries, list):
        logger.warning(
            "Connected EVSE endpoint returned unexpected payload type: %s",
            type(entries).__name__,
        )
        return [], "Connected EVSE endpoint returned unexpected payload."

    filtered = [entry for entry in entries if isinstance(entry, dict)]

    return filtered, None


def _count_recent_unassigned_sessions(days: int = 3) -> int:
    """Return the number of unassigned sessions within the last *days* days."""

    chargepoint_match_clause = _build_chargepoint_match_condition(
        "s.station_id", "c.chargepoint_id"
    )
    valid_clause = _build_session_validity_clause("s")

    threshold = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=days)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT COUNT(*) AS unassigned_count
                FROM op_charging_sessions AS s
                INNER JOIN bhi_chargepoint_list AS c
                    ON {chargepoint_match_clause}
                WHERE COALESCE(s.session_end, s.session_start) >= %s
                    AND s.vehicle_id IS NULL
                    AND {valid_clause}
                """,
                (threshold,),
            )
            row = cur.fetchone() or {}
            count = row.get("unassigned_count")
    finally:
        conn.close()

    try:
        return int(count)
    except (TypeError, ValueError):
        return 0


def _fetch_all_chargepoints():
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, chargepoint_id, latitude, longitude, max_power_kw, power_type,
                       installation_location, country_code, supports_autocharge, can_deliver_soc
                FROM bhi_chargepoint_list
                ORDER BY name
                """
            )
            return cur.fetchall()
    finally:
        conn.close()


def _get_chargepoint(chargepoint_id: int):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, chargepoint_id, latitude, longitude, max_power_kw, power_type,
                       installation_location, country_code, supports_autocharge, can_deliver_soc
                FROM bhi_chargepoint_list
                WHERE id=%s
                """,
                (chargepoint_id,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _fetch_chargepoint_by_identifier(identifier: str):
    if not identifier:
        return None

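    # Apply the configured COLLATE clause to both sides when one is available;
    # otherwise fall back to a BINARY comparison so the match stays byte-exact.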
    clause = _ensure_chargepoint_collate_clause()
    params = [identifier]
    if clause:
        query = f"""
            SELECT id, name, chargepoint_id, latitude, longitude, max_power_kw, power_type,
                   installation_location, country_code, supports_autocharge, can_deliver_soc
            FROM bhi_chargepoint_list
            WHERE chargepoint_id{clause} = %s{clause}
            LIMIT 1
        """
    else:
        query = """
            SELECT id, name, chargepoint_id, latitude, longitude, max_power_kw, power_type,
                   installation_location, country_code, supports_autocharge, can_deliver_soc
            FROM bhi_chargepoint_list
            WHERE BINARY chargepoint_id = BINARY %s
            LIMIT 1
        """

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(query, params)
            return cur.fetchone()
    finally:
        conn.close()


def _fetch_chargepoint_weather_history(chargepoint, hours: int = 48):
    """Fetch recent weather samples for a chargepoint."""

    if not chargepoint:
        return []

    identifier = chargepoint.get("chargepoint_id")
    if not identifier:
        return []

    since = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(hours=hours)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT observation_time, temperature, cloud_cover
                FROM bhi_timeseries_weather
                WHERE chargepoint_id=%s AND observation_time >= %s
                ORDER BY observation_time
                """,
                (identifier, since),
            )
            return cur.fetchall()
    finally:
        conn.close()


def _simulate_solar_generation(observation_time, cloud_cover):
    """Return a simulated solar production value between 0 and 1."""

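    # Clear-sky output follows a Gaussian centred on solar noon (13:00 for the
    # fixed 06:00-20:00 window, sigma ~= 2.33 h) and is attenuated linearly by
    # cloud cover.  Example: 13:00 with 50 % cover -> exp(0) * 0.5 = 0.5.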
    if not isinstance(observation_time, datetime.datetime):
        return 0.0

    hour = observation_time.hour + observation_time.minute / 60.0
    sunrise = 6.0
    sunset = 20.0

    if hour < sunrise or hour > sunset:
        base = 0.0
    else:
        midpoint = (sunrise + sunset) / 2.0
        sigma = (sunset - sunrise) / 6.0 or 1.0
        base = math.exp(-0.5 * ((hour - midpoint) / sigma) ** 2)

    if cloud_cover is None:
        attenuation = 1.0
    else:
        try:
            cover = float(cloud_cover)
        except (TypeError, ValueError):
            cover = 0.0
        cover = max(0.0, min(100.0, cover))
        attenuation = 1.0 - cover / 100.0

    return round(base * attenuation, 4)


def _render_chargepoint_list(errors=None, create_form_data=None, edit_chargepoint=None):
    chargepoints = _fetch_all_chargepoints()
    base_form = {
        "name": "",
        "chargepoint_id": "",
        "latitude": "",
        "longitude": "",
        "max_power_kw": "",
        "power_type": CHARGEPOINT_POWER_TYPES[0],
        "installation_location": CHARGEPOINT_INSTALL_LOCATIONS[0],
        "country_code": CHARGEPOINT_COUNTRY_CODES[0],
        "supports_autocharge": CHARGEPOINT_BOOLEAN_OPTIONS[0],
        "can_deliver_soc": CHARGEPOINT_BOOLEAN_OPTIONS[0],
    }
    if create_form_data:
        base_form.update(create_form_data)
    return render_template(
        'bhi_chargepoint_list.html',
        chargepoints=chargepoints,
        power_types=CHARGEPOINT_POWER_TYPES,
        installation_locations=CHARGEPOINT_INSTALL_LOCATIONS,
        country_codes=CHARGEPOINT_COUNTRY_CODES,
        boolean_options=CHARGEPOINT_BOOLEAN_OPTIONS,
        errors=errors or [],
        create_form=base_form,
        edit_chargepoint=edit_chargepoint,
        aside='vehicleMenu',
    )


def _compute_infrastructure_overview(include_chargepoint_rows: bool = True):
    """Build the infrastructure overview summary and optional chargepoint rows."""

    connected_entries, fetch_error = _fetch_connected_evse_entries()
    registered_chargepoints = _fetch_all_chargepoints()

    identifier_map = {}
    for chargepoint in registered_chargepoints:
        normalized = _normalize_chargepoint_identifier(chargepoint.get("chargepoint_id"))
        if not normalized:
            continue
        identifier_map.setdefault(normalized, chargepoint)

    connected_map = {}

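    # Index the live entries by normalized station id so they can be matched
    # against registered chargepoints; only the fields used by the dashboard
    # (duration_seconds, reconnectCounter, connectionQuality) are kept.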
    for entry in connected_entries:
        station_identifier = (
            entry.get("station_id")
            or entry.get("stationId")
            or entry.get("stationID")
            or entry.get("station")
        )

        normalized = _normalize_chargepoint_identifier(station_identifier)
        if not normalized:
            continue

        chargepoint = identifier_map.get(normalized)
        if not chargepoint:
            continue

        duration_text = _format_duration_seconds(entry.get("duration_seconds"))

        try:
            reconnects = int(entry.get("reconnectCounter"))
        except (TypeError, ValueError):
            reconnects = None

        try:
            connection_quality = float(entry.get("connectionQuality"))
        except (TypeError, ValueError):
            connection_quality = None

        if connection_quality is not None:
            clamped_quality = max(0.0, min(100.0, connection_quality))
        else:
            clamped_quality = None

        connected_map[normalized] = {
            "duration": duration_text,
            "reconnects": reconnects,
            "connection_quality": connection_quality,
            "connection_quality_clamped": clamped_quality,
        }

    quality_values = [
        details.get("connection_quality")
        for details in connected_map.values()
        if details.get("connection_quality") is not None
    ]

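    # The infrastructure quality index is the unweighted mean of the connection
    # quality reported by the currently connected chargepoints.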
    if quality_values:
        infrastructure_quality_index = sum(quality_values) / len(quality_values)
    else:
        infrastructure_quality_index = None

    registered_count = len(registered_chargepoints)
    connected_count = len(connected_map)

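    # Availability is the share of registered chargepoints that currently have
    # a live connection, expressed as a percentage.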
    if registered_count:
        availability = (connected_count / registered_count) * 100
    else:
        availability = None

    chargepoint_rows = []
    if include_chargepoint_rows:
        for chargepoint in registered_chargepoints:
            normalized = _normalize_chargepoint_identifier(chargepoint.get("chargepoint_id"))

            max_power_kw = chargepoint.get("max_power_kw")
            if isinstance(max_power_kw, Decimal):
                max_power_kw = float(max_power_kw)

            base_row = {
                "name": chargepoint.get("name"),
                "chargepoint_id": chargepoint.get("chargepoint_id"),
                "installation_location": chargepoint.get("installation_location"),
                "power_type": chargepoint.get("power_type"),
                "max_power_kw": max_power_kw,
                "duration": None,
                "reconnects": None,
                "connection_quality": None,
                "connection_quality_clamped": None,
                "status": "Disconnected",
            }

            if normalized and normalized in connected_map:
                details = connected_map[normalized]
                base_row.update(
                    {
                        "duration": details.get("duration"),
                        "reconnects": details.get("reconnects"),
                        "connection_quality": details.get("connection_quality"),
                        "connection_quality_clamped": details.get("connection_quality_clamped"),
                        "status": "Connected",
                    }
                )

            chargepoint_rows.append(base_row)

        chargepoint_rows.sort(
            key=lambda item: (
                (item.get("name") or "").lower(),
                item.get("chargepoint_id") or "",
            )
        )

    summary = {
        "connected_chargepoints": connected_count,
        "registered_chargepoints": registered_count,
        "availability": availability,
        "infrastructure_quality_index": infrastructure_quality_index,
    }

    return summary, chargepoint_rows, fetch_error


@app.route('/cf/bhi_charge_factor_explained')
def bhi_charge_factor_explained():
    return render_template(
        'bhi_charge_factor_explained.html',
        aside='vehicleMenu',
    )


@app.route('/cf/bhi_charginginfrastructure_qi')
@app.route('/cf/bhi_charginginfrastucture_qi')  # legacy misspelled path kept for existing links
def bhi_charging_infrastructure_qi():
    summary, chargepoint_rows, fetch_error = _compute_infrastructure_overview()

    return render_template(
        'bhi_charginginfrastructure_qi.html',
        summary=summary,
        chargepoints=chargepoint_rows,
        fetch_error=fetch_error,
        aside='vehicleMenu',
    )


def _validate_chargepoint_form(form):
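    """Validate the chargepoint create/update form.

    Returns a tuple of (raw form values, parsed/typed values, error messages).
    """
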
    data = {
        "name": (form.get('name') or '').strip(),
        "chargepoint_id": (form.get('chargepoint_id') or '').strip(),
        "latitude": (form.get('latitude') or '').strip(),
        "longitude": (form.get('longitude') or '').strip(),
        "max_power_kw": (form.get('max_power_kw') or '').strip(),
        "power_type": (form.get('power_type') or '').strip(),
        "installation_location": (form.get('installation_location') or CHARGEPOINT_INSTALL_LOCATIONS[0]).strip().lower(),
        "country_code": (form.get('country_code') or CHARGEPOINT_COUNTRY_CODES[0]).strip().upper(),
        "supports_autocharge": (form.get('supports_autocharge') or CHARGEPOINT_BOOLEAN_OPTIONS[0]).strip().lower(),
        "can_deliver_soc": (form.get('can_deliver_soc') or CHARGEPOINT_BOOLEAN_OPTIONS[0]).strip().lower(),
    }
    errors = []
    parsed = {
        "name": data['name'],
        "chargepoint_id": data['chargepoint_id'],
        "latitude": None,
        "longitude": None,
        "max_power_kw": None,
        "power_type": data['power_type'],
        "installation_location": data['installation_location'],
        "country_code": data['country_code'],
        "supports_autocharge": data['supports_autocharge'],
        "can_deliver_soc": data['can_deliver_soc'],
    }

    for field, label in (
        ("name", "Name"),
        ("chargepoint_id", "Chargepoint ID"),
        ("power_type", "AC / DC"),
        ("installation_location", "Installation Location"),
        ("country_code", "Country"),
    ):
        if not data[field]:
            errors.append(f"{label} is required.")

    power_type = data['power_type']
    if power_type and power_type not in CHARGEPOINT_POWER_TYPE_SET:
        errors.append("Please select a valid AC / DC option.")

    installation_location = data['installation_location']
    if installation_location and installation_location not in CHARGEPOINT_INSTALL_LOCATION_SET:
        errors.append("Please select a valid installation location.")

    country_code = data['country_code']
    if country_code:
        if country_code not in CHARGEPOINT_COUNTRY_CODE_SET:
            errors.append("Please select a valid country.")
        else:
            parsed['country_code'] = country_code

    supports_autocharge = data['supports_autocharge']
    if supports_autocharge not in CHARGEPOINT_BOOLEAN_OPTION_SET:
        errors.append("Please select whether the chargepoint supports AutoCharge.")
    else:
        parsed['supports_autocharge'] = supports_autocharge

    can_deliver_soc = data['can_deliver_soc']
    if can_deliver_soc not in CHARGEPOINT_BOOLEAN_OPTION_SET:
        errors.append("Please select whether the chargepoint can deliver SoC.")
    else:
        parsed['can_deliver_soc'] = can_deliver_soc

    if data['latitude']:
        try:
            latitude_val = float(data['latitude'])
        except ValueError:
            errors.append("Latitude must be a number.")
        else:
            if not (-90 <= latitude_val <= 90):
                errors.append("Latitude must be between -90 and 90.")
            else:
                parsed['latitude'] = latitude_val

    if data['longitude']:
        try:
            longitude_val = float(data['longitude'])
        except ValueError:
            errors.append("Longitude must be a number.")
        else:
            if not (-180 <= longitude_val <= 180):
                errors.append("Longitude must be between -180 and 180.")
            else:
                parsed['longitude'] = longitude_val

    if data['max_power_kw']:
        try:
            max_power_val = float(data['max_power_kw'])
        except ValueError:
            errors.append("Max Power must be a number.")
        else:
            if max_power_val <= 0:
                errors.append("Max Power must be greater than 0.")
            else:
                parsed['max_power_kw'] = max_power_val

    return data, parsed, errors


@app.route('/cf/bhi_chargepoint_list')
def bhi_chargepoint_list():
    edit_id = request.args.get('edit_id')
    edit_chargepoint = None
    if edit_id:
        try:
            edit_chargepoint = _get_chargepoint(int(edit_id))
        except (TypeError, ValueError):
            edit_chargepoint = None
    return _render_chargepoint_list(edit_chargepoint=edit_chargepoint)


@app.post('/cf/bhi_chargepoint_list/create')
def bhi_chargepoint_create():
    form_data, parsed_data, errors = _validate_chargepoint_form(request.form)
    if errors:
        return _render_chargepoint_list(errors=errors, create_form_data=form_data)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                INSERT INTO bhi_chargepoint_list
                (name, chargepoint_id, latitude, longitude, max_power_kw, power_type, installation_location, country_code,
                 supports_autocharge, can_deliver_soc)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """,
                (
                    parsed_data['name'],
                    parsed_data['chargepoint_id'],
                    parsed_data['latitude'],
                    parsed_data['longitude'],
                    parsed_data['max_power_kw'],
                    parsed_data['power_type'],
                    parsed_data['installation_location'],
                    parsed_data['country_code'],
                    parsed_data['supports_autocharge'],
                    parsed_data['can_deliver_soc'],
                ),
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_chargepoint_list'))


@app.post('/cf/bhi_chargepoint_list/update/<int:chargepoint_id>')
def bhi_chargepoint_update(chargepoint_id):
    existing = _get_chargepoint(chargepoint_id)
    if not existing:
        abort(404)

    form_data, parsed_data, errors = _validate_chargepoint_form(request.form)
    if errors:
        existing.update(form_data)
        return _render_chargepoint_list(errors=errors, edit_chargepoint=existing)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                UPDATE bhi_chargepoint_list
                SET name=%s, chargepoint_id=%s, latitude=%s, longitude=%s, max_power_kw=%s, power_type=%s,
                    installation_location=%s, country_code=%s, supports_autocharge=%s, can_deliver_soc=%s
                WHERE id=%s
                """,
                (
                    parsed_data['name'],
                    parsed_data['chargepoint_id'],
                    parsed_data['latitude'],
                    parsed_data['longitude'],
                    parsed_data['max_power_kw'],
                    parsed_data['power_type'],
                    parsed_data['installation_location'],
                    parsed_data['country_code'],
                    parsed_data['supports_autocharge'],
                    parsed_data['can_deliver_soc'],
                    chargepoint_id,
                ),
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_chargepoint_list'))


@app.post('/cf/bhi_chargepoint_list/delete/<int:chargepoint_id>')
def bhi_chargepoint_delete(chargepoint_id):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "DELETE FROM bhi_chargepoint_list WHERE id=%s",
                (chargepoint_id,),
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_chargepoint_list'))


@app.get('/cf/bhi_chargepoint_temperature/<int:chargepoint_id>')
def bhi_chargepoint_temperature(chargepoint_id: int):
    chargepoint = _get_chargepoint(chargepoint_id)
    if not chargepoint:
        abort(404)

    weather_rows = _fetch_chargepoint_weather_history(chargepoint)

    labels = []
    temperatures = []

    highest_temperature = None
    lowest_temperature = None

    for row in weather_rows:
        observation_time = row.get('observation_time')
        if isinstance(observation_time, datetime.datetime):
            label = observation_time.strftime('%Y-%m-%d %H:%M')
        else:
            label = str(observation_time)

        temperature = row.get('temperature')
        if temperature is not None:
            try:
                temperature = float(temperature)
            except (TypeError, ValueError):
                temperature = None

        labels.append(label)
        temperatures.append(temperature)

        if temperature is not None:
            if (
                highest_temperature is None
                or temperature > highest_temperature['value']
            ):
                highest_temperature = {
                    'value': temperature,
                    'label': label,
                }

            if (
                lowest_temperature is None
                or temperature < lowest_temperature['value']
            ):
                lowest_temperature = {
                    'value': temperature,
                    'label': label,
                }

    chart_data = {
        'labels': labels,
        'temperatures': temperatures,
    }

    return render_template(
        'bhi_chargepoint_temperature.html',
        chargepoint=chargepoint,
        chart_data=chart_data,
        highest_temperature=highest_temperature,
        lowest_temperature=lowest_temperature,
        aside='vehicleMenu',
    )


def _ensure_utc_datetime(value):
    if not isinstance(value, datetime.datetime):
        return None

    if value.tzinfo is None:
        return value.replace(tzinfo=UTC)

    return value.astimezone(UTC)


def _to_berlin_time(value):
    utc_value = _ensure_utc_datetime(value)
    if utc_value is None:
        return None

    return utc_value.astimezone(EUROPE_BERLIN)


def _format_duration(delta):
    if not isinstance(delta, datetime.timedelta):
        return None

    total_seconds = int(delta.total_seconds())
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}"


def _format_display_timestamp(value):
    if not isinstance(value, datetime.datetime):
        return None

    return value.strftime("%Y-%m-%d %H:%M:%S %Z")


def _format_optional_string(value):
    if value is None:
        return "-"

    if isinstance(value, (int, float, Decimal)):
        return str(value)

    text = str(value).strip()
    return text or "-"


def _format_angle(value):
    if value is None:
        return "-"

    return f"{value:.2f}°"


def _format_percentage(value):
    if value is None:
        return "-"

    return f"{value:.1f} %"


def _format_session_rows(rows):
    for row in rows:
        start = row.get("session_start")
        end = row.get("session_end")

        start_local = _to_berlin_time(start)
        end_local = _to_berlin_time(end)

        row["session_start_local"] = start_local
        row["session_end_local"] = end_local

        if start_local is not None:
            row["session_start_fmt"] = start_local.strftime("%Y-%m-%d %H:%M:%S")
        else:
            row["session_start_fmt"] = start

        if end_local is not None:
            row["session_end_fmt"] = end_local.strftime("%Y-%m-%d %H:%M:%S")
        else:
            row["session_end_fmt"] = end

        if start_local is not None and end_local is not None:
            duration = end_local - start_local
            row["duration"] = _format_duration(duration)
        else:
            row["duration"] = None

        meter_start = row.get("meter_start")
        meter_stop = row.get("meter_stop")
        if meter_start is not None and meter_stop is not None:
            row["meter_delta"] = meter_stop - meter_start
        else:
            row["meter_delta"] = None

        row["charge_factor"] = _normalize_percentage(row.get("charge_factor"))
        row["confidence"] = _normalize_percentage(row.get("confidence"))

        row["highlighted"] = bool(row.get("highlighted"))

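        # The meter delta is assumed to be in Wh (OCPP meter values), so
        # dividing by 1000 yields kWh.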
        meter_delta = row.get("meter_delta")
        if meter_delta is not None:
            row["meter_delta_kwh"] = meter_delta / 1000.0
        else:
            row["meter_delta_kwh"] = None

        if "vehicle_assigned" in row:
            row["vehicle_assigned"] = bool(row.get("vehicle_assigned"))

    return rows


def _parse_ocpp_timestamp(value):
    if isinstance(value, datetime.datetime):
        return _ensure_utc_datetime(value)

    if not value:
        return None

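    # OCPP timestamps are ISO 8601 strings, usually with a trailing "Z";
    # rewrite that suffix as "+00:00" so fromisoformat() accepts it.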
    if isinstance(value, str):
        cleaned = value.strip()
        if cleaned.endswith("Z"):
            cleaned = cleaned[:-1] + "+00:00"

        try:
            parsed = datetime.datetime.fromisoformat(cleaned)
        except ValueError:
            parsed = None

        if parsed is None:
            for fmt in ("%Y-%m-%dT%H:%M:%S.%f%z", "%Y-%m-%dT%H:%M:%S%z"):
                try:
                    parsed = datetime.datetime.strptime(cleaned, fmt)
                    break
                except ValueError:
                    parsed = None

        if parsed is None:
            return None

        return _ensure_utc_datetime(parsed)

    return None


def _extract_meter_samples(row):
    message = row.get("message")
    fallback_timestamp = _ensure_utc_datetime(row.get("timestamp"))

    if not message:
        return []

    if isinstance(message, (bytes, bytearray)):
        message = message.decode("utf-8", errors="ignore")

    try:
        payload = json.loads(message)
    except (TypeError, ValueError):
        return []

    if not isinstance(payload, list) or len(payload) < 4:
        return []

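    # OCPP-J call frames have the shape [MessageTypeId, UniqueId, Action,
    # Payload], so the MeterValues payload sits at index 3.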
    body = payload[3]
    if not isinstance(body, dict):
        return []

    meter_values = body.get("meterValue") or []
    if not isinstance(meter_values, list):
        return []

    samples = []
    for meter_entry in meter_values:
        if not isinstance(meter_entry, dict):
            continue

        timestamp = _parse_ocpp_timestamp(meter_entry.get("timestamp")) or fallback_timestamp
        timestamp_local = _to_berlin_time(timestamp)

        sampled_values = meter_entry.get("sampledValue") or []
        if isinstance(sampled_values, dict):
            sampled_values = [sampled_values]

        for sample in sampled_values:
            if not isinstance(sample, dict):
                continue

            measurand = sample.get("measurand") or "Value"
            unit = sample.get("unit")
            raw_value = sample.get("value")
            phase = sample.get("phase")
            location = sample.get("location")

            if isinstance(phase, str):
                phase = phase.strip() or None
            if isinstance(location, str):
                location = location.strip() or None

            try:
                value = float(raw_value)
            except (TypeError, ValueError):
                continue

            if timestamp_local is None:
                continue

            samples.append(
                {
                    "measurand": measurand,
                    "unit": unit,
                    "value": value,
                    "timestamp_local": timestamp_local,
                    "phase": phase,
                    "location": location,
                }
            )

    return samples


def _extract_measurement_bounds(detail_rows):
    """Return the first and last measurement timestamps in *detail_rows*."""

    first_timestamp = None
    last_timestamp = None

    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if not isinstance(timestamp, datetime.datetime):
                continue

            if first_timestamp is None or timestamp < first_timestamp:
                first_timestamp = timestamp
            if last_timestamp is None or timestamp > last_timestamp:
                last_timestamp = timestamp

    return first_timestamp, last_timestamp


def _fetch_session_ambient_temperatures(session, detail_rows):
    """Fetch ambient temperature samples for the session timeline."""

    if not isinstance(session, dict):
        return [], None, None

    chargepoint_identifier = session.get("station_id")
    if not chargepoint_identifier:
        return [], None, None

    measurement_start, measurement_end = _extract_measurement_bounds(detail_rows)

    start_local = session.get("session_start_local") or measurement_start
    end_local = session.get("session_end_local") or measurement_end

    start_utc = _ensure_utc_datetime(start_local) if start_local else None
    end_utc = _ensure_utc_datetime(end_local) if end_local else None

    if start_utc is None and end_utc is None:
        return [], None, None

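    # Pad the lookup window by 30 minutes on each side; when only one session
    # boundary is known, extend the window four hours past it instead.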
    margin = datetime.timedelta(minutes=30)
    if start_utc and end_utc:
        window_start = min(start_utc, end_utc) - margin
        window_end = max(start_utc, end_utc) + margin
    else:
        target = start_utc or end_utc
        window_start = target - margin
        window_end = target + datetime.timedelta(hours=4)

    if window_end <= window_start:
        window_end = window_start + datetime.timedelta(hours=2)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT observation_time, temperature
                FROM bhi_timeseries_weather
                WHERE chargepoint_id = %s
                  AND observation_time BETWEEN %s AND %s
                ORDER BY observation_time
                """,
                (chargepoint_identifier, window_start, window_end),
            )
            rows = cur.fetchall()
    finally:
        conn.close()

    samples = []
    for row in rows:
        observation_time = _ensure_utc_datetime(row.get("observation_time"))
        temperature = row.get("temperature")

        if observation_time is None or temperature is None:
            continue

        try:
            temperature_value = float(temperature)
        except (TypeError, ValueError):
            continue

        local_time = _to_berlin_time(observation_time)
        if local_time is None:
            continue

        samples.append((local_time, temperature_value))

    samples.sort(key=lambda item: item[0])

    def closest_temperature(target):
        if not samples or not isinstance(target, datetime.datetime):
            return None

        nearest_time, value = min(samples, key=lambda item: abs(item[0] - target))
        # Only consider values within six hours to avoid unrelated samples.
        if abs(nearest_time - target) > datetime.timedelta(hours=6):
            return None
        return value

    ambient_start = closest_temperature(start_local)
    ambient_end = closest_temperature(end_local)

    return samples, ambient_start, ambient_end


def _build_axis_id(measurand, unit):
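    """Derive a stable axis id from a measurand/unit pair.

    Examples: ("SoC", "%") -> "axis_soc"; (None, "W") -> "axis_w".
    """
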
    if measurand and measurand.lower() == "soc":
        return "axis_soc"

    base = unit or measurand or "value"
    base = base.lower()
    base = re.sub(r"[^a-z0-9]+", "_", base).strip("_")
    if not base:
        base = "value"

    return f"axis_{base}"


def _build_axes_config(axis_meta):
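    """Distribute value axes alternately to the left and right of the chart.

    Only the first axis on each side draws grid lines; further axes on the
    same side are offset, and percent axes get a suggested 0-100 range.
    """
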
    axes = []
    position_cycle = ["left", "right"]
    position_counts = {"left": 0, "right": 0}

    for index, (axis_id, meta) in enumerate(axis_meta.items()):
        position = position_cycle[index % len(position_cycle)]
        draw_grid = position_counts[position] == 0
        axis_config = {
            "id": axis_id,
            "position": position,
            "title": meta.get("title") or axis_id,
            "draw_grid": draw_grid,
            "offset": position_counts[position] > 0,
        }

        unit = (meta.get("unit") or "").lower()
        if unit == "percent":
            axis_config["suggested_min"] = 0
            axis_config["suggested_max"] = 100

        axes.append(axis_config)
        position_counts[position] += 1

    return axes


def _fetch_session_oem_soc_data(session, start_local=None, end_local=None):
    """Return OEM SoC samples and start/end candidates for a session."""

    result = {
        "samples": [],
        "start_candidate": None,
        "end_candidate": None,
    }

    if not isinstance(session, dict):
        return result

    vehicle_id = session.get("vehicle_id")
    if vehicle_id in (None, ""):
        return result

    vehicle_id = _coerce_int(vehicle_id)
    if vehicle_id is None:
        return result

    if start_local is None and isinstance(session.get("session_start_local"), datetime.datetime):
        start_local = session.get("session_start_local")
    if end_local is None and isinstance(session.get("session_end_local"), datetime.datetime):
        end_local = session.get("session_end_local")

    start_utc = _ensure_utc_datetime(start_local) if isinstance(start_local, datetime.datetime) else None
    end_utc = _ensure_utc_datetime(end_local) if isinstance(end_local, datetime.datetime) else None

    if start_utc is None and end_utc is None:
        return result

    margin = datetime.timedelta(minutes=30)
    if start_utc and end_utc:
        window_start = min(start_utc, end_utc) - margin
        window_end = max(start_utc, end_utc) + margin
    else:
        target = start_utc or end_utc
        window_start = target - margin
        window_end = target + datetime.timedelta(hours=4)

    if window_end <= window_start:
        window_end = window_start + datetime.timedelta(hours=2)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT ts, msg_value
                FROM bhi_oem_data
                WHERE vehicle_id = %s
                  AND msg_key = %s
                  AND ts BETWEEN %s AND %s
                ORDER BY ts
                """,
                (vehicle_id, "soc", window_start, window_end),
            )
            rows = cur.fetchall()
    finally:
        conn.close()

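    # Only readings within 15 minutes of a session boundary qualify as its
    # start/end SoC candidate; the closest qualifying reading wins.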
    fifteen_minutes = datetime.timedelta(minutes=15)

    def update_candidate(current, candidate, target_time):
        if target_time is None:
            return current
        candidate_time, _ = candidate
        if abs(candidate_time - target_time) > fifteen_minutes:
            return current
        if current is None:
            return candidate
        current_delta = abs(current[0] - target_time)
        candidate_delta = abs(candidate_time - target_time)
        if candidate_delta < current_delta:
            return candidate
        return current

    samples = []

    for row in rows:
        timestamp_utc = _ensure_utc_datetime(row.get("ts"))
        if timestamp_utc is None:
            continue

        timestamp_local = _to_berlin_time(timestamp_utc)
        if timestamp_local is None:
            continue

        value = _normalize_numeric_value(row.get("msg_value"))
        if value is None:
            continue

        if not math.isfinite(value):
            continue

        if value < 0 or value > 100:
            continue

        candidate = (timestamp_local, value)
        result["start_candidate"] = update_candidate(result["start_candidate"], candidate, start_local)
        result["end_candidate"] = update_candidate(result["end_candidate"], candidate, end_local)

        include_sample = True
        if isinstance(start_local, datetime.datetime) and timestamp_local < start_local:
            include_sample = False
        if include_sample and isinstance(end_local, datetime.datetime) and timestamp_local > end_local:
            include_sample = False

        if include_sample:
            samples.append(candidate)

    samples.sort(key=lambda item: item[0])
    result["samples"] = samples

    return result


def _prepare_session_chart_payload(detail_rows, ambient_samples=None, session=None):
    datasets_map = OrderedDict()
    first_meter_timestamp = None
    last_meter_timestamp = None
    chart_first_timestamp = None
    chart_last_timestamp = None

    soc_overlay_dataset = None
    soc_axis_meta = None
    soc_axis_id = None

    def update_meter_range(timestamp):
        nonlocal first_meter_timestamp, last_meter_timestamp
        if first_meter_timestamp is None or timestamp < first_meter_timestamp:
            first_meter_timestamp = timestamp
        if last_meter_timestamp is None or timestamp > last_meter_timestamp:
            last_meter_timestamp = timestamp

    def update_chart_range(timestamp):
        nonlocal chart_first_timestamp, chart_last_timestamp
        if chart_first_timestamp is None or timestamp < chart_first_timestamp:
            chart_first_timestamp = timestamp
        if chart_last_timestamp is None or timestamp > chart_last_timestamp:
            chart_last_timestamp = timestamp

    oem_soc_data = None
    soc_measurand_present = False

    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample["timestamp_local"]
            update_meter_range(timestamp)
            update_chart_range(timestamp)

            measurand = sample.get("measurand")
            unit = sample.get("unit")
            phase = sample.get("phase")
            location = sample.get("location")

            key = (measurand, unit, phase, location)
            datasets_map.setdefault(key, []).append((timestamp, sample.get("value")))

            if isinstance(measurand, str) and measurand.lower() == "soc":
                soc_measurand_present = True

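    # When the chargepoint never reported an SoC measurand, fall back to
    # OEM-provided SoC readings recorded for the assigned vehicle.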
    if not soc_measurand_present:
        start_local = None
        end_local = None
        if isinstance(session, dict):
            start_local = session.get("session_start_local")
            end_local = session.get("session_end_local")

        if start_local is None:
            start_local = chart_first_timestamp
        if end_local is None:
            end_local = chart_last_timestamp

        oem_soc_data = _fetch_session_oem_soc_data(session, start_local, end_local)
        oem_samples = oem_soc_data.get("samples") if isinstance(oem_soc_data, dict) else None

        if oem_samples:
            key = ("soc", "%", None, "OEM")
            for timestamp, value in oem_samples:
                update_meter_range(timestamp)
                update_chart_range(timestamp)
                datasets_map.setdefault(key, []).append((timestamp, value))

    has_meter_values = bool(datasets_map)

    def is_power_measurand(measurand):
        if not isinstance(measurand, str):
            return False
        measurand_lower = measurand.lower()
        return measurand_lower.startswith("power")

    def is_current_measurand(measurand):
        if not isinstance(measurand, str):
            return False
        measurand_lower = measurand.lower()
        return measurand_lower.startswith("current")

    def has_voltage_dataset():
        for measurand, _, _, _ in datasets_map.keys():
            if isinstance(measurand, str) and measurand.lower().startswith("voltage"):
                return True
        return False

    if ambient_samples:
        ambient_values = []
        for timestamp, value in ambient_samples:
            if not isinstance(timestamp, datetime.datetime):
                continue
            try:
                numeric_value = float(value)
            except (TypeError, ValueError):
                continue

            update_chart_range(timestamp)
            ambient_values.append((timestamp, numeric_value))

        if ambient_values:
            key = ("Ambient Temperature", "°C", None, None)
            datasets_map.setdefault(key, []).extend(ambient_values)

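    # Without a reported voltage measurand, approximate voltage as average
    # power divided by average current at timestamps where both are present.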
    if datasets_map and not has_voltage_dataset():
        power_by_timestamp = defaultdict(list)
        current_by_timestamp = defaultdict(list)

        def normalize_power(value, unit):
            # Coerce to float first so string/Decimal meter values cannot
            # break the unit scaling below.
            numeric = _normalize_numeric_value(value)
            if numeric is None:
                return None
            if not unit:
                return numeric
            unit_lower = unit.lower()
            if unit_lower == "w":
                return numeric
            if unit_lower == "kw":
                return numeric * 1000
            if unit_lower == "mw":
                return numeric * 1_000_000
            return None

        def normalize_current(value, unit):
            numeric = _normalize_numeric_value(value)
            if numeric is None:
                return None
            if not unit:
                return numeric
            unit_lower = unit.lower()
            if unit_lower in {"a", "amp", "ampere", "amps"}:
                return numeric
            if unit_lower == "ma":
                return numeric / 1000
            return None

        for (measurand, unit, _, _), values in datasets_map.items():
            if not values:
                continue
            if is_power_measurand(measurand):
                for timestamp, value in values:
                    normalized_value = normalize_power(value, unit)
                    if normalized_value is None:
                        continue
                    power_by_timestamp[timestamp].append(normalized_value)
            elif is_current_measurand(measurand):
                for timestamp, value in values:
                    normalized_value = normalize_current(value, unit)
                    if normalized_value is None:
                        continue
                    current_by_timestamp[timestamp].append(normalized_value)

        shared_timestamps = sorted(set(power_by_timestamp.keys()) & set(current_by_timestamp.keys()))
        voltage_values = []
        for timestamp in shared_timestamps:
            power_samples = power_by_timestamp.get(timestamp) or []
            current_samples = current_by_timestamp.get(timestamp) or []
            if not power_samples or not current_samples:
                continue
            avg_power = sum(power_samples) / len(power_samples)
            avg_current = sum(current_samples) / len(current_samples)
            if not avg_current:
                continue
            voltage = avg_power / avg_current
            if not math.isfinite(voltage):
                continue
            voltage_values.append((timestamp, voltage))

        if voltage_values:
            key = ("Voltage (calculated)", "V", None, None)
            datasets_map[key] = voltage_values

    def categorize_measurand(measurand):
        text = (measurand or "").strip().lower()
        if text == "soc" or text.startswith("energy"):
            return "SoC & Energy"
        if "temperature" in text:
            return "Temperatures"
        if text.startswith("power") or text.startswith("current") or text.startswith("voltage"):
            return "Power & Current"
        return "Other Measurements"

    grouped_data = OrderedDict()

    for (measurand, unit, phase, location), values in datasets_map.items():
        values.sort(key=lambda item: item[0])
        axis_id = _build_axis_id(measurand, unit)
        axis_title = measurand or "Value"
        if unit:
            axis_title = f"{axis_title} ({unit})"

        qualifier_parts = []
        if phase:
            qualifier_parts.append(str(phase))
        if location:
            qualifier_parts.append(str(location))

        dataset_label = axis_title
        if qualifier_parts:
            dataset_label = f"{axis_title} [{', '.join(qualifier_parts)}]"

        data_points = [
            {
                "x": timestamp.isoformat(),
                "y": value,
            }
            for timestamp, value in values
        ]

        is_soc = isinstance(measurand, str) and measurand.lower() == "soc"

        category = categorize_measurand(measurand)
        group_entry = grouped_data.setdefault(
            category,
            {
                "datasets": [],
                "axis_meta": OrderedDict(),
            },
        )

        group_entry["datasets"].append(
            {
                "label": dataset_label,
                "data": data_points,
                "yAxisID": axis_id,
                "isSoc": is_soc,
            }
        )

        if axis_id not in group_entry["axis_meta"]:
            group_entry["axis_meta"][axis_id] = {
                "title": axis_title,
                "unit": unit,
            }

        if is_soc and soc_overlay_dataset is None:
            soc_axis_id = axis_id
            soc_axis_meta = {
                "title": axis_title,
                "unit": unit,
            }
            soc_overlay_dataset = {
                "label": dataset_label,
                "data": copy.deepcopy(data_points),
                "yAxisID": axis_id,
                "isSoc": True,
            }

    category_priority = {
        "SoC & Energy": 0,
        "Power & Current": 1,
        "Temperatures": 2,
    }
    original_order = {category: index for index, category in enumerate(grouped_data.keys())}

    chart_groups = OrderedDict()

    for category, group in sorted(
        grouped_data.items(),
        key=lambda item: (
            category_priority.get(item[0], len(category_priority)),
            original_order.get(item[0], 0),
        ),
    ):
        axes = _build_axes_config(group.get("axis_meta", OrderedDict()))
        chart_groups[category] = {
            "datasets": group.get("datasets", []),
            "axes": axes,
            "time_range": {
                "start": chart_first_timestamp.isoformat()
                if isinstance(chart_first_timestamp, datetime.datetime)
                else None,
                "end": chart_last_timestamp.isoformat()
                if isinstance(chart_last_timestamp, datetime.datetime)
                else None,
            },
        }

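    # Mirror the SoC dataset into every chart group that lacks one so the SoC
    # curve can be overlaid on a dedicated right-hand axis.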
    if soc_overlay_dataset and chart_groups:
        soc_axis_title = None
        soc_axis_unit = None
        if isinstance(soc_axis_meta, dict):
            soc_axis_title = soc_axis_meta.get("title")
            soc_axis_unit = soc_axis_meta.get("unit")

        for group in chart_groups.values():
            datasets = group.get("datasets", [])
            if any(dataset.get("isSoc") for dataset in datasets):
                continue

            overlay_dataset = copy.deepcopy(soc_overlay_dataset)
            overlay_dataset["yAxisID"] = soc_axis_id or "axis_soc"
            datasets.append(overlay_dataset)

            axes = group.get("axes")
            if axes is None:
                axes = []
                group["axes"] = axes

            if not any(axis.get("id") == (soc_axis_id or "axis_soc") for axis in axes):
                axis_config = {
                    "id": soc_axis_id or "axis_soc",
                    "position": "right",
                    "title": soc_axis_title or "SoC",
                    "draw_grid": False,
                    "offset": True,
                }

                if isinstance(soc_axis_unit, str) and soc_axis_unit.lower() in {"percent", "%"}:
                    axis_config["suggested_min"] = 0
                    axis_config["suggested_max"] = 100

                axes.append(axis_config)

    stats = {
        "first_measurement": first_meter_timestamp,
        "last_measurement": last_meter_timestamp,
        "soc": None,
        "energy": None,
        "voltage": None,
        "current": None,
        "power_active_import": None,
    }

    def extract_numeric_values(values):
        numeric_values = []
        for _, raw_value in values:
            if raw_value is None:
                continue
            value = raw_value
            if isinstance(value, Decimal):
                value = float(value)
            elif not isinstance(value, (int, float)):
                try:
                    value = float(value)
                except (TypeError, ValueError):
                    continue
            if math.isfinite(value):
                numeric_values.append(value)
        return numeric_values

    def update_range_stat(stat_key, unit, numeric_values):
        if not numeric_values:
            return
        stat = stats.get(stat_key) or {}
        stat_min = stat.get("min")
        stat_max = stat.get("max")
        current_min = min(numeric_values)
        current_max = max(numeric_values)
        if stat_min is None or current_min < stat_min:
            stat["min"] = current_min
        if stat_max is None or current_max > stat_max:
            stat["max"] = current_max
        if unit:
            stat.setdefault("unit", unit)
        stats[stat_key] = stat

    def update_max_stat(stat_key, unit, numeric_values):
        if not numeric_values:
            return
        stat = stats.get(stat_key) or {}
        stat_max = stat.get("max")
        current_max = max(numeric_values)
        if stat_max is None or current_max > stat_max:
            stat["max"] = current_max
        if unit:
            stat.setdefault("unit", unit)
        stats[stat_key] = stat

    for (measurand, unit, _phase, _location), values in datasets_map.items():
        if not values:
            continue

        measurand_lower = measurand.lower() if isinstance(measurand, str) else ""
        numeric_values = extract_numeric_values(values)

        if measurand_lower == "soc":
            stats["soc"] = {
                "start": values[0][1],
                "end": values[-1][1],
            }

        if measurand_lower == "energy.active.import.register":
            stats["energy"] = {
                "start": values[0][1],
                "end": values[-1][1],
                "unit": unit,
            }

        if measurand_lower.startswith("voltage"):
            update_range_stat("voltage", unit, numeric_values)

        if measurand_lower.startswith("current"):
            update_range_stat("current", unit, numeric_values)

        if measurand_lower == "power.active.import":
            update_max_stat("power_active_import", unit, numeric_values)

    if oem_soc_data is not None:
        soc_stats = stats.get("soc") or {}
        start_candidate = oem_soc_data.get("start_candidate") if isinstance(oem_soc_data, dict) else None
        end_candidate = oem_soc_data.get("end_candidate") if isinstance(oem_soc_data, dict) else None

        if start_candidate:
            soc_stats["start"] = start_candidate[1]
        elif not soc_measurand_present:
            soc_stats.pop("start", None)

        if not soc_measurand_present:
            if end_candidate:
                soc_stats["end"] = end_candidate[1]
            else:
                soc_stats.pop("end", None)

        stats["soc"] = soc_stats if soc_stats else None

    return chart_groups, stats, has_meter_values


def calculate_session_algorithm_metrics(session, detail_rows, vehicle):
    results = {
        "charge_factor": None,
        "confidence": None,
        "estimated_soh": None,
    }

    markers = {}

    start_local = session.get("session_start_local")
    threshold_time = None
    if isinstance(start_local, datetime.datetime):
        threshold_time = start_local + datetime.timedelta(minutes=20)
        markers["twenty_min_timestamp"] = threshold_time.isoformat()

    soc_samples = []
    measurement_map = {}
    reached_eighty_soc = False
    last_energy_after_threshold = None

    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            measurand = sample.get("measurand")
            measurand_lower = measurand.lower() if isinstance(measurand, str) else ""
            if not measurand or measurand_lower != "soc":
                # Track energy samples for SoH estimation
                if measurand_lower == "energy.active.import.register":
                    timestamp = sample.get("timestamp_local")
                    if not isinstance(timestamp, datetime.datetime):
                        continue
                    if threshold_time and timestamp < threshold_time:
                        continue

                    converted_energy = _convert_energy_to_kwh(sample.get("value"), sample.get("unit"))
                    if converted_energy is None:
                        continue

                    entry = measurement_map.setdefault(timestamp, {})
                    entry["energy"] = converted_energy
                    last_energy_after_threshold = converted_energy
                continue

            timestamp = sample.get("timestamp_local")
            value = sample.get("value")

            if not isinstance(timestamp, datetime.datetime):
                continue

            try:
                numeric_value = float(value)
            except (TypeError, ValueError):
                continue

            soc_samples.append((timestamp, numeric_value))

            if threshold_time and timestamp < threshold_time:
                continue

            if numeric_value >= 80:
                reached_eighty_soc = True
                if "soc_80_timestamp" not in markers:
                    markers["soc_80_timestamp"] = timestamp.isoformat()

            entry = measurement_map.setdefault(timestamp, {})
            entry["soc"] = numeric_value
            if "energy" not in entry and last_energy_after_threshold is not None:
                entry["energy"] = last_energy_after_threshold

    if len(soc_samples) < 2:
        results["display"] = {
            "charge_factor": _format_angle(results["charge_factor"]),
            "confidence": _format_percentage(results["confidence"]),
            "estimated_soh": _format_percentage(results["estimated_soh"]),
        }
        if markers:
            results["chart_markers"] = markers
        _persist_algo_results(session, vehicle, results)
        return results

    soc_samples.sort(key=lambda item: item[0])
    origin = soc_samples[0][0]

    x_values = []
    y_values = []
    for timestamp, value in soc_samples:
        delta_hours = (timestamp - origin).total_seconds() / 3600.0
        x_values.append(delta_hours)
        y_values.append(value)

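    # Ordinary least-squares fit of SoC (%) against elapsed time in hours.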
    x_mean = sum(x_values) / len(x_values)
    y_mean = sum(y_values) / len(y_values)

    numerator = sum((x - x_mean) * (y - y_mean) for x, y in zip(x_values, y_values))
    denominator = sum((x - x_mean) ** 2 for x in x_values)

    if denominator == 0:
        slope = 0.0
    else:
        slope = numerator / denominator

    intercept = y_mean - slope * x_mean

    predicted = [slope * x + intercept for x in x_values]
    residuals = [actual - estimate for actual, estimate in zip(y_values, predicted)]

    ss_res = sum(residual ** 2 for residual in residuals)
    ss_tot = sum((y - y_mean) ** 2 for y in y_values)

    if ss_tot > 0:
        r_squared = 1 - (ss_res / ss_tot)
    else:
        r_squared = 1.0 if ss_res == 0 else 0.0

    r_squared = max(0.0, min(1.0, r_squared))

    charge_factor = math.degrees(math.atan(slope))
    confidence = r_squared * 100.0

    results.update(
        {
            "charge_factor": charge_factor,
            "confidence": confidence,
        }
    )

    results.update(_estimate_soh(vehicle, measurement_map, reached_eighty_soc, markers))

    results["display"] = {
        "charge_factor": _format_angle(charge_factor),
        "confidence": _format_percentage(confidence),
        "estimated_soh": _format_percentage(results["estimated_soh"]),
    }

    if markers:
        results["chart_markers"] = markers

    _persist_algo_results(session, vehicle, results)
    return results


def _convert_energy_to_kwh(value, unit):
    try:
        numeric_value = float(value)
    except (TypeError, ValueError):
        return None

    unit_normalized = (unit or "").strip().lower()
    if unit_normalized == "wh":
        return numeric_value / 1000.0
    if unit_normalized == "kwh":
        return numeric_value
    if not unit_normalized:
        return numeric_value

    # Unknown unit – best effort by returning the raw numeric value
    return numeric_value


def _convert_energy_to_wh(value, unit):
    converted = _convert_energy_to_kwh(value, unit)
    if converted is None:
        return None

    return converted * 1000.0


def _build_soc_energy_profile(detail_rows):
    """Return Wh-per-SoC data points derived from *detail_rows*.

    The result is a dict suitable for charting, or ``None`` when no
    meaningful SoC/energy combination is available.
    """

    measurement_map = {}

    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if not isinstance(timestamp, datetime.datetime):
                continue

            entry = measurement_map.setdefault(timestamp, {})

            measurand = (sample.get("measurand") or "").strip().lower()
            value = sample.get("value")
            unit = sample.get("unit")

            if measurand == "soc":
                soc_numeric = _normalize_numeric_value(value)
                if soc_numeric is None or not math.isfinite(soc_numeric):
                    continue
                if soc_numeric < 0 or soc_numeric > 100:
                    continue
                entry["soc"] = soc_numeric
            elif measurand == "energy.active.import.register":
                energy_wh = _convert_energy_to_wh(value, unit)
                if energy_wh is None or not math.isfinite(energy_wh):
                    continue
                entry["energy_wh"] = energy_wh

    if not measurement_map:
        return None

    sorted_points = sorted(
        (
            timestamp,
            values.get("soc"),
            values.get("energy_wh"),
        )
        for timestamp, values in measurement_map.items()
    )

    if not sorted_points:
        return None

    chart_points = []
    energy_at_full_soc = {}
    total_energy = 0.0

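    # Record the cumulative energy observed at each whole-percent SoC value
    # and, once the previous integer step is known, emit the Wh consumed for
    # that 1 % step as a chart point.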
    def _store_energy_for_soc(soc_value: Optional[float], energy_value: Optional[float]):
        nonlocal total_energy

        if soc_value is None or energy_value is None:
            return
        if not math.isfinite(soc_value) or not math.isfinite(energy_value):
            return

        rounded_soc = int(round(soc_value))
        if abs(soc_value - rounded_soc) > 1e-6:
            return
        if rounded_soc < 0 or rounded_soc > 100:
            return

        if rounded_soc not in energy_at_full_soc:
            previous_energy = energy_at_full_soc.get(rounded_soc - 1)
            energy_at_full_soc[rounded_soc] = energy_value
            if previous_energy is not None and energy_value >= previous_energy:
                delta_energy = energy_value - previous_energy
                chart_points.append({
                    "x": float(rounded_soc),
                    "y": delta_energy,
                })
                total_energy += delta_energy

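    # Walk the samples in time order; whenever SoC and energy both increase,
    # interpolate the cumulative energy at every whole-percent SoC boundary
    # crossed between the previous and the current sample.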
    last_energy = None
    last_soc = None
    last_energy_is_real = False

    for _timestamp, soc_value, energy_wh in sorted_points:
        if energy_wh is None:
            continue

        if soc_value is None:
            continue

        if not math.isfinite(soc_value) or not math.isfinite(energy_wh):
            continue

        if last_soc is None or not last_energy_is_real:
            last_soc = soc_value
            last_energy = energy_wh
            last_energy_is_real = True
            _store_energy_for_soc(soc_value, energy_wh)
            continue

        if soc_value <= last_soc:
            last_soc = soc_value
            last_energy = energy_wh
            last_energy_is_real = True
            _store_energy_for_soc(soc_value, energy_wh)
            continue

        if energy_wh < last_energy:
            last_soc = soc_value
            last_energy = energy_wh
            last_energy_is_real = True
            _store_energy_for_soc(soc_value, energy_wh)
            continue

        soc_delta = soc_value - last_soc
        energy_delta = energy_wh - last_energy

        if soc_delta <= 0:
            last_soc = soc_value
            last_energy = energy_wh
            last_energy_is_real = True
            _store_energy_for_soc(soc_value, energy_wh)
            continue

        if last_energy_is_real:
            _store_energy_for_soc(last_soc, last_energy)

        start_int = math.floor(last_soc) + 1
        end_int = math.floor(soc_value)

        if last_energy_is_real and end_int >= start_int:
            for soc_int in range(start_int, end_int + 1):
                if soc_int < 0 or soc_int > 100:
                    continue
                interpolated_energy = last_energy + (energy_delta * (soc_int - last_soc) / soc_delta)
                if math.isfinite(interpolated_energy):
                    _store_energy_for_soc(float(soc_int), interpolated_energy)

        _store_energy_for_soc(soc_value, energy_wh)

        last_soc = soc_value
        last_energy = energy_wh
        last_energy_is_real = True

    if not chart_points:
        return None

    chart_points.sort(key=lambda point: point["x"])

    min_soc = min(point["x"] for point in chart_points)
    max_soc = max(point["x"] for point in chart_points)

    average = None
    total_soc_steps = len(chart_points)
    if total_soc_steps > 0:
        average = total_energy / total_soc_steps

    return {
        "points": chart_points,
        "average": average,
        "min_soc": min_soc,
        "max_soc": max_soc,
    }


def _estimate_soh(vehicle, measurement_map, reached_eighty_soc, markers):
    result_updates = {}

    capacity_value = None
    if vehicle and isinstance(vehicle, dict):
        capacity_value = vehicle.get("battery_capacity_net_kwh")

    if isinstance(capacity_value, Decimal):
        capacity_value = float(capacity_value)

    if capacity_value in (None, ""):
        capacity_value = None

    if capacity_value is not None and not isinstance(capacity_value, (int, float)):
        try:
            capacity_value = float(capacity_value)
        except (TypeError, ValueError):
            capacity_value = None

    if capacity_value is None or capacity_value <= 0:
        return result_updates

    usable_points = []
    for timestamp, values in measurement_map.items():
        soc_value = values.get("soc")
        energy_value = values.get("energy")
        if soc_value is None or energy_value is None:
            continue

        if soc_value < 20 or soc_value > 80:
            continue

        usable_points.append((timestamp, soc_value, energy_value))

    if not usable_points:
        return result_updates

    usable_points.sort(key=lambda item: item[0])

    if len(usable_points) < 2:
        return result_updates

    start_point = usable_points[0]
    end_point = usable_points[-1]

    soc_delta = end_point[1] - start_point[1]
    energy_delta = end_point[2] - start_point[2]

    if soc_delta <= 0 or energy_delta <= 0:
        return result_updates

    energy_per_percent = energy_delta / soc_delta
    estimated_full_capacity = energy_per_percent * 100.0
    soh_percentage = (estimated_full_capacity / capacity_value) * 100.0

    result_updates["estimated_soh"] = soh_percentage

    if (reached_eighty_soc or end_point[1] >= 80) and "soc_80_timestamp" not in markers:
        timestamp = end_point[0]
        if isinstance(timestamp, datetime.datetime):
            markers["soc_80_timestamp"] = timestamp.isoformat()

    return result_updates


def _persist_algo_results(session, vehicle, results):
    transaction_id = session.get("transaction_id")
    if transaction_id is None:
        return

    vehicle_id = None
    if vehicle and isinstance(vehicle, dict):
        vehicle_id = vehicle.get("vehicle_id")

    if vehicle_id in (None, ""):
        vehicle_id = 0

    try:
        vehicle_id = int(vehicle_id)
    except (TypeError, ValueError):
        vehicle_id = 0

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                CREATE TABLE IF NOT EXISTS bhi_algo_results (
                    transaction_id BIGINT NOT NULL,
                    vehicle_id INT NOT NULL,
                    charge_factor DOUBLE NULL,
                    confidence DOUBLE NULL,
                    estimated_soh DOUBLE NULL,
                    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                    PRIMARY KEY (transaction_id, vehicle_id)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
                """
            )

            cur.execute(
                """
                INSERT INTO bhi_algo_results (
                    transaction_id,
                    vehicle_id,
                    charge_factor,
                    confidence,
                    estimated_soh
                ) VALUES (%s, %s, %s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    charge_factor = VALUES(charge_factor),
                    confidence = VALUES(confidence),
                    estimated_soh = VALUES(estimated_soh)
                """,
                (
                    transaction_id,
                    vehicle_id,
                    results.get("charge_factor"),
                    results.get("confidence"),
                    results.get("estimated_soh"),
                ),
            )

        conn.commit()
    finally:
        conn.close()

def _fetch_charging_session_by_id(session_id):
    ensure_charging_session_highlights_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            valid_clause = _build_session_validity_clause("s")
            cur.execute(
                f"""
                SELECT
                    s.id,
                    s.station_id,
                    s.connector_id,
                    s.transaction_id,
                    s.id_tag,
                    s.vehicle_id,
                    s.session_start,
                    s.session_end,
                    s.meter_start,
                    s.meter_stop,
                    s.reason,
                    COALESCE(h.highlighted, 0) AS highlighted
                FROM op_charging_sessions AS s
                LEFT JOIN {SESSION_HIGHLIGHT_TABLE} AS h
                    ON h.session_id = s.id
                WHERE s.id = %s
                    AND {valid_clause}
                """,
                (session_id,),
            )
            row = cur.fetchone()
    finally:
        conn.close()

    if not row:
        return None

    formatted = _format_session_rows([row])
    return formatted[0] if formatted else None


def _fetch_charging_session_detail_rows(session, vehicle=None):
    chargepoint_id = session.get("station_id")
    transaction_id = session.get("transaction_id")

    if not chargepoint_id:
        return []

    rows = _query_charging_session_detail_rows_by_transaction(
        chargepoint_id,
        transaction_id,
    )

    if _detail_rows_contain_metrics(rows):
        return rows

    fallback_rows = _query_charging_session_detail_rows_by_context(
        session,
        chargepoint_id,
        vehicle,
    )

    if fallback_rows:
        return fallback_rows

    return rows


def _query_charging_session_detail_rows_by_transaction(chargepoint_id, transaction_id):
    params = [chargepoint_id]
    like_clauses = []

    if transaction_id is not None:
        tx_value = str(transaction_id)
        like_clauses.extend(
            [
                "message LIKE %s",
                "message LIKE %s",
                "message LIKE %s",
            ]
        )
        params.extend(
            [
                f'%"transactionId": {tx_value}%',
                f'%"transactionId":{tx_value}%',
                f'%"transactionId": "{tx_value}"%'
            ]
        )

    where_clauses = ["chargepoint_id = %s"]
    if like_clauses:
        where_clauses.append("(" + " OR ".join(like_clauses) + ")")

    return _query_charging_session_detail_table(where_clauses, params)


def _query_charging_session_detail_rows_by_context(session, chargepoint_id, vehicle):
    where_clauses = []
    params = []

    if chargepoint_id:
        where_clauses.append("chargepoint_id = %s")
        params.append(chargepoint_id)

    vehicle_id = None
    if vehicle and isinstance(vehicle, dict):
        vehicle_id = _coerce_int(vehicle.get("vehicle_id"))

    if vehicle_id is not None:
        where_clauses.append("vehicle_id = %s")
        params.append(vehicle_id)

    start_time = session.get("session_start")
    end_time = session.get("session_end")

    margin = datetime.timedelta(minutes=30)
    if isinstance(start_time, datetime.datetime):
        where_clauses.append("timestamp >= %s")
        params.append(start_time - margin)

    if isinstance(end_time, datetime.datetime):
        where_clauses.append("timestamp <= %s")
        params.append(end_time + margin)
    elif isinstance(start_time, datetime.datetime):
        where_clauses.append("timestamp <= %s")
        params.append(start_time + datetime.timedelta(hours=6))

    if not where_clauses:
        return []

    return _query_charging_session_detail_table(where_clauses, params, limit=500)


def _query_charging_session_detail_table(where_clauses, params, limit=None):
    base_query = [
        "SELECT",
        "    id,",
        "    message,",
        "    timestamp,",
        "    charging_mode,",
        "    est_capacity_wh,",
        "    soc_step_kw,",
        "    lin_soc_start,",
        "    lin_soc_end,",
        "    lin_threshold,",
        "    vehicle_id,",
        "    json_payload",
        "FROM bhi_chargingsession_details",
    ]

    if where_clauses:
        base_query.append("WHERE " + " AND ".join(where_clauses))

    base_query.append("ORDER BY timestamp ASC, id ASC")

    query_params = list(params)
    if limit is not None:
        base_query.append("LIMIT %s")
        query_params.append(int(limit))

    sql_query = "\n".join(base_query)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(sql_query, query_params)
            return cur.fetchall()
    finally:
        conn.close()


def _detail_rows_contain_metrics(rows):
    for row in rows or []:
        for key in ("est_capacity_wh", "soc_step_kw", "lin_soc_start", "lin_soc_end"):
            value = _normalize_numeric_value(row.get(key))
            if value is not None and value != 0:
                return True
    return False


def _normalize_numeric_value(value):
    if isinstance(value, Decimal):
        return float(value)

    if isinstance(value, (int, float)) and not isinstance(value, bool):
        return float(value)

    if value in (None, ""):
        return None

    try:
        return float(value)
    except (TypeError, ValueError):
        return None


def _parse_soc_input(value):
    if value is None:
        return None

    if isinstance(value, str):
        text = value.strip()
        if not text:
            return None
        text = text.replace(",", ".")
    else:
        text = str(value).strip()
        if not text:
            return None

    return _normalize_numeric_value(text)


def _coerce_int(value):
    if isinstance(value, Decimal):
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    if value in (None, ""):
        return None

    try:
        return int(value)
    except (TypeError, ValueError):
        return None


def _resolve_vehicle_capacity_kwh(vehicle):
    if not vehicle or not isinstance(vehicle, dict):
        return None

    capacity_value = _normalize_numeric_value(vehicle.get("battery_capacity_net_kwh"))
    if capacity_value is not None and capacity_value > 0:
        return capacity_value

    vehicle_model_id = _coerce_int(vehicle.get("vehicle_model_id"))
    vehicle_id = _coerce_int(vehicle.get("vehicle_id"))

    if capacity_value is None and vehicle_id is not None:
        extra = _get_vehicle(vehicle_id)
        if extra:
            capacity_from_vehicle = _normalize_numeric_value(extra.get("battery_capacity_net_kwh"))
            if capacity_from_vehicle is not None and capacity_from_vehicle > 0:
                return capacity_from_vehicle
            if vehicle_model_id is None:
                vehicle_model_id = _coerce_int(extra.get("vehicle_model_id"))

    if vehicle_model_id is not None:
        model = _get_vehicle_model(vehicle_model_id)
        if model:
            capacity_from_model = _normalize_numeric_value(model.get("battery_capacity_net_kwh"))
            if capacity_from_model is not None and capacity_from_model > 0:
                return capacity_from_model

    return None


def _count_session_irregularities(detail_rows):
    if not detail_rows:
        return 0

    keywords = ("error", "fault", "warn", "irregular")
    count = 0

    for row in detail_rows:
        texts = []

        message = row.get("message")
        if message:
            if isinstance(message, (bytes, bytearray)):
                try:
                    message = message.decode("utf-8", errors="ignore")
                except Exception:  # pragma: no cover - best effort decoding
                    message = None
            if message is not None:
                texts.append(str(message))

        payload = row.get("json_payload")
        if payload:
            if isinstance(payload, (bytes, bytearray)):
                try:
                    payload = payload.decode("utf-8", errors="ignore")
                except Exception:  # pragma: no cover - best effort decoding
                    payload = None
            if isinstance(payload, (dict, list)):
                try:
                    payload_text = json.dumps(payload, ensure_ascii=False)
                except (TypeError, ValueError):
                    payload_text = str(payload)
            elif payload is not None:
                payload_text = str(payload)
            else:
                payload_text = None

            if payload_text:
                texts.append(payload_text)

        for text in texts:
            lower = text.lower()
            if any(keyword in lower for keyword in keywords):
                count += 1
                break

    return count


def _build_session_overview_metrics(
    detail_metadata,
    detail_display,
    vehicle,
    algo_results,
    detail_rows,
):
    metrics = {
        "estimated_capacity": "-",
        "battery_capacity_net": "-",
        "soc_step_per_percent": "-",
        "irregularities": "0",
        "soc_step_is_custom": False,
    }

    if isinstance(detail_display, dict):
        estimated_display = detail_display.get("estimated_capacity")
        if isinstance(estimated_display, str) and estimated_display.strip() and estimated_display.strip() != "-":
            metrics["estimated_capacity"] = estimated_display.strip()

        soc_display = detail_display.get("soc_step_per_percent")
        if isinstance(soc_display, str) and soc_display.strip() and soc_display.strip() != "-":
            metrics["soc_step_per_percent"] = soc_display.strip()

    vehicle_capacity = _resolve_vehicle_capacity_kwh(vehicle)
    if vehicle_capacity is not None and vehicle_capacity > 0:
        metrics["battery_capacity_net"] = f"{vehicle_capacity:.1f} kWh"

    est_capacity_kwh = None
    if isinstance(detail_metadata, dict):
        raw_capacity_wh = detail_metadata.get("est_capacity_wh")
        numeric_capacity = _normalize_numeric_value(raw_capacity_wh)
        if numeric_capacity is not None and numeric_capacity > 0:
            est_capacity_kwh = numeric_capacity / 1000.0

    if est_capacity_kwh is not None:
        metrics["estimated_capacity"] = f"{est_capacity_kwh:.1f} kWh"
        if metrics["soc_step_per_percent"] == "-":
            metrics["soc_step_per_percent"] = f"{est_capacity_kwh / 100.0:.3f} kWh"

    needs_estimated_capacity = metrics["estimated_capacity"] == "-"
    if needs_estimated_capacity and vehicle_capacity is not None and vehicle_capacity > 0:
        soh_value = None
        if isinstance(algo_results, dict):
            soh_value = algo_results.get("estimated_soh")
        soh_numeric = _normalize_numeric_value(soh_value)
        if soh_numeric is not None and soh_numeric > 0:
            estimated_capacity = vehicle_capacity * (soh_numeric / 100.0)
            metrics["estimated_capacity"] = f"{estimated_capacity:.1f} kWh"
            if metrics["soc_step_per_percent"] == "-":
                metrics["soc_step_per_percent"] = f"{estimated_capacity / 100.0:.3f} kWh"

    if metrics["soc_step_per_percent"] == "-" and vehicle_capacity is not None and vehicle_capacity > 0:
        metrics["soc_step_per_percent"] = f"{vehicle_capacity / 100.0:.3f} kWh"

    irregularity_count = _count_session_irregularities(detail_rows)
    metrics["irregularities"] = str(irregularity_count)

    return metrics


def _interpolate_value_at_time(samples, target_time):
    if not isinstance(target_time, datetime.datetime):
        return None

    closest_before = None
    closest_after = None

    for timestamp, value in samples:
        if not isinstance(timestamp, datetime.datetime):
            continue

        if timestamp == target_time:
            return value

        if timestamp < target_time:
            if closest_before is None or timestamp > closest_before[0]:
                closest_before = (timestamp, value)
        else:
            # Samples are iterated in ascending order, so the first timestamp
            # past the target is the nearest "after" neighbour.
            closest_after = (timestamp, value)
            break

    if closest_after and closest_before:
        total_seconds = (closest_after[0] - closest_before[0]).total_seconds()
        if total_seconds > 0:
            ratio = (target_time - closest_before[0]).total_seconds() / total_seconds
            return closest_before[1] + (closest_after[1] - closest_before[1]) * ratio
        return closest_after[1]

    if closest_after:
        return closest_after[1]

    if closest_before:
        return closest_before[1]

    return None


def _estimate_time_for_soc(samples, target_soc):
    previous_point = None

    for timestamp, value in samples:
        if not isinstance(timestamp, datetime.datetime):
            continue

        if value is None:
            continue

        if value >= target_soc:
            if value == target_soc or not previous_point:
                return timestamp

            prev_timestamp, prev_value = previous_point
            delta_soc = value - prev_value
            if delta_soc <= 0:
                return timestamp

            total_seconds = (timestamp - prev_timestamp).total_seconds()
            if total_seconds <= 0:
                return timestamp

            ratio = (target_soc - prev_value) / delta_soc
            return prev_timestamp + datetime.timedelta(seconds=total_seconds * ratio)

        previous_point = (timestamp, value)

    return None


def _collect_soc_energy_samples(detail_rows):
    soc_samples = []
    energy_samples = []

    for row in detail_rows or []:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if not isinstance(timestamp, datetime.datetime):
                continue

            measurand = (sample.get("measurand") or "").lower()

            if measurand == "soc":
                value = sample.get("value")
                if value is not None:
                    soc_samples.append((timestamp, value))
            elif measurand == "energy.active.import.register":
                energy_value = _convert_energy_to_kwh(sample.get("value"), sample.get("unit"))
                if energy_value is not None:
                    energy_samples.append((timestamp, energy_value))

    soc_samples.sort(key=lambda item: item[0])
    energy_samples.sort(key=lambda item: item[0])
    return soc_samples, energy_samples


def _calculate_custom_soc_step(detail_rows, start_soc, stop_soc):
    start_value = _normalize_numeric_value(start_soc)
    stop_value = _normalize_numeric_value(stop_soc)

    if start_value is None or stop_value is None:
        return None

    if start_value < 0 or stop_value > 100 or start_value >= stop_value:
        return None

    soc_samples, energy_samples = _collect_soc_energy_samples(detail_rows)
    if not soc_samples or not energy_samples:
        return None

    start_time = _estimate_time_for_soc(soc_samples, start_value)
    stop_time = _estimate_time_for_soc(soc_samples, stop_value)

    if not start_time or not stop_time or stop_time <= start_time:
        return None

    energy_at_start = _interpolate_value_at_time(energy_samples, start_time)
    energy_at_stop = _interpolate_value_at_time(energy_samples, stop_time)

    if energy_at_start is None or energy_at_stop is None:
        return None

    energy_delta = energy_at_stop - energy_at_start
    if energy_delta <= 0:
        return None

    soc_span = stop_value - start_value
    if soc_span <= 0:
        return None

    return {
        "kwh_per_percent": energy_delta / soc_span,
        "energy_delta": energy_delta,
        "start_soc": start_value,
        "stop_soc": stop_value,
    }


def _detect_cc_to_cv_transition(detail_rows):
    voltage_by_time = defaultdict(list)
    current_by_time = defaultdict(list)
    soc_samples = []

    def normalize_voltage(value, unit):
        # Coerce to float before scaling so non-numeric samples are skipped.
        numeric = _normalize_numeric_value(value)
        if numeric is None:
            return None
        if isinstance(unit, str):
            unit_lower = unit.strip().lower()
            if unit_lower in {"v", "volt", "volts"}:
                return numeric
            if unit_lower == "kv":
                return numeric * 1000
            if unit_lower == "mv":
                return numeric / 1000
        return numeric

    def normalize_current(value, unit):
        numeric = _normalize_numeric_value(value)
        if numeric is None:
            return None
        if isinstance(unit, str):
            unit_lower = unit.strip().lower()
            if unit_lower in {"a", "amp", "amps", "ampere"}:
                return numeric
            if unit_lower == "ma":
                return numeric / 1000
        return numeric

    for row in detail_rows or []:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if not isinstance(timestamp, datetime.datetime):
                continue

            measurand = sample.get("measurand")
            measurand_lower = measurand.lower() if isinstance(measurand, str) else ""

            if measurand_lower.startswith("voltage"):
                normalized_voltage = normalize_voltage(
                    sample.get("value"), sample.get("unit")
                )
                if normalized_voltage is None:
                    continue
                try:
                    numeric_voltage = float(normalized_voltage)
                except (TypeError, ValueError):
                    continue
                if not math.isfinite(numeric_voltage):
                    continue
                voltage_by_time[timestamp].append(numeric_voltage)
            elif measurand_lower.startswith("current"):
                normalized_current = normalize_current(
                    sample.get("value"), sample.get("unit")
                )
                if normalized_current is None:
                    continue
                try:
                    numeric_current = float(normalized_current)
                except (TypeError, ValueError):
                    continue
                if not math.isfinite(numeric_current):
                    continue
                current_by_time[timestamp].append(numeric_current)
            elif measurand_lower == "soc":
                value = sample.get("value")
                if value is None:
                    continue
                try:
                    numeric_value = float(value)
                except (TypeError, ValueError):
                    continue
                if not math.isfinite(numeric_value):
                    continue
                soc_samples.append((timestamp, numeric_value))

    shared_timestamps = sorted(set(voltage_by_time.keys()) & set(current_by_time.keys()))
    if len(shared_timestamps) < 6:
        return None

    aggregated = []
    for timestamp in shared_timestamps:
        voltage_values = voltage_by_time.get(timestamp) or []
        current_values = current_by_time.get(timestamp) or []
        if not voltage_values or not current_values:
            continue

        avg_voltage = sum(voltage_values) / len(voltage_values)
        avg_current = sum(current_values) / len(current_values)

        if not math.isfinite(avg_voltage) or not math.isfinite(avg_current):
            continue

        aggregated.append((timestamp, avg_voltage, avg_current))

    if len(aggregated) < 6:
        return None

    max_voltage = max(value for _, value, _ in aggregated)
    max_current = max(value for _, _, value in aggregated)

    if max_voltage <= 0 or max_current <= 0:
        return None

    voltage_threshold = max_voltage * 0.995
    window = 3
    transition_time = None

    for index in range(window, len(aggregated)):
        timestamp, voltage, current = aggregated[index]

        if voltage < voltage_threshold:
            continue

        before_window = [
            entry_current
            for _, _, entry_current in aggregated[max(0, index - window) : index]
            if math.isfinite(entry_current)
        ]
        after_window = [
            entry_current
            for _, _, entry_current in aggregated[index : index + window]
            if math.isfinite(entry_current)
        ]

        if len(before_window) < 2 or len(after_window) < 2:
            continue

        before_avg = sum(before_window) / len(before_window)
        after_avg = sum(after_window) / len(after_window)

        if before_avg <= 0:
            continue

        if after_avg <= before_avg * 0.9 and after_avg <= max_current * 0.95:
            transition_time = timestamp
            break

    if not transition_time:
        return None

    soc_samples.sort(key=lambda item: item[0])
    if not soc_samples:
        return None

    soc_value = _interpolate_value_at_time(soc_samples, transition_time)
    if soc_value is None:
        return None

    try:
        soc_numeric = float(soc_value)
    except (TypeError, ValueError):
        return None

    if not math.isfinite(soc_numeric):
        return None

    if soc_numeric < 0 or soc_numeric > 100:
        return None

    return {"timestamp": transition_time, "soc": soc_numeric}


def _extract_session_detail_metadata(detail_rows):
    metadata = {
        "charging_mode": None,
        "lin_threshold": None,
        "est_capacity_wh": None,
        "soc_step_wh": None,
        "lin_soc_start": None,
        "lin_soc_end": None,
    }

    def _coerce_numeric(value):
        if isinstance(value, Decimal):
            return float(value)
        return value

    for row in detail_rows or []:
        if metadata["charging_mode"] is None:
            mode_value = row.get("charging_mode")
            if isinstance(mode_value, str):
                cleaned = mode_value.strip()
                if cleaned:
                    metadata["charging_mode"] = cleaned
        if metadata["lin_threshold"] is None:
            threshold_value = row.get("lin_threshold")
            if threshold_value is not None:
                numeric = _coerce_numeric(threshold_value)
                if isinstance(numeric, (int, float)):
                    metadata["lin_threshold"] = numeric
        if metadata["est_capacity_wh"] is None:
            capacity_value = row.get("est_capacity_wh")
            if capacity_value is not None:
                numeric = _coerce_numeric(capacity_value)
                if isinstance(numeric, (int, float)):
                    metadata["est_capacity_wh"] = numeric
        if metadata["soc_step_wh"] is None:
            step_value = row.get("soc_step_kw")
            if step_value is not None:
                numeric = _coerce_numeric(step_value)
                if isinstance(numeric, (int, float)):
                    metadata["soc_step_wh"] = numeric
        if metadata["lin_soc_start"] is None:
            start_value = row.get("lin_soc_start")
            if start_value is not None:
                numeric = _coerce_numeric(start_value)
                if isinstance(numeric, (int, float)):
                    metadata["lin_soc_start"] = numeric
        if metadata["lin_soc_end"] is None:
            end_value = row.get("lin_soc_end")
            if end_value is not None:
                numeric = _coerce_numeric(end_value)
                if isinstance(numeric, (int, float)):
                    metadata["lin_soc_end"] = numeric

    display_values = {}

    charging_mode_display = metadata.get("charging_mode") or "-"
    display_values["charging_mode"] = charging_mode_display

    lin_threshold_value = metadata.get("lin_threshold")
    if isinstance(lin_threshold_value, (int, float)):
        display_values["lin_threshold"] = f"{lin_threshold_value * 100:.1f} %"
    else:
        display_values["lin_threshold"] = "-"

    est_capacity_wh = metadata.get("est_capacity_wh")
    if isinstance(est_capacity_wh, (int, float)) and est_capacity_wh > 0:
        display_values["estimated_capacity"] = f"{est_capacity_wh / 1000:.1f} kWh"
    else:
        display_values["estimated_capacity"] = "-"

    soc_step_wh = metadata.get("soc_step_wh")
    if isinstance(soc_step_wh, (int, float)) and soc_step_wh > 0:
        display_values["soc_step_per_percent"] = f"{soc_step_wh / 1000:.3f} kWh"
    else:
        display_values["soc_step_per_percent"] = "-"

    lin_soc_start = metadata.get("lin_soc_start")
    if isinstance(lin_soc_start, (int, float)):
        display_values["lin_soc_start"] = f"{lin_soc_start:.1f} %"
    else:
        display_values["lin_soc_start"] = "-"

    lin_soc_end = metadata.get("lin_soc_end")
    if isinstance(lin_soc_end, (int, float)):
        display_values["lin_soc_end"] = f"{lin_soc_end:.1f} %"
    else:
        display_values["lin_soc_end"] = "-"

    metadata["display"] = display_values
    return metadata


def _compute_additional_session_metrics(session, detail_rows, stats):
    metrics = {}

    transition = _detect_cc_to_cv_transition(detail_rows)
    if transition:
        metrics["cc_to_cv_transition"] = transition

    start_local = session.get("session_start_local") if isinstance(session, dict) else None
    if not isinstance(start_local, datetime.datetime):
        return metrics

    threshold_time = start_local + datetime.timedelta(minutes=20)

    end_local = session.get("session_end_local") if isinstance(session, dict) else None
    measurement_end = stats.get("last_measurement") if isinstance(stats, dict) else None
    effective_end = end_local or measurement_end

    if not isinstance(effective_end, datetime.datetime):
        return metrics

    if (effective_end - start_local).total_seconds() < 20 * 60:
        return metrics

    soc_samples = []
    energy_samples = []

    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if not isinstance(timestamp, datetime.datetime):
                continue

            measurand = (sample.get("measurand") or "").lower()

            if measurand == "soc":
                soc_samples.append((timestamp, sample.get("value")))
            elif measurand == "energy.active.import.register":
                energy_value = _convert_energy_to_kwh(sample.get("value"), sample.get("unit"))
                if energy_value is not None:
                    energy_samples.append((timestamp, energy_value))

    if not soc_samples:
        return metrics

    soc_samples.sort(key=lambda item: item[0])
    energy_samples.sort(key=lambda item: item[0])

    if not any(timestamp >= threshold_time for timestamp, _ in soc_samples):
        return metrics

    soc_twenty_value = _interpolate_value_at_time(soc_samples, threshold_time)
    if soc_twenty_value is None:
        return metrics

    metrics["soc_twenty_minutes"] = soc_twenty_value

    if energy_samples:
        soc_eighty_time = _estimate_time_for_soc(soc_samples, 80)
        if soc_eighty_time:
            energy_at_twenty = _interpolate_value_at_time(energy_samples, threshold_time)
            energy_at_eighty = _interpolate_value_at_time(energy_samples, soc_eighty_time)

            if (
                energy_at_twenty is not None
                and energy_at_eighty is not None
                and energy_at_eighty >= energy_at_twenty
            ):
                metrics["energy_soc20_to_soc80_kwh"] = energy_at_eighty - energy_at_twenty

    return metrics


def _summarize_charging_session(
    session,
    stats,
    vehicle=None,
    extra_metrics=None,
    ambient_summary=None,
    detail_metadata=None,
):
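    """Build display-ready summary values for a charging session.

    Fields that cannot be derived from the session record, meter statistics,
    vehicle data or detail metadata fall back to "-" (or ``None`` for a few
    optional entries).
    """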
    start_local = session.get("session_start_local")
    end_local = session.get("session_end_local")

    measurement_end = stats.get("last_measurement")
    effective_end = end_local or measurement_end

    summary = {
        "charging_start": _format_display_timestamp(start_local) or "-",
        "charging_stop": _format_display_timestamp(effective_end) or "-",
        "duration": session.get("duration"),
        "start_soc": "-",
        "end_soc": "-",
        "energy_charged": "-",
        "min_voltage": "-",
        "max_voltage": "-",
        "min_current": "-",
        "max_current": "-",
        "max_power_active_import": "-",
        "ramp_up_phase_end_soc": "-",
        "taper_phase_start_soc": "-",
    }

    summary["transaction_id"] = _format_optional_string(session.get("transaction_id"))
    summary["chargepoint_id"] = _format_optional_string(session.get("station_id"))

    detail_display = {}
    if isinstance(detail_metadata, dict):
        display_map = detail_metadata.get("display")
        if isinstance(display_map, dict):
            detail_display = display_map

    lin_soc_start_raw = None
    lin_soc_end_raw = None
    if isinstance(detail_metadata, dict):
        lin_soc_start_raw = detail_metadata.get("lin_soc_start")
        lin_soc_end_raw = detail_metadata.get("lin_soc_end")

    def _assign_soc(summary_key, display_key, raw_value):
        if detail_display:
            display_value = detail_display.get(display_key)
            if isinstance(display_value, str):
                trimmed = display_value.strip()
                if trimmed and trimmed != "-":
                    summary[summary_key] = trimmed
                    return
        if isinstance(raw_value, (int, float)):
            summary[summary_key] = f"{raw_value:.1f} %"

    _assign_soc("ramp_up_phase_end_soc", "lin_soc_start", lin_soc_start_raw)
    _assign_soc("taper_phase_start_soc", "lin_soc_end", lin_soc_end_raw)

    charging_mode_display = detail_display.get("charging_mode") if detail_display else None
    if not charging_mode_display and isinstance(detail_metadata, dict):
        charging_mode_display = detail_metadata.get("charging_mode")
    summary["charging_mode"] = charging_mode_display or "-"

    lin_threshold_display = detail_display.get("lin_threshold") if detail_display else None
    if (not lin_threshold_display or lin_threshold_display == "-") and isinstance(
        detail_metadata, dict
    ):
        threshold_value = detail_metadata.get("lin_threshold")
        if isinstance(threshold_value, (int, float)):
            lin_threshold_display = f"{threshold_value * 100:.1f} %"
    summary["lin_threshold"] = lin_threshold_display or "-"

    vin_value = "-"
    if vehicle:
        vin_value = _format_optional_string(vehicle.get("vin"))
    summary["vin"] = vin_value

    summary["battery_capacity_net"] = None
    if vehicle:
        capacity_value = vehicle.get("battery_capacity_net_kwh")
        if isinstance(capacity_value, Decimal):
            capacity_value = float(capacity_value)
        if capacity_value not in (None, ""):
            try:
                capacity_number = float(capacity_value)
            except (TypeError, ValueError):
                capacity_number = None
            if capacity_number is not None and capacity_number > 0:
                summary["battery_capacity_net"] = f"{capacity_number:.1f} kWh"

    if start_local and effective_end and summary["duration"] is None:
        summary["duration"] = _format_duration(effective_end - start_local)

    soc_stats = stats.get("soc") or {}
    soc_start = soc_stats.get("start")
    soc_end = soc_stats.get("end")
    if soc_start is not None:
        summary["start_soc"] = f"{soc_start:.0f} %"
    if soc_end is not None:
        summary["end_soc"] = f"{soc_end:.0f} %"

    energy_stats = stats.get("energy")
    energy_display = None
    if energy_stats and energy_stats.get("start") is not None and energy_stats.get("end") is not None:
        energy_delta = energy_stats["end"] - energy_stats["start"]
        unit = energy_stats.get("unit") or ""
        if unit.lower() == "wh":
            energy_display = f"{energy_delta / 1000:.2f} kWh"
        elif unit:
            energy_display = f"{energy_delta:.2f} {unit}"
        else:
            energy_display = f"{energy_delta:.2f}"

    if energy_display is None:
        meter_delta = session.get("meter_delta")
        if meter_delta is not None:
            try:
                energy_display = f"{float(meter_delta) / 1000:.2f} kWh"
            except (TypeError, ValueError):
                energy_display = str(meter_delta)

    if energy_display:
        summary["energy_charged"] = energy_display

    if summary["duration"] is None:
        summary["duration"] = "-"

    def format_temperature(value):
        if value is None:
            return "-"
        return f"{value:.1f} °C"

    ambient_summary = ambient_summary or {}
    summary["ambient_temperature_start"] = format_temperature(
        ambient_summary.get("start")
    )
    summary["ambient_temperature_end"] = format_temperature(
        ambient_summary.get("end")
    )

    summary["soc_twenty_minutes"] = None
    summary["energy_soc20_to_soc80"] = None

    def format_measurement(value, unit):
        if value is None:
            return "-"
        try:
            numeric_value = float(value)
        except (TypeError, ValueError):
            return "-"
        if unit:
            return f"{numeric_value:.2f} {unit}"
        return f"{numeric_value:.2f}"

    voltage_stats = stats.get("voltage") if isinstance(stats, dict) else None
    if voltage_stats:
        summary["min_voltage"] = format_measurement(
            voltage_stats.get("min"), voltage_stats.get("unit")
        )
        summary["max_voltage"] = format_measurement(
            voltage_stats.get("max"), voltage_stats.get("unit")
        )

    current_stats = stats.get("current") if isinstance(stats, dict) else None
    if current_stats:
        summary["min_current"] = format_measurement(
            current_stats.get("min"), current_stats.get("unit")
        )
        summary["max_current"] = format_measurement(
            current_stats.get("max"), current_stats.get("unit")
        )

    power_stats = stats.get("power_active_import") if isinstance(stats, dict) else None
    if power_stats:
        summary["max_power_active_import"] = format_measurement(
            power_stats.get("max"), power_stats.get("unit")
        )

    if extra_metrics:
        soc_twenty_value = extra_metrics.get("soc_twenty_minutes")
        if soc_twenty_value is not None:
            summary["soc_twenty_minutes"] = f"{soc_twenty_value:.0f} %"

        energy_delta = extra_metrics.get("energy_soc20_to_soc80_kwh")
        if energy_delta is not None:
            summary["energy_soc20_to_soc80"] = f"{energy_delta:.2f} kWh"

    return summary


def _normalize_percentage(value):
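    """Coerce *value* to ``float``; return ``None`` when it is missing or not numeric.

    ``Decimal`` values and numeric strings are accepted, e.g.
    ``_normalize_percentage(Decimal("87.5"))`` yields ``87.5``.
    """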
    if value is None:
        return None
    if isinstance(value, Decimal):
        return float(value)
    try:
        return float(value)
    except (TypeError, ValueError):
        return None


def _fetch_daily_charging_session_counts(days: int = 30):
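    """Return daily counts of valid charging sessions on known chargepoints for the last *days* days.

    Days without sessions are zero-filled so the chart data always covers a
    contiguous range ending today, one ``{"date": "YYYY-MM-DD", "count": n}``
    entry per day.
    """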
    try:
        days = int(days)
    except (TypeError, ValueError):
        return []

    if days <= 0:
        return []

    chargepoint_match_clause = _build_chargepoint_match_condition(
        "s.station_id", "c.chargepoint_id"
    )
    valid_clause = _build_session_validity_clause("s")

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT
                    DATE(COALESCE(s.session_end, s.session_start)) AS session_date,
                    COUNT(*) AS session_count
                FROM op_charging_sessions AS s
                INNER JOIN bhi_chargepoint_list AS c
                    ON {chargepoint_match_clause}
                WHERE COALESCE(s.session_end, s.session_start) >= DATE_SUB(CURDATE(), INTERVAL %s DAY)
                    AND {valid_clause}
                GROUP BY session_date
                ORDER BY session_date
                """,
                (max(days - 1, 0),),
            )
            rows = cur.fetchall()
    finally:
        conn.close()

    counts_by_date = {}
    for row in rows:
        session_date = row.get("session_date")
        if isinstance(session_date, datetime.datetime):
            session_date = session_date.date()
        elif isinstance(session_date, str):
            try:
                session_date = datetime.datetime.strptime(
                    session_date, "%Y-%m-%d"
                ).date()
            except ValueError:
                session_date = None

        if session_date is None:
            continue

        count = row.get("session_count")
        try:
            counts_by_date[session_date] = int(count)
        except (TypeError, ValueError):
            counts_by_date[session_date] = 0

    end_date = datetime.date.today()
    start_date = end_date - datetime.timedelta(days=days - 1)

    chart_data = []
    for offset in range(days):
        current_day = start_date + datetime.timedelta(days=offset)
        chart_data.append(
            {
                "date": current_day.strftime("%Y-%m-%d"),
                "count": counts_by_date.get(current_day, 0),
            }
        )

    return chart_data


def _fetch_fleet_report_rows():
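    """Return one aggregated fleet-report row per vehicle with valid sessions.

    Each row carries the session count, the most recent session timestamp,
    details of the latest session (id, highlight flag, charged kWh) and the
    best available charge factor/confidence, falling back to the values stored
    on the vehicle record. Vehicles named "Unknown Vehicle" are excluded.
    """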
    ensure_charging_session_highlights_table()
    valid_clause = _build_session_validity_clause("s")
    latest_valid_clause = _build_session_validity_clause("s_latest")

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT
                    v.vehicle_id,
                    v.name,
                    v.manufacturer,
                    v.model,
                    v.build_year,
                    v.id_tag,
                    v.charge_factor AS stored_charge_factor,
                    v.confidence AS stored_confidence,
                    (
                        SELECT ar.charge_factor
                        FROM bhi_algo_results AS ar
                        WHERE ar.vehicle_id = v.vehicle_id
                            AND ar.confidence IS NOT NULL
                        ORDER BY ar.confidence DESC, ar.charge_factor DESC
                        LIMIT 1
                    ) AS best_charge_factor,
                    (
                        SELECT ar.confidence
                        FROM bhi_algo_results AS ar
                        WHERE ar.vehicle_id = v.vehicle_id
                            AND ar.confidence IS NOT NULL
                        ORDER BY ar.confidence DESC, ar.charge_factor DESC
                        LIMIT 1
                    ) AS best_confidence,
                    COUNT(s.id) AS session_count,
                    MAX(COALESCE(s.session_end, s.session_start)) AS last_session,
                    (
                        SELECT s_latest.id
                        FROM op_charging_sessions AS s_latest
                        WHERE s_latest.vehicle_id = v.vehicle_id
                            AND {latest_valid_clause}
                        ORDER BY COALESCE(s_latest.session_end, s_latest.session_start) DESC, s_latest.id DESC
                        LIMIT 1
                    ) AS latest_session_id,
                    (
                        SELECT COALESCE(h_latest.highlighted, 0)
                        FROM op_charging_sessions AS s_latest
                        LEFT JOIN {SESSION_HIGHLIGHT_TABLE} AS h_latest
                            ON h_latest.session_id = s_latest.id
                        WHERE s_latest.vehicle_id = v.vehicle_id
                            AND {latest_valid_clause}
                        ORDER BY COALESCE(s_latest.session_end, s_latest.session_start) DESC, s_latest.id DESC
                        LIMIT 1
                    ) AS latest_session_highlighted,
                    (
                        SELECT
                            CASE
                                WHEN s_latest.meter_start IS NOT NULL
                                    AND s_latest.meter_stop IS NOT NULL
                                THEN (s_latest.meter_stop - s_latest.meter_start) / 1000.0
                                ELSE NULL
                            END
                        FROM op_charging_sessions AS s_latest
                        WHERE s_latest.vehicle_id = v.vehicle_id
                            AND {latest_valid_clause}
                        ORDER BY COALESCE(s_latest.session_end, s_latest.session_start) DESC, s_latest.id DESC
                        LIMIT 1
                    ) AS latest_session_kwh
                FROM bhi_vehicle_list AS v
                INNER JOIN op_charging_sessions AS s
                    ON s.vehicle_id = v.vehicle_id
                    AND {valid_clause}
                GROUP BY
                    v.vehicle_id,
                    v.name,
                    v.manufacturer,
                    v.model,
                    v.build_year,
                    v.id_tag,
                    v.charge_factor,
                    v.confidence
                ORDER BY
                    last_session IS NULL ASC,
                    last_session DESC,
                    v.name
                """
            )
            rows = cur.fetchall()
    finally:
        conn.close()

    rows = [
        row
        for row in rows
        if (row.get("name") or "").strip().lower() != "unknown vehicle"
    ]

    for row in rows:
        last_session = row.get("last_session")
        if isinstance(last_session, datetime.datetime):
            row["last_session_fmt"] = last_session.strftime("%Y-%m-%d %H:%M:%S")
        else:
            row["last_session_fmt"] = last_session

        build_year = row.get("build_year")
        if isinstance(build_year, Decimal):
            row["build_year"] = int(build_year)

        default_charge_factor = _normalize_percentage(row.pop("stored_charge_factor", None))
        default_confidence = _normalize_percentage(row.pop("stored_confidence", None))

        best_charge_factor = _normalize_percentage(row.pop("best_charge_factor", None))
        best_confidence = _normalize_percentage(row.pop("best_confidence", None))

        if best_confidence is not None:
            row["charge_factor"] = best_charge_factor
            row["confidence"] = best_confidence
        else:
            row["charge_factor"] = default_charge_factor
            row["confidence"] = default_confidence

        session_count = row.get("session_count")
        try:
            row["session_count"] = int(session_count)
        except (TypeError, ValueError):
            row["session_count"] = 0

        latest_session_id = row.get("latest_session_id")
        try:
            row["latest_session_id"] = int(latest_session_id) if latest_session_id is not None else None
        except (TypeError, ValueError):
            row["latest_session_id"] = None

        row["latest_session_highlighted"] = bool(row.get("latest_session_highlighted"))

        latest_session_kwh = row.get("latest_session_kwh")
        if isinstance(latest_session_kwh, Decimal):
            row["latest_session_kwh"] = float(latest_session_kwh)

    return rows


def _fetch_vehicle_charging_sessions(id_tag: Optional[str], vehicle_id: Optional[int] = None):
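    """Return all valid charging sessions belonging to one vehicle.

    Sessions are matched via *vehicle_id* when it is available; otherwise the
    lookup falls back to the RFID *id_tag*.
    """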
    if not id_tag and vehicle_id is None:
        return []

    ensure_charging_session_highlights_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            algo_join_condition = "ar.transaction_id = s.transaction_id"
            params = []
            valid_clause = _build_session_validity_clause("s")
            filters = [valid_clause]

            if vehicle_id is not None:
                try:
                    vehicle_id_int = int(vehicle_id)
                except (TypeError, ValueError):
                    vehicle_id_int = None
                if vehicle_id_int is not None:
                    algo_join_condition += " AND ar.vehicle_id = %s"
                    params.append(vehicle_id_int)
                    filters.append("s.vehicle_id = %s")
                    params.append(vehicle_id_int)

            if len(filters) == 1:
                # No vehicle_id filter available, fall back to id_tag if present
                if not id_tag:
                    return []
                filters.append("s.id_tag = %s")
                params.append(id_tag)

            cur.execute(
                f"""
                SELECT
                    s.id,
                    s.station_id,
                    s.connector_id,
                    s.transaction_id,
                    s.id_tag,
                    s.session_start,
                    s.session_end,
                    s.meter_start,
                    s.meter_stop,
                    s.reason,
                    COALESCE(h.highlighted, 0) AS highlighted,
                    ar.charge_factor,
                    ar.confidence
                FROM op_charging_sessions AS s
                LEFT JOIN {SESSION_HIGHLIGHT_TABLE} AS h
                    ON h.session_id = s.id
                LEFT JOIN bhi_algo_results AS ar
                    ON {algo_join_condition}
                WHERE {' AND '.join(filters)}
                ORDER BY COALESCE(s.session_end, s.session_start) DESC, s.id DESC
                """,
                (*params,),
            )
            rows = cur.fetchall()
    finally:
        conn.close()

    return _format_session_rows(rows)


def _fetch_vehicle_by_id_tag(id_tag, vehicle_id=None):
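    """Resolve a vehicle by *vehicle_id* first and fall back to its *id_tag*."""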
    vehicle_id_int = _coerce_int(vehicle_id)
    if vehicle_id_int is not None:
        vehicle = _get_vehicle(vehicle_id_int)
        if vehicle:
            return vehicle

    if not id_tag:
        return None

    try:
        clause = _ensure_rfid_collate_clause()
    except RuntimeError:
        clause = ""

    comparison = "id_tag = %s"
    if clause:
        comparison = f"id_tag{clause} = %s{clause}"

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT
                    vehicle_id,
                    name,
                    manufacturer,
                    model,
                    vehicle_model_id,
                    id_tag,
                    battery_capacity_net_kwh,
                    battery_capacity_gross_kwh,
                    vin,
                    license_plate
                FROM bhi_vehicle_list
                WHERE {comparison}
                LIMIT 1
                """,
                (id_tag,),
            )
            return cur.fetchone()
    finally:
        conn.close()


def _fetch_recent_charging_sessions(limit: int = 200, station_id: Optional[str] = None):
    """Return the most recent charging sessions limited by *limit*."""

    _ensure_rfid_collate_clause()
    # Re-use the helper so comparisons survive differing collations in
    # production without exploding with ``Illegal mix of collations`` again.
    match_clause = _build_rfid_match_condition("s.id_tag", "v_tag.id_tag")
    valid_clause = _build_session_validity_clause("s")

    ensure_charging_session_highlights_table()

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            chargepoint_match_clause = _build_chargepoint_match_condition(
                "s.station_id", "c.chargepoint_id"
            )

            where_clauses = [valid_clause]
            params = []

            if station_id:
                station_id = station_id.strip()
                if station_id:
                    clause = _ensure_chargepoint_collate_clause()
                    if clause:
                        station_condition = f"c.chargepoint_id{clause} = %s{clause}"
                    else:
                        station_condition = "BINARY c.chargepoint_id = BINARY %s"
                    where_clauses.append(station_condition)
                    params.append(station_id)

            cur.execute(
                f"""
                SELECT
                    s.id,
                    s.station_id,
                    s.connector_id,
                    s.transaction_id,
                    s.id_tag,
                    s.vehicle_id,
                    s.session_start,
                    s.session_end,
                    s.meter_start,
                    s.meter_stop,
                    s.reason,
                    COALESCE(v_id.name, v_tag.name) AS vehicle_name,
                    COALESCE(h.highlighted, 0) AS highlighted,
                    c.id AS chargepoint_internal_id,
                    CASE
                        WHEN s.vehicle_id IS NULL THEN 0
                        ELSE 1
                    END AS vehicle_assigned
                FROM op_charging_sessions AS s
                LEFT JOIN bhi_vehicle_list AS v_id
                    ON v_id.vehicle_id = s.vehicle_id
                LEFT JOIN bhi_vehicle_list AS v_tag
                    ON v_tag.id_tag IS NOT NULL
                    AND v_tag.id_tag <> ''
                    AND {match_clause}
                LEFT JOIN {SESSION_HIGHLIGHT_TABLE} AS h
                    ON h.session_id = s.id
                INNER JOIN bhi_chargepoint_list AS c
                    ON {chargepoint_match_clause}
                WHERE {' AND '.join(where_clauses)}
                ORDER BY COALESCE(s.session_end, s.session_start) DESC, s.id DESC
                LIMIT %s
                """,
                (*params, limit),
            )
            rows = cur.fetchall()

        return _format_session_rows(rows)
    finally:
        conn.close()


def _fetch_wallbox_sessions():
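    """Return every valid session on a known chargepoint, with vehicle names resolved."""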
    _ensure_rfid_collate_clause()
    # Using the same guarded comparison as the fleet report keeps this view
    # usable even when session and vehicle tables disagree on their collations.
    match_clause = _build_rfid_match_condition("s.id_tag", "v.id_tag")
    chargepoint_match_clause = _build_chargepoint_match_condition(
        "s.station_id", "c.chargepoint_id"
    )
    valid_clause = _build_session_validity_clause("s")

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                f"""
                SELECT
                    s.id,
                    s.station_id,
                    s.connector_id,
                    s.transaction_id,
                    s.id_tag,
                    s.vehicle_id,
                    s.session_start,
                    s.session_end,
                    s.meter_start,
                    s.meter_stop,
                    s.reason,
                    COALESCE(v_id.name, v_tag.name) AS vehicle_name,
                    CASE
                        WHEN s.vehicle_id IS NULL THEN 0
                        ELSE 1
                    END AS vehicle_assigned
                FROM op_charging_sessions AS s
                INNER JOIN bhi_chargepoint_list AS c
                    ON {chargepoint_match_clause}
                LEFT JOIN bhi_vehicle_list AS v_id
                    ON v_id.vehicle_id = s.vehicle_id
                LEFT JOIN bhi_vehicle_list AS v_tag
                    ON v_tag.id_tag IS NOT NULL
                    AND v_tag.id_tag <> ''
                    AND {match_clause}
                WHERE {valid_clause}
                ORDER BY COALESCE(s.session_end, s.session_start) DESC, s.id DESC
                """
            )
            rows = cur.fetchall()

        rows = _format_session_rows(rows)
        return rows
    finally:
        conn.close()


@app.route('/op_charging_sessions')
@app.route('/cf/bhi_charging_sessions')
def charging_sessions():
    """Render the most recent charging sessions from the OCPP broker."""

    station_filter = (request.args.get('station_id') or '').strip() or None
    sessions = _fetch_recent_charging_sessions(station_id=station_filter)
    return render_template(
        'bhi_charging_sessions.html',
        sessions=sessions,
        aside='repo_diag',
        station_filter=station_filter,
    )

@app.route('/cf/bhi_charging_sessions_all')
def charging_sessions_all():
    """Render the most recent charging sessions from the OCPP broker."""

    filter_options = {
        'all': 'All Sessions',
        'assigned': 'Assigned Sessions',
        'unassigned': 'Unassigned Sessions',
    }

    session_filter = (request.args.get('session_filter') or 'unassigned').lower()
    if session_filter not in filter_options:
        session_filter = 'unassigned'

    sessions = _fetch_recent_charging_sessions()

    def _is_assigned(session):
        if session.get('vehicle_assigned') is not None:
            return bool(session.get('vehicle_assigned'))

        vehicle_id = session.get('vehicle_id')
        if vehicle_id not in (None, ''):
            return True

        vehicle_name = session.get('vehicle_name')
        if vehicle_name is None:
            return False
        vehicle_name_str = str(vehicle_name).strip()
        return bool(vehicle_name_str) and vehicle_name_str.lower() != 'not assigned'

    if session_filter == 'unassigned':
        sessions = [s for s in sessions if not _is_assigned(s)]
    elif session_filter == 'assigned':
        sessions = [s for s in sessions if _is_assigned(s)]

    vehicles = _fetch_vehicle_select_options()

    return render_template(
        'bhi_charging_sessions_all.html',
        sessions=sessions,
        vehicles=vehicles,
        session_filter=session_filter,
        page_title=filter_options[session_filter],
        aside='repo_diag',
    )

@app.route('/cf/bhi_charging_sessions/<int:session_id>/highlight', methods=['POST'])
def bhi_toggle_charging_session_highlight(session_id: int):
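    """Persist the highlight flag for a charging session and echo it back as JSON."""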
    ensure_charging_session_highlights_table()

    payload = request.get_json(silent=True) or {}
    highlighted_raw = payload.get('highlighted')

    if isinstance(highlighted_raw, str):
        highlighted = highlighted_raw.strip().lower() in {"1", "true", "yes", "on"}
    else:
        highlighted = bool(highlighted_raw)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT 1 FROM op_charging_sessions WHERE id = %s LIMIT 1",
                (session_id,),
            )
            exists = cur.fetchone()
            if not exists:
                abort(404)

            cur.execute(
                f"""
                INSERT INTO {SESSION_HIGHLIGHT_TABLE} (session_id, highlighted)
                VALUES (%s, %s)
                ON DUPLICATE KEY UPDATE highlighted = VALUES(highlighted)
                """,
                (session_id, 1 if highlighted else 0),
            )

        conn.commit()
    finally:
        conn.close()

    return jsonify({'highlighted': bool(highlighted)})


@app.route('/cf/bhi_charging_sessions/<int:session_id>/analysis')
def bhi_charging_session_analysis(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    vehicle = _fetch_vehicle_by_id_tag(session.get("id_tag"), session.get("vehicle_id"))
    detail_rows = _fetch_charging_session_detail_rows(session, vehicle)
    ambient_samples, ambient_start, ambient_end = _fetch_session_ambient_temperatures(
        session, detail_rows
    )
    chart_groups, stats, has_meter_values = _prepare_session_chart_payload(
        detail_rows, ambient_samples, session
    )
    detail_metadata = _extract_session_detail_metadata(detail_rows)
    additional_metrics = _compute_additional_session_metrics(session, detail_rows, stats)
    soc_energy_profile = _build_soc_energy_profile(detail_rows)
    ambient_summary = {"start": ambient_start, "end": ambient_end}
    summary = _summarize_charging_session(
        session,
        stats,
        vehicle,
        additional_metrics,
        ambient_summary,
        detail_metadata,
    )
    algo_results = calculate_session_algorithm_metrics(session, detail_rows, vehicle)

    markers = algo_results.get("chart_markers") if isinstance(algo_results, dict) else None
    chart_markers = markers if markers else {}

    detail_metrics = (detail_metadata.get("display") or {}) if detail_metadata else {}
    overview_metrics = _build_session_overview_metrics(
        detail_metadata,
        detail_metrics,
        vehicle,
        algo_results,
        detail_rows,
    )

    custom_values = _fetch_session_custom_values(session_id)
    custom_soc_result = None
    if custom_values.get("has_values"):
        custom_soc_result = _calculate_custom_soc_step(
            detail_rows,
            custom_values.get("soc_analysis_start"),
            custom_values.get("soc_analysis_stop"),
        )
        if custom_soc_result:
            overview_metrics["soc_step_per_percent"] = f"{custom_soc_result['kwh_per_percent']:.3f} kWh"
            overview_metrics["soc_step_is_custom"] = True

    overview_metrics.setdefault("soc_step_is_custom", False)

    custom_error_code = request.args.get("custom_error")

    obd_row_count = None
    transaction_id = session.get("transaction_id")
    if transaction_id is not None:
        obd_measurement = _fetch_obd_measurement(transaction_id)
        if obd_measurement:
            obd_row_count = obd_measurement.get("row_count")

    chargepoint_identifier = summary.get("chargepoint_id")
    chargepoint_record = _fetch_chargepoint_by_identifier(chargepoint_identifier)
    chargepoint_power_type = None
    if chargepoint_record:
        chargepoint_power_type = chargepoint_record.get("power_type")

    charging_mode_text = (summary.get("charging_mode") or "").upper()
    mode_is_dc = charging_mode_text.startswith("DC")
    mode_is_ac = charging_mode_text.startswith("AC")

    def _power_type_display():
        if chargepoint_power_type in {"AC", "DC"}:
            return chargepoint_power_type
        if mode_is_ac:
            return "AC"
        if mode_is_dc:
            return "DC"
        return "-"

    power_type_display = _power_type_display()
    ac_available = power_type_display == "AC"
    dc_available = power_type_display == "DC"

    data_sources = [
        ("Chargepoint ID", chargepoint_identifier or "-"),
        ("Power Type", power_type_display),
        (
            "AC Charging",
            "yes"
            if ac_available or (not chargepoint_power_type and mode_is_ac)
            else "no",
        ),
        (
            "DC Charging",
            "yes"
            if dc_available or (not chargepoint_power_type and mode_is_dc)
            else "no",
        ),
        (
            "FleetLite",
            "yes"
            if ac_available or (not chargepoint_power_type and mode_is_ac)
            else "no",
        ),
        ("OBD Measurement", obd_row_count if obd_row_count is not None else ""),
        ("Manual entry", "no"),
    ]

    return render_template(
        'bhi_charging_session_analysis.html',
        session=session,
        chart_groups=chart_groups,
        chart_markers=chart_markers,
        summary=summary,
        algo_results=algo_results,
        detail_metrics=detail_metrics,
        overview_metrics=overview_metrics,
        has_measurements=has_meter_values,
        vehicle=vehicle,
        custom_values=custom_values,
        custom_soc_result=custom_soc_result,
        custom_parameters_error=custom_error_code,
        additional_metrics=additional_metrics,
        soc_energy_profile=soc_energy_profile,
        obd_row_count=obd_row_count,
        data_sources=data_sources,
        aside='repo_diag',
    )


@app.post('/cf/bhi_charging_sessions/<int:session_id>/analysis/custom_values')
def bhi_charging_session_update_custom_values(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    start_input = request.form.get("soc_analysis_start")
    stop_input = request.form.get("soc_analysis_stop")

    start_text = (start_input or "").strip()
    stop_text = (stop_input or "").strip()

    error_code = None

    if not start_text and not stop_text:
        _delete_session_custom_values(session_id)
    else:
        start_value = _parse_soc_input(start_input)
        stop_value = _parse_soc_input(stop_input)

        if start_value is None or stop_value is None:
            error_code = "invalid"
        elif not (0 <= start_value < stop_value <= 100):
            error_code = "range"
        else:
            _save_session_custom_values(
                session_id,
                round(start_value, 2),
                round(stop_value, 2),
            )

    redirect_params = {}
    if error_code:
        redirect_params["custom_error"] = error_code

    return redirect(url_for('bhi_charging_session_analysis', session_id=session_id, **redirect_params))


@app.post('/cf/bhi_charging_sessions/<int:session_id>/analysis/obd_measurements')
def bhi_upload_obd_measurement(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    transaction_id = session.get("transaction_id")
    if transaction_id is None:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    upload = request.files.get("obd_csv")
    if upload is None or not upload.filename:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    filename = upload.filename or ""
    if not filename.lower().endswith(".csv"):
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    csv_bytes = upload.read()
    row_count = _count_csv_rows(csv_bytes)

    _save_obd_measurement(transaction_id, csv_bytes, row_count)

    return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))


@app.get('/cf/bhi_charging_sessions/<int:session_id>/analysis/obd_measurements/download')
def bhi_download_obd_measurement(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    transaction_id = session.get("transaction_id")
    if transaction_id is None:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    measurement = _fetch_obd_measurement_file(transaction_id)
    if not measurement:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    csv_data = measurement.get("csv_data")
    if not csv_data:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    filename = f"obd_measurement_{transaction_id}.csv"

    return send_file(
        BytesIO(csv_data),
        mimetype="text/csv",
        as_attachment=True,
        download_name=filename,
    )


@app.post('/cf/bhi_charging_sessions/<int:session_id>/analysis/obd_measurements/delete')
def bhi_delete_obd_measurement(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    transaction_id = session.get("transaction_id")
    if transaction_id is None:
        return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))

    _delete_obd_measurement(transaction_id)

    return redirect(url_for('bhi_charging_session_analysis', session_id=session_id))


@app.route('/cf/bhi_charging_sessions/<int:session_id>/analysis/download.csv')
def bhi_charging_session_download_csv(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    vehicle = _fetch_vehicle_by_id_tag(session.get("id_tag"), session.get("vehicle_id"))
    detail_rows = _fetch_charging_session_detail_rows(session, vehicle)

    entries = []
    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            timestamp_value = ""
            sort_key = None
            if isinstance(timestamp, datetime.datetime):
                sort_key = timestamp.timestamp()
                timestamp_value = timestamp.isoformat()

            entries.append(
                (
                    sort_key,
                    timestamp_value,
                    sample.get("measurand"),
                    sample.get("unit"),
                    sample.get("phase"),
                    sample.get("location"),
                    sample.get("value"),
                )
            )

    entries.sort(key=lambda item: (item[0] is None, item[0] if item[0] is not None else float('-inf')))

    csv_stream = StringIO()
    writer = csv.writer(csv_stream)
    writer.writerow(["timestamp", "measurand", "unit", "phase", "location", "value"])

    for _, timestamp_value, measurand, unit, phase, location, value in entries:
        writer.writerow([
            timestamp_value,
            (measurand or ""),
            (unit or ""),
            (phase or ""),
            (location or ""),
            "" if value is None else value,
        ])

    csv_stream.seek(0)
    filename = f"charging_session_{session_id}.csv"

    response = Response(csv_stream.getvalue(), mimetype='text/csv')
    response.headers['Content-Disposition'] = f'attachment; filename="{filename}"'
    return response


@app.route('/cf/bhi_charging_sessions/<int:session_id>/analysis/download_soc.csv')
def bhi_charging_session_download_soc_csv(session_id: int):
    session = _fetch_charging_session_by_id(session_id)
    if not session:
        abort(404)

    vehicle = _fetch_vehicle_by_id_tag(session.get("id_tag"), session.get("vehicle_id"))
    detail_rows = _fetch_charging_session_detail_rows(session, vehicle)

    samples = []
    for row in detail_rows:
        for sample in _extract_meter_samples(row):
            timestamp = sample.get("timestamp_local")
            if isinstance(timestamp, datetime.datetime):
                samples.append(sample)

    def _sample_sort_key(entry):
        timestamp = entry.get("timestamp_local")
        measurand = (entry.get("measurand") or "").strip().lower()
        priority = 0 if measurand == "energy.active.import.register" else 1
        return timestamp, priority

    samples.sort(key=_sample_sort_key)

    entries = []
    last_energy_wh = None
    energy_origin_wh = None

    for sample in samples:
        measurand = (sample.get("measurand") or "").strip().lower()
        timestamp = sample.get("timestamp_local")
        if not isinstance(timestamp, datetime.datetime):
            continue

        if measurand == "energy.active.import.register":
            energy_wh = _convert_energy_to_wh(sample.get("value"), sample.get("unit"))
            if energy_wh is None:
                continue

            if energy_origin_wh is None:
                energy_origin_wh = energy_wh

            relative_energy = energy_wh - energy_origin_wh
            if relative_energy < 0:
                relative_energy = 0.0

            last_energy_wh = relative_energy
            continue

        if measurand != "soc":
            continue

        soc_value = sample.get("value")
        if soc_value is None:
            continue

        entries.append(
            (
                timestamp.isoformat(),
                float(soc_value),
                "" if last_energy_wh is None else last_energy_wh,
            )
        )

    csv_stream = StringIO()
    writer = csv.writer(csv_stream)
    writer.writerow(["timestamp", "soc_percent", "energy_charged_wh"])

    for timestamp_value, soc_value, energy_value in entries:
        writer.writerow([timestamp_value, soc_value, energy_value])

    csv_stream.seek(0)
    filename = f"charging_session_{session_id}_soc.csv"

    response = Response(csv_stream.getvalue(), mimetype='text/csv')
    response.headers['Content-Disposition'] = f'attachment; filename="{filename}"'
    return response


@app.route('/cf/bhi_unassigned_sessions')
def bhi_unassigned_sessions():
    filter_value = request.args.get('session_filter', 'unassigned').lower()
    valid_filters = {'all', 'unassigned', 'assigned'}
    if filter_value not in valid_filters:
        filter_value = 'unassigned'

    sessions = _fetch_wallbox_sessions()

    if filter_value == 'unassigned':
        sessions = [session for session in sessions if not session.get('vehicle_assigned')]
    elif filter_value == 'assigned':
        sessions = [session for session in sessions if session.get('vehicle_assigned')]

    vehicles = _fetch_vehicle_select_options()
    return render_template(
        'bhi_unassigned_sessions.html',
        sessions=sessions,
        vehicles=vehicles,
        session_filter=filter_value,
        aside='repo_diag',
    )


@app.post('/cf/bhi_charging_sessions/<int:session_id>/assign_vehicle')
def bhi_assign_vehicle(session_id: int):
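    """Assign a vehicle to a charging session, or clear the assignment when no id is given."""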
    vehicle_id_raw = (request.form.get('vehicle_id') or '').strip()

    vehicle_id = None
    if vehicle_id_raw:
        try:
            vehicle_id = int(vehicle_id_raw)
        except ValueError:
            abort(400)

        vehicle = _get_vehicle(vehicle_id)
        if not vehicle:
            abort(400)

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "UPDATE op_charging_sessions SET vehicle_id=%s WHERE id=%s",
                (vehicle_id, session_id),
            )
        conn.commit()
    finally:
        conn.close()

    redirect_target = request.referrer
    if redirect_target:
        return redirect(redirect_target)

    return redirect(url_for('bhi_unassigned_sessions'))


@app.route('/cf/bhi_ocpp_solution')
def bhi_ocpp_solution():
    """Render the integration instructions for the OCPP broker solution."""

    return render_template(
        'bhi_ocpp_solution.html',
        aside='repo_solution',
    )


@app.route('/cf/bhi_fleet_report')
def bhi_fleet_report():
    vehicles = _fetch_fleet_report_rows()
    daily_sessions = _fetch_daily_charging_session_counts()

    vehicle_count = len(vehicles)
    total_sessions = sum(row.get('session_count', 0) for row in vehicles)
    fleet_health_values = []
    for row in vehicles:
        normalized = _normalize_percentage(row.get('charge_factor'))
        if normalized is not None:
            fleet_health_values.append(normalized)
    fleet_health = (
        round(sum(fleet_health_values) / len(fleet_health_values), 1)
        if fleet_health_values
        else None
    )
    chargepoint_count = _count_chargepoints()
    unassigned_recent_count = _count_recent_unassigned_sessions(days=3)

    infrastructure_summary, _, _ = _compute_infrastructure_overview(include_chargepoint_rows=False)

    summary = {
        'vehicle_count': vehicle_count,
        'total_sessions': total_sessions,
        'chargepoint_count': chargepoint_count,
        'unassigned_recent_count': unassigned_recent_count,
        'fleet_health': fleet_health,
        'infrastructure_quality_index': infrastructure_summary.get('infrastructure_quality_index'),
    }

    return render_template(
        'bhi_fleet_report.html',
        vehicles=vehicles,
        daily_sessions=daily_sessions,
        summary=summary,
        aside='fleetReport',
    )


@app.route('/cf/bhi_vehicle_leaderboard')
def bhi_vehicle_leaderboard():
    vehicles = _fetch_fleet_report_rows()

    def sort_key(vehicle):
        charge_factor = vehicle.get('charge_factor')
        if charge_factor is None:
            return (1, 0, (vehicle.get('name') or '').lower())
        return (0, -float(charge_factor), (vehicle.get('name') or '').lower())

    sorted_vehicles = sorted(vehicles, key=sort_key)

    return render_template(
        'bhi_vehicle_leaderboard.html',
        vehicles=sorted_vehicles,
        aside='fleetReport',
    )


@app.route('/cf/bhi_fleet_report/<int:vehicle_id>')
def bhi_fleet_report_details(vehicle_id: int):
    vehicle = _get_vehicle(vehicle_id)
    if not vehicle:
        abort(404)

    sessions = _fetch_vehicle_charging_sessions(
        vehicle.get('id_tag'),
        vehicle.get('vehicle_id'),
    )

    default_charge_factor = _normalize_percentage(vehicle.get('charge_factor'))
    default_confidence = _normalize_percentage(vehicle.get('confidence'))

    best_session = max(
        (session for session in sessions if session.get('confidence') is not None),
        key=lambda session: session['confidence'],
        default=None,
    )

    if best_session is not None:
        vehicle['confidence'] = best_session.get('confidence')
        vehicle['charge_factor'] = best_session.get('charge_factor')
    else:
        vehicle['confidence'] = default_confidence
        vehicle['charge_factor'] = default_charge_factor

    return render_template(
        'bhi_fleet_report_details.html',
        vehicle=vehicle,
        sessions=sessions,
        aside='fleetReport',
    )


@app.post('/cf/bhi_vehicle_list/delete/<int:vehicle_id>')
def bhi_vehicle_delete(vehicle_id):
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                "DELETE FROM bhi_vehicle_list WHERE vehicle_id=%s",
                (vehicle_id,)
            )
        conn.commit()
    finally:
        conn.close()
    return redirect(url_for('bhi_vehicle_list'))


def _fetch_latest_vehicle_clouddata(limit: int = 200):
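    """Return the most recent cloud data payloads joined with the matching vehicle names."""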
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT d.id, d.vin, d.json_payload, d.ts, l.name
                FROM bhi_vehicle_data AS d
                LEFT JOIN bhi_vehicle_list AS l ON d.vin = l.vin
                ORDER BY d.ts DESC
                LIMIT %s
                """,
                (limit,),
            )
            rows = cur.fetchall()
            for row in rows:
                payload = row.get("json_payload")
                if isinstance(payload, (bytes, bytearray)):
                    row["json_payload"] = payload.decode("utf-8", "replace")
            return rows
    finally:
        conn.close()

def current_messages_table(now: Optional[datetime.datetime] = None) -> str:
    """Return the name of the monthly op_messages table for the provided date."""

    if now is None:
        now = datetime.datetime.utcnow()
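    # strftime("%y%m") yields e.g. "2407" for July 2024, giving "op_messages_2407".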
    return f"op_messages_{now.strftime('%y%m')}"

def _fetch_redirects():
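    """Return all proxy redirects ordered by their source URL."""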
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT source_url, last_connected
                FROM op_redirects
                ORDER BY source_url ASC
                """
            )
            return cur.fetchall()
    finally:
        conn.close()


def _decode_bytes_in_rows(rows):
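    """Decode ``bytes``/``bytearray`` values in each row to UTF-8 strings in place."""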
    for row in rows:
        for key, value in list(row.items()):
            if isinstance(value, (bytes, bytearray)):
                row[key] = value.decode("utf-8", "replace")


@app.route('/cf/bhi_all_chargepoints')
@app.route('/bhi_all_chargepoints')
def bhi_all_chargepoints():
    rows = _fetch_redirects()
    for row in rows:
        source = row.get('source_url', '') or ''
        source = source.rstrip('/')
        row['station_label'] = source.split('/')[-1] if source else ''
    return render_template(
        'bhi_all_chargepoints.html',
        rows=rows,
        aside='repo_diag',
    )


def _find_filter_column(columns):
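    """Return the first known station/chargepoint identifier column present in *columns*.

    For example, ``_find_filter_column(['id', 'station_id', 'payload'])``
    returns ``'station_id'``.
    """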
    candidates = [
        'station_id',
        'station',
        'source_url',
        'source',
        'stationId',
        'chargepoint',
        'charge_point',
        'cp_id',
        'identity',
    ]
    for candidate in candidates:
        if candidate in columns:
            return candidate
    return None


def _find_order_column(columns, fallback):
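    """Return the column to sort by: ``id`` if present, then a timestamp-like column, else *fallback*."""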
    if 'id' in columns:
        return 'id'
    for candidate in ['created_at', 'ts', 'timestamp', 'datetime', 'event_time']:
        if candidate in columns:
            return candidate
    return fallback


@app.route('/cf/bhi_chargepoint_messages')
@app.route('/bhi_chargepoint_messages')
def bhi_chargepoint_messages():
    source_url = request.args.get('source_url', '').strip()
    if not source_url:
        abort(400, 'source_url parameter is required')

    station_label = (source_url.rstrip('/') or source_url).split('/')[-1]
    normalized = normalize_station_id(source_url)
    table_name = current_messages_table()

    rows = []
    columns = []
    filter_column = None
    order_column = None
    table_exists = False
    selected_value = None
    error = None

    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute("SHOW TABLES LIKE %s", (table_name,))
            table_exists = cur.fetchone() is not None
            if not table_exists:
                error = f"Table {table_name} was not found."
            else:
                cur.execute(f"SHOW COLUMNS FROM `{table_name}`")
                column_info = cur.fetchall()
                columns = [col['Field'] for col in column_info]
                filter_column = _find_filter_column(columns)
                if not filter_column:
                    error = (
                        f"No suitable column for filtering found in {table_name}."
                    )
                else:
                    order_column = _find_order_column(columns, filter_column)
                    query = (
                        f"SELECT * FROM `{table_name}` "
                        f"WHERE `{filter_column}`=%s ORDER BY `{order_column}` DESC LIMIT 50"
                    )
                    search_values = []
                    if normalized:
                        search_values.append(normalized)
                    if source_url not in search_values:
                        search_values.append(source_url)
                    alt = '/' + normalized if normalized and not normalized.startswith('/') else normalized
                    if alt and alt not in search_values:
                        search_values.append(alt)

                    for candidate in search_values:
                        cur.execute(query, (candidate,))
                        rows = cur.fetchall()
                        if rows:
                            selected_value = candidate
                            break
                    if not rows:
                        error = (
                            "No messages found for this chargepoint in the current month."
                        )
    finally:
        conn.close()

    _decode_bytes_in_rows(rows)

    payload_fields = [
        col for col in columns if any(key in col.lower() for key in ['payload', 'message', 'body'])
    ]

    display_fields = [
        col
        for col in [
            'id',
            'created_at',
            'ts',
            'timestamp',
            'direction',
            'message_type',
            'action',
            'uniqueId',
        ]
        if col in columns
    ]
    if not display_fields:
        display_fields = columns

    return render_template(
        'bhi_chargepoint_messages.html',
        source_url=source_url,
        station_label=station_label,
        rows=rows,
        table_name=table_name,
        table_exists=table_exists,
        filter_column=filter_column,
        order_column=order_column,
        display_fields=display_fields,
        payload_fields=payload_fields,
        selected_value=selected_value,
        error=error,
        aside='repo_diag',
    )


@app.route('/cf/bhi_vehicle_clouddata')
def bhi_vehicle_clouddata():
    records = _fetch_latest_vehicle_clouddata()
    return render_template(
        'bhi_vehicle_clouddata.html',
        records=records,
        aside='vehicleMenu',
    )


@app.route('/cf/bhi_data_flow_diagram')
def data_flow_diagram():
    """Display the data flow diagram inside the dashboard frame."""

    return render_template('bhi_data_flow_diagram.html', aside='vehicleMenu')


@app.route('/cf/op_dashboard')
def dashboard():
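    """Render the proxy dashboard with redirects, connection durations and broker stats."""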
    # 1) Open the database connection (DictCursor, so fetchall() returns dicts)
    conn = get_db_conn()
    try:
        with conn.cursor() as cursor:
            # 2) Load redirects
            cursor.execute('SELECT * FROM op_redirects ORDER BY source_url ASC')
            redirects = cursor.fetchall()

            # 3) Load targets together with their short names
            cursor.execute('SELECT ws_url, short_name FROM op_targets')
            target_rows = cursor.fetchall()
    finally:
        conn.close()

    target_dict = {'/'.join(t['ws_url'].split('/')[:4]): t['short_name'] for t in target_rows}

    # 4) For each redirect, extract the base URL and replace it with the short name if one exists
    for r in redirects:
        base = '/'.join(r['ws_url'].split('/')[:4])
        r['ws_url_short'] = target_dict.get(base, base)

    # 5) Fetch the connected durations
    try:
        r = requests.get(CONNECTED_ENDPOINT, timeout=5)
        connected = r.json().get('connected', [])
    except Exception:
        connected = []

    durations = {}
    session_reconnects = {}
    connection_quality = {}
    for entry in connected:
        sid = entry.get('station_id')
        secs = entry.get('duration_seconds')
        durations[sid] = (
            str(datetime.timedelta(seconds=int(secs)))
            if secs is not None
            else "00:00:00"
        )
        session_reconnects[sid] = entry.get("reconnectCounter")
        connection_quality[sid] = entry.get("connectionQuality")

    # 6) Fetch stats
    try:
        resp = requests.get(STATS_ENDPOINT, timeout=3)
        text = resp.text
        m_e = re.search(r'entries\s*:\s*(\d+)', text)
        m_c = re.search(r'connected\s*:\s*(\d+)', text)
        entries = int(m_e.group(1)) if m_e else 0
        connected_evse = int(m_c.group(1)) if m_c else 0
    except Exception:
        entries = 0
        connected_evse = 0

    # Rewrite only the scheme so "http(s)://" becomes "ws(s)://".
    PROXY_BASE_WS = re.sub(r"^http", "ws", PROXY_BASE_URL)

    # 7) Render dashboard template
    template_context = {
        "rows": redirects,
        "PROXY_BASE_WS": PROXY_BASE_WS,
        "durations": durations,
        "reconnects": session_reconnects,
        "qualities": connection_quality,
        "entries": entries,
        "connected_evse": connected_evse,
    }

    return render_template("bhi_dashboard.html", **template_context)


def main() -> None:
    """Start the dashboard application."""
    # Prefer dashboard port from config.json, fall back to default 5000
    port = int(_config.get("dashboard_port", 5000))
    ssl_context = _build_ssl_context()

    try:
        start_weather_polling()
    except Exception:
        logging.exception("Unable to start weather polling thread during startup")

    if ssl_context:
        logging.info(
            "Starting dashboard with HTTPS enabled on port %s", port
        )
    else:
        logging.info(
            "Starting dashboard without HTTPS on port %s", port
        )

    app.run(
        host="0.0.0.0",
        port=port,
        debug=True,
        use_reloader=True,
        ssl_context=ssl_context,
    )


if __name__ == "__main__":
    main()
