#!/usr/bin/env python3
from __future__ import annotations

import importlib.util
import json
import os
import re
import sys
from collections import Counter
from datetime import date, datetime, timedelta
from pathlib import Path
from typing import Any
from urllib.parse import urljoin

from bs4 import BeautifulSoup
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait

# Filesystem anchors: this script sits two directory levels below the repo root.
THIS_FILE = Path(__file__).resolve()
PULLED_INFO_ROOT = THIS_FILE.parents[1]
ROOT = THIS_FILE.parents[2]
# Shared Selenium helper module, loaded dynamically by load_rw_helper().
RW_HELPER_PATH = PULLED_INFO_ROOT / "Boot_Features" / "RW_Scrapers" / "RW_Site_Scraper-Orders_Page.py"
# JSON payload produced by this script, written next to this file.
OUTPUT_PATH = THIS_FILE.with_name("incoming_stock.json")

# orderFilter=6 selects the "Last Six Months" view on the B2B order history page.
ORDER_HISTORY_URL = "https://order.redwingshoes.com/on/demandware.store/Sites-B2B-Site/default/Order-History?orderFilter=6"
ORDER_HISTORY_LABEL = "Last Six Months"
# UPS tracking page URL; "{tracking}" is filled with the tracking number.
TRACKING_URL_TEMPLATE = "https://www.ups.com/track?loc=null&tracknum={tracking}&requester=ST/trackdetails"

# Runtime tuning via environment variables (all optional):
# RW_HEADLESS: truthy string -> run the browser headless (defaults to on).
RUN_HEADLESS = os.environ.get("RW_HEADLESS", "1").strip().lower() in {"1", "true", "yes", "y", "on"}
# How many days of order history to fetch in detail; floored at 7.
LOOKBACK_DAYS = max(7, int(os.environ.get("RW_INCOMING_LOOKBACK_DAYS", "45") or "45"))
# Cap on how many candidate orders to fetch; 0 means "no cap".
MAX_ORDERS = max(0, int(os.environ.get("RW_INCOMING_MAX_ORDERS", "0") or "0"))
# Seconds to wait for the UPS tracking page to render; floored at 6.
UPS_WAIT_SECONDS = max(6, int(os.environ.get("RW_UPS_WAIT_SECONDS", "30") or "30"))

# Month-name -> month-number lookup used when parsing UPS's human-readable dates.
MONTHS = {
    "january": 1,
    "february": 2,
    "march": 3,
    "april": 4,
    "may": 5,
    "june": 6,
    "july": 7,
    "august": 8,
    "september": 9,
    "october": 10,
    "november": 11,
    "december": 12,
}


def load_rw_helper():
    """Dynamically import the shared Red Wing Selenium helper module.

    The loaded module is cached in ``sys.modules`` under a private name so
    repeated calls return the same instance instead of re-executing the file.
    """
    name = "_rw_order_site_helper"
    if name in sys.modules:
        return sys.modules[name]
    spec = importlib.util.spec_from_file_location(name, RW_HELPER_PATH)
    if spec is None or spec.loader is None:
        raise ImportError(f"Could not load Red Wing helper from {RW_HELPER_PATH}")
    helper = importlib.util.module_from_spec(spec)
    # Register before exec so any self-imports inside the helper resolve.
    sys.modules[name] = helper
    spec.loader.exec_module(helper)
    return helper


def clean_text(value: object) -> str:
    """Normalize *value* to a single-spaced string.

    Falsy input becomes ""; whitespace runs collapse to one space, and any
    space left before common punctuation (",.;:") is removed.
    """
    squashed = re.sub(r"\s+", " ", str(value or "")).strip()
    return re.sub(r"\s+([,.;:])", r"\1", squashed)


def text_lines(value: object) -> list[str]:
    """Split *value* into lines, normalize each, and drop the empty ones."""
    normalized = (clean_text(raw) for raw in str(value or "").splitlines())
    return [line for line in normalized if line]


def parse_money(value: object) -> float:
    """Parse a currency string such as "$1,234.56" into a float.

    Returns 0.0 for empty or non-numeric input.
    """
    raw = clean_text(value)
    if not raw:
        return 0.0
    raw = raw.replace("$", "").replace(",", "")
    try:
        return float(raw)
    except ValueError:
        # float() on a str can only raise ValueError; non-numeric residue
        # (e.g. "N/A") is deliberately treated as zero.
        return 0.0


def parse_quantity(value: object) -> float:
    """Parse a quantity string such as "1,250" into a float.

    Returns 0.0 for empty or non-numeric input.
    """
    raw = clean_text(value)
    if not raw:
        return 0.0
    raw = raw.replace(",", "")
    try:
        return float(raw)
    except ValueError:
        # float() on a str can only raise ValueError; anything unparseable
        # counts as zero, matching parse_money's behavior.
        return 0.0


def format_quantity_value(value: object) -> str:
    """Render a quantity without a trailing ".0" for whole numbers.

    Fractional quantities fall back to "%g" formatting.
    """
    qty = parse_quantity(value)
    # parse_quantity always returns a float, so no extra cast is needed.
    if qty.is_integer():
        return str(int(qty))
    return f"{qty:g}"


def parse_iso_date(value: object) -> str:
    """Return an ISO "YYYY-MM-DD" string parsed from *value*.

    Accepts "YYYY-MM-DD" or "MM/DD/YYYY" input; returns "" when neither
    format matches.
    """
    raw = clean_text(value)
    if not raw:
        return ""
    for fmt in ("%Y-%m-%d", "%m/%d/%Y"):
        try:
            return datetime.strptime(raw, fmt).date().isoformat()
        except ValueError:
            # strptime signals a format mismatch with ValueError only.
            continue
    return ""


def parse_history_datetime(value: object) -> str:
    """Return an ISO datetime (minute precision) parsed from *value*.

    Accepts "YYYY-MM-DD HH:MM" or "YYYY-MM-DD HH:MM:SS"; returns "" when
    neither format matches.
    """
    raw = clean_text(value)
    if not raw:
        return ""
    for fmt in ("%Y-%m-%d %H:%M", "%Y-%m-%d %H:%M:%S"):
        try:
            return datetime.strptime(raw, fmt).isoformat(timespec="minutes")
        except ValueError:
            # strptime signals a format mismatch with ValueError only.
            continue
    return ""


def parse_human_month_day(value: object, reference_date: date | None = None) -> str:
    """Resolve a year-less "Month Day" phrase to an ISO date.

    The year is chosen so the resulting date lies closest to
    *reference_date* (today when omitted), considering the previous,
    current, and next year. Returns "" when no month/day phrase is found
    or no candidate year yields a valid date.
    """
    raw = clean_text(value)
    if not raw:
        return ""
    found = re.search(
        r"(January|February|March|April|May|June|July|August|September|October|November|December)\s+(\d{1,2})",
        raw,
        flags=re.IGNORECASE,
    )
    if found is None:
        return ""
    month_number = MONTHS[found.group(1).lower()]
    day_number = int(found.group(2))
    anchor = reference_date if reference_date is not None else datetime.now().date()
    options: list[tuple[int, date]] = []
    for candidate_year in (anchor.year - 1, anchor.year, anchor.year + 1):
        try:
            option = date(candidate_year, month_number, day_number)
        except ValueError:
            # e.g. Feb 29 in a non-leap year.
            continue
        options.append((abs((option - anchor).days), option))
    if not options:
        return ""
    # Lexicographic min over (distance, date) == sort then take the first.
    return min(options)[1].isoformat()


def format_date_for_display(value: object) -> str:
    """Format ISO date/datetime strings as "Mon DD, YYYY".

    Input that matches none of the recognized formats is returned
    unchanged (after whitespace normalization); empty input yields "".
    """
    raw = clean_text(value)
    if not raw:
        return ""
    for candidate_fmt in ("%Y-%m-%d", "%Y-%m-%dT%H:%M", "%Y-%m-%dT%H:%M:%S"):
        try:
            return datetime.strptime(raw, candidate_fmt).strftime("%b %d, %Y")
        except Exception:
            continue
    return raw


def clone_without_selector(node, selector: str) -> str:
    """Return *node*'s text content with every *selector* match removed.

    Operates on a re-parsed copy so the caller's tree is left untouched;
    the result is newline-joined normalized lines.
    """
    working_copy = BeautifulSoup(str(node), "html.parser")
    for hit in working_copy.select(selector):
        hit.extract()
    return "\n".join(text_lines(working_copy.get_text("\n", strip=True)))


def find_heading(soup: BeautifulSoup, label: str):
    """Return the first order-details heading whose text equals *label*.

    Comparison ignores case and a trailing colon; None when no heading
    matches.
    """
    wanted = clean_text(label).rstrip(":").lower()
    return next(
        (
            heading
            for heading in soup.select(".c-order-details__heading")
            if clean_text(heading.get_text(" ", strip=True)).rstrip(":").lower() == wanted
        ),
        None,
    )


def parse_order_history(html: str) -> list[dict[str, Any]]:
    """Parse the order-history table HTML into one dict per order row.

    Each dict carries the order number, absolute detail-page URL, PO
    number, ISO order date, order datetime, and order total (both the raw
    text and a parsed float). Rows lacking a linked order number or with
    fewer than four cells are skipped.
    """
    soup = BeautifulSoup(html, "html.parser")
    rows: list[dict[str, Any]] = []
    for tr in soup.select("table.js-order-history-table tbody tr"):
        cells = tr.find_all("td")
        if len(cells) < 4:
            continue
        link = cells[0].find("a", href=True)
        if link is None:
            continue
        order_number = clean_text(link.get_text(" ", strip=True))
        if not order_number:
            continue
        # The date cell contains a hidden machine-readable "YYYY-MM-DD HH:MM"
        # span; fall back to the cell's visible text when it is absent.
        hidden_dt = cells[2].find("span", class_="d-none")
        hidden_dt_text = clean_text(hidden_dt.get_text(" ", strip=True)) if hidden_dt else ""
        order_date = parse_iso_date(hidden_dt_text.split(" ")[0] if hidden_dt_text else cells[2].get_text(" ", strip=True))
        rows.append(
            {
                "orderNumber": order_number,
                "detailUrl": urljoin(ORDER_HISTORY_URL, link["href"]),
                "poNumber": clean_text(cells[1].get_text(" ", strip=True)),
                "orderDate": order_date,
                "orderDateTime": parse_history_datetime(hidden_dt_text),
                "orderTotalText": clean_text(cells[3].get_text(" ", strip=True)),
                "orderTotal": parse_money(cells[3].get_text(" ", strip=True)),
            }
        )
    return rows


def parse_pricing(card: BeautifulSoup) -> tuple[float, str, float, str]:
    """Extract (quantity, quantity text, total, total text) from an item card.

    Walks the card's pricing rows, matching on the row label ("Quantity"
    or "Total" after normalization); unmatched rows are ignored.
    """

    def node_text(parent, css: str) -> str:
        # Normalized text of the first css match, or "" when absent.
        found = parent.select_one(css)
        return clean_text(found.get_text(" ", strip=True) if found else "")

    quantity = 0.0
    quantity_text = ""
    total = 0.0
    total_text = ""
    for block in card.select(".c-card-product-info__item-pricing .item-attributes"):
        label = node_text(block, ".line-item-price-info").lower()
        value = node_text(block, ".line-item-total-price-amount")
        if label == "quantity":
            quantity = parse_quantity(value)
            quantity_text = value
        elif label == "total":
            total = parse_money(value)
            total_text = value
    return quantity, quantity_text, total, total_text


def parse_allocations(card: BeautifulSoup) -> list[dict[str, Any]]:
    """Read the size/width allocation grid from an item card.

    Returns one dict per non-zero cell with keys: "size", "width",
    "quantity", and a display "label". Returns [] when the card has no
    allocation table or no usable header row.
    """
    table = card.select_one("table.c-order-entry-table")
    if table is None:
        return []
    header_rows = table.select("thead tr")
    if not header_rows:
        return []
    last_header_cells = header_rows[-1].find_all(["th", "td"])
    last_header_text = [clean_text(cell.get_text(" ", strip=True)) for cell in last_header_cells]
    if not last_header_text:
        return []
    allocations: list[dict[str, Any]] = []
    for row in table.select("tbody tr"):
        row_label = clean_text(row.find("th").get_text(" ", strip=True) if row.find("th") else "")
        cells = row.find_all("td")
        if not cells:
            continue
        # Align data cells with the right-most header cells: the header row
        # may carry an extra leading label column that body rows lack.
        # (A dead precomputed column_headers assignment was removed here;
        # it was always overwritten before use.)
        column_headers = last_header_text[-len(cells):] if len(last_header_text) >= len(cells) else last_header_text
        for index, cell in enumerate(cells):
            qty = parse_quantity(cell.get_text(" ", strip=True))
            if qty <= 0:
                continue
            size = column_headers[index] if index < len(column_headers) else ""
            label_parts = [part for part in (size, row_label) if part and part.lower() not in {"size", "width"}]
            allocations.append(
                {
                    "size": size,
                    "width": row_label,
                    "quantity": qty,
                    "label": " ".join(label_parts).strip() or size or row_label or "Qty",
                }
            )
    return allocations


def summarize_allocations(allocations: list[dict[str, Any]]) -> list[str]:
    """Collapse allocation rows into "<label> x<qty>" strings, sorted by label."""
    totals: Counter[str] = Counter()
    for row in allocations:
        key = clean_text(row.get("label"))
        if key:
            totals[key] += parse_quantity(row.get("quantity"))
    # Counter keys are unique, so sorting the items sorts by label alone.
    return [
        f"{key} x{format_quantity_value(amount)}"
        for key, amount in sorted(totals.items())
    ]


def parse_item_card(card: BeautifulSoup) -> dict[str, Any]:
    """Parse one product card into an item record.

    Pulls the product name, brand (from the dt with its trailing
    "- Style:" suffix stripped), style number, image URL, pricing, and the
    size/width allocation breakdown plus its display summary.
    """
    header = clean_text(card.select_one(".line-item-name").get_text(" ", strip=True) if card.select_one(".line-item-name") else "")
    dt = clean_text(card.select_one("dl dt").get_text(" ", strip=True) if card.select_one("dl dt") else "")
    style = clean_text(card.select_one("dl dd").get_text(" ", strip=True) if card.select_one("dl dd") else "")
    # The dt reads like "Brand - Style:"; drop the suffix to keep the brand.
    brand = clean_text(re.sub(r"\s*-\s*Style:\s*$", "", dt, flags=re.IGNORECASE))
    image = card.select_one("img.product-image")
    quantity, quantity_text, total, total_text = parse_pricing(card)
    allocations = parse_allocations(card)
    return {
        "style": style,
        "name": header,
        "brand": brand,
        "image": image.get("src", "").strip() if image else "",
        "quantity": quantity,
        "quantityText": quantity_text,
        "total": total,
        "totalText": total_text,
        "allocations": allocations,
        "allocationSummary": summarize_allocations(allocations),
    }


def parse_order_detail(html: str, history_row: dict[str, Any]) -> dict[str, Any]:
    """Parse an order detail page into a full order record.

    Combines the detail page's content (order number/date headings,
    addresses, payment method, shipping status table, item cards) with
    fields carried over from *history_row* (detail URL, order datetime,
    totals, PO fallback). Each shipment's "tracking" slot starts as None
    and is filled in later from the UPS tracking cache.
    """
    soup = BeautifulSoup(html, "html.parser")
    # Prefer the on-page headings; fall back to the history row when absent.
    order_number = clean_text(
        re.sub(r"^ORDER NUMBER:\s*", "", soup.select_one(".c-order-details__order-number").get_text(" ", strip=True), flags=re.IGNORECASE)
        if soup.select_one(".c-order-details__order-number")
        else history_row.get("orderNumber", "")
    )
    order_date = clean_text(
        re.sub(r"^ORDER DATE:\s*", "", soup.select_one(".c-order-details__order-date").get_text(" ", strip=True), flags=re.IGNORECASE)
        if soup.select_one(".c-order-details__order-date")
        else history_row.get("orderDate", "")
    )

    shipping_address = ""
    billing_address = ""
    payment_method = ""
    purchase_order = clean_text(history_row.get("poNumber"))

    # Address blocks: take the heading's parent text minus the heading itself.
    shipping_heading = find_heading(soup, "Shipping Address")
    if shipping_heading and shipping_heading.parent:
        shipping_address = clone_without_selector(shipping_heading.parent, ".c-order-details__heading")

    billing_heading = find_heading(soup, "Billing Address")
    if billing_heading and billing_heading.parent:
        billing_address = clone_without_selector(billing_heading.parent, ".c-order-details__heading")

    # Payment block: first line is the method; a "Purchase Order:" line may
    # carry the PO inline or on the following line.
    payment_heading = find_heading(soup, "Payment Method")
    if payment_heading and payment_heading.parent:
        payment_lines = text_lines(clone_without_selector(payment_heading.parent, ".c-order-details__heading"))
        if payment_lines:
            payment_method = payment_lines[0]
        for index, line in enumerate(payment_lines):
            if line.lower().startswith("purchase order:"):
                inline_value = clean_text(line.split(":", 1)[1] if ":" in line else "")
                if inline_value:
                    purchase_order = inline_value
                elif index + 1 < len(payment_lines):
                    purchase_order = payment_lines[index + 1]
                break

    # Shipping status table: one row per shipment, with an optional
    # tracking link in the second cell.
    shipments: list[dict[str, Any]] = []
    shipping_status_heading = find_heading(soup, "Shipping Status")
    if shipping_status_heading:
        shipping_table = shipping_status_heading.find_next("table")
        if shipping_table:
            for row in shipping_table.select("tbody tr"):
                cells = row.find_all("td")
                if not cells:
                    continue
                left_text = clean_text(cells[0].get_text(" ", strip=True))
                right_cell = cells[1] if len(cells) > 1 else None
                tracking_link = right_cell.find("a", href=True) if right_cell else None
                tracking_number = clean_text(tracking_link.get_text(" ", strip=True) if tracking_link else "")
                tracking_url = urljoin(history_row["detailUrl"], tracking_link["href"]) if tracking_link else ""
                shipment_id_match = re.search(r"Shipment ID:\s*([A-Za-z0-9-]+)", left_text, flags=re.IGNORECASE)
                shipped_on_match = re.search(r"Shipped on:\s*(\d{4}-\d{2}-\d{2})", left_text, flags=re.IGNORECASE)
                shipments.append(
                    {
                        "shipmentId": shipment_id_match.group(1) if shipment_id_match else "",
                        "shippedOn": shipped_on_match.group(1) if shipped_on_match else "",
                        "trackingNumber": tracking_number,
                        "trackingUrl": tracking_url,
                        # Carrier is inferred only from the link host.
                        "carrier": "UPS" if "ups.com" in tracking_url.lower() else "",
                        # Filled in later from the UPS tracking cache.
                        "tracking": None,
                    }
                )

    items = [parse_item_card(card) for card in soup.select(".c-card-product-info.product-info.card")]
    item_count = sum(parse_quantity(item.get("quantity")) for item in items)

    return {
        "orderNumber": order_number or history_row["orderNumber"],
        "detailUrl": history_row["detailUrl"],
        "poNumber": purchase_order,
        "orderDate": parse_iso_date(order_date) or history_row.get("orderDate", ""),
        "orderDateTime": history_row.get("orderDateTime", ""),
        "orderTotal": float(history_row.get("orderTotal", 0.0) or 0.0),
        "orderTotalText": clean_text(history_row.get("orderTotalText")),
        "paymentMethod": payment_method,
        "shippingAddress": shipping_address,
        "billingAddress": billing_address,
        "shipments": shipments,
        "items": items,
        "unitCount": item_count,
    }


def wait_for_ups_page(driver, tracking_number: str) -> None:
    """Block until the UPS page shows *tracking_number* and shipment text.

    Waits first for the tracking-number element to match (case-insensitive
    via upper-casing), then for "your shipment" to appear in the body.
    Raises selenium's TimeoutException after UPS_WAIT_SECONDS per step.
    """
    expected = tracking_number.upper()
    waiter = WebDriverWait(driver, UPS_WAIT_SECONDS)
    waiter.until(
        lambda d: clean_text(d.find_element(By.CSS_SELECTOR, "#stApp_trackingNumber").text) == expected
    )
    waiter.until(
        lambda d: "your shipment" in d.find_element(By.TAG_NAME, "body").text.lower()
    )


def parse_ups_tracking_page(driver, helper, tracking_number: str) -> dict[str, Any]:
    """Load and scrape the UPS tracking page for *tracking_number*.

    Navigates via the shared helper, waits for the page to render, then
    extracts the package status, ETA text, ship-to text, next-update
    banner, and the shipment progress events. The ETA's "Month Day"
    phrase is resolved to an ISO date anchored on the first event date
    found (falling back to today inside parse_human_month_day).
    """
    tracking_url = TRACKING_URL_TEMPLATE.format(tracking=tracking_number)
    helper.safe_get(driver, tracking_url, timeout=60, settle=1.5, max_tries=1)
    wait_for_ups_page(driver, tracking_number)
    soup = BeautifulSoup(driver.page_source, "html.parser")

    status = clean_text(soup.select_one("#stApp_txtPackageStatus").get_text(" ", strip=True) if soup.select_one("#stApp_txtPackageStatus") else "")
    eta_label = clean_text(soup.select_one("#st_App_EstDelLabel").get_text(" ", strip=True) if soup.select_one("#st_App_EstDelLabel") else "")
    eta_text = clean_text(soup.select_one("#st_App_PkgStsTimeDayMonthNum").get_text(" ", strip=True) if soup.select_one("#st_App_PkgStsTimeDayMonthNum") else "")
    ship_to = clean_text(
        " ".join(
            part
            for part in (
                clean_text(soup.select_one("#stApp_txtAddress").get_text(" ", strip=True) if soup.select_one("#stApp_txtAddress") else ""),
                clean_text(soup.select_one("#stApp_txtCountry").get_text(" ", strip=True) if soup.select_one("#stApp_txtCountry") else ""),
            )
            if part
        )
    )

    # First non-empty banner paragraph, if any, is the "next update" note.
    next_update = ""
    for paragraph in soup.select("message-banners p"):
        text = clean_text(paragraph.get_text(" ", strip=True))
        if text:
            next_update = text
            break

    # Shipment progress rows: event type, milestone name (td text minus its
    # <p> children), location, and timestamp text.
    events: list[dict[str, Any]] = []
    for row in soup.select("#stApp_shpmtProgress tr"):
        event_type = clean_text(row.select_one("[id^='stApp_rtxtEvent_']").get_text(" ", strip=True) if row.select_one("[id^='stApp_rtxtEvent_']") else "")
        td = row.find("td")
        if td is None:
            continue
        td_clone = BeautifulSoup(str(td), "html.parser")
        for paragraph in td_clone.find_all("p"):
            paragraph.extract()
        name = clean_text(td_clone.get_text(" ", strip=True))
        location = clean_text(td.select_one("[id^='stApp_milestoneLocation']").get_text(" ", strip=True) if td.select_one("[id^='stApp_milestoneLocation']") else "")
        date_time = clean_text(td.select_one("[id^='stApp_milestoneDateTime']").get_text(" ", strip=True) if td.select_one("[id^='stApp_milestoneDateTime']") else "")
        events.append(
            {
                "eventType": event_type,
                "name": name,
                "location": location,
                "dateTime": date_time,
            }
        )

    # Use the first parseable MM/DD/YYYY event date to anchor the ETA year.
    reference_date = None
    for event in events:
        match = re.search(r"(\d{2}/\d{2}/\d{4})", event.get("dateTime", ""))
        if match:
            try:
                reference_date = datetime.strptime(match.group(1), "%m/%d/%Y").date()
                break
            except Exception:
                continue

    eta_date = parse_human_month_day(eta_text, reference_date)
    is_delivered = status.lower().startswith("delivered")

    return {
        "trackingNumber": tracking_number.upper(),
        "trackingUrl": tracking_url,
        "status": status,
        "isDelivered": is_delivered,
        "summaryLabel": eta_label,
        "summaryText": eta_text,
        "etaDate": eta_date,
        # Anything after the first comma in the ETA text is the time window.
        "etaWindow": clean_text(eta_text.split(",", 1)[1] if "," in eta_text else ""),
        "nextUpdate": next_update,
        "shipTo": ship_to,
        "pieceInfo": clean_text(soup.select_one("#stApp_additionalPackages").get_text(" ", strip=True) if soup.select_one("#stApp_additionalPackages") else ""),
        "events": events,
    }


def classify_order(order: dict[str, Any]) -> tuple[str, bool, str, str]:
    """Classify an order's delivery state from its shipments.

    Returns (status label, incoming flag, earliest open ETA date, ETA
    label). "incoming" is True for every state except fully delivered.
    The branch order below matters: each case assumes the ones above it
    did not match.
    """
    shipments = order.get("shipments", [])
    # Shipments that have a tracking number at all.
    tracking_shipments = [shipment for shipment in shipments if clean_text(shipment.get("trackingNumber"))]
    # Tracking payloads that were successfully scraped from UPS.
    tracking_data = [shipment.get("tracking") for shipment in tracking_shipments if isinstance(shipment.get("tracking"), dict)]
    delivered = [item for item in tracking_data if item.get("isDelivered")]
    open_tracking = [item for item in tracking_data if not item.get("isDelivered")]
    # Any shipment with no tracking number assigned yet.
    missing_tracking = any(not clean_text(shipment.get("trackingNumber")) for shipment in shipments)
    # Any shipment whose tracking number exists but whose scrape failed.
    tracking_missing_data = any(clean_text(shipment.get("trackingNumber")) and not shipment.get("tracking") for shipment in shipments)

    # Earliest ETA among still-in-transit shipments drives the order ETA.
    eta_candidates = [item for item in open_tracking if clean_text(item.get("etaDate"))]
    eta_candidates.sort(key=lambda item: clean_text(item.get("etaDate")))
    order_eta_date = clean_text(eta_candidates[0].get("etaDate")) if eta_candidates else ""
    order_eta_label = ""
    if eta_candidates:
        label = clean_text(eta_candidates[0].get("summaryLabel"))
        text = clean_text(eta_candidates[0].get("summaryText"))
        order_eta_label = f"{label}: {text}".strip(": ")

    if not shipments:
        return "Pending shipment", True, order_eta_date, "Awaiting Red Wing shipment"
    if tracking_missing_data and not tracking_data:
        return "Tracking update unavailable", True, order_eta_date, order_eta_label or "Carrier update unavailable"
    if missing_tracking and open_tracking:
        return "Partially shipped", True, order_eta_date, order_eta_label or "Pending tracker assignment"
    if missing_tracking:
        return "Tracking pending", True, order_eta_date, order_eta_label or "Pending tracker assignment"
    if open_tracking and delivered:
        return "Partially delivered", True, order_eta_date, order_eta_label or "Some packages still in transit"
    if open_tracking:
        return "In transit", True, order_eta_date, order_eta_label or "Carrier in transit"
    if tracking_data and len(delivered) == len(tracking_shipments):
        return "Delivered", False, "", ""
    return "Tracking pending", True, order_eta_date, order_eta_label or "Pending carrier update"


def aggregate_styles(orders: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Roll up order items into per-style totals.

    Groups items by style number (falling back to product name), summing
    quantities and size/width allocations, collecting the contributing
    order numbers, and tracking the earliest order date and ETA. Output
    is sorted by descending quantity, then style.
    """
    by_style: dict[str, dict[str, Any]] = {}
    for order in orders:
        order_number = clean_text(order.get("orderNumber"))
        order_date = clean_text(order.get("orderDate"))
        eta_date = clean_text(order.get("etaDate"))
        for item in order.get("items", []):
            key = clean_text(item.get("style")) or clean_text(item.get("name"))
            if not key:
                continue
            entry = by_style.setdefault(
                key,
                {
                    "style": clean_text(item.get("style")),
                    "name": clean_text(item.get("name")),
                    "brand": clean_text(item.get("brand")),
                    "quantity": 0.0,
                    "orders": set(),
                    "earliestOrderDate": "",
                    "etaDate": "",
                    "allocations": Counter(),
                },
            )
            entry["quantity"] += parse_quantity(item.get("quantity"))
            entry["orders"].add(order_number)
            # ISO strings compare chronologically, so "<" finds the earliest.
            if order_date and (not entry["earliestOrderDate"] or order_date < entry["earliestOrderDate"]):
                entry["earliestOrderDate"] = order_date
            if eta_date and (not entry["etaDate"] or eta_date < entry["etaDate"]):
                entry["etaDate"] = eta_date
            for allocation in item.get("allocations", []):
                label = clean_text(allocation.get("label"))
                if not label:
                    continue
                entry["allocations"][label] += parse_quantity(allocation.get("quantity"))

    # Convert accumulator entries (sets/Counters) into JSON-friendly shapes.
    out: list[dict[str, Any]] = []
    for key, entry in by_style.items():
        out.append(
            {
                "style": entry["style"] or key,
                "name": entry["name"],
                "brand": entry["brand"],
                "quantity": entry["quantity"],
                "orderCount": len(entry["orders"]),
                "orders": sorted(entry["orders"]),
                "earliestOrderDate": entry["earliestOrderDate"],
                "etaDate": entry["etaDate"],
                "allocationSummary": [
                    f"{label} x{format_quantity_value(qty)}"
                    for label, qty in sorted(entry["allocations"].items(), key=lambda item: item[0])
                ],
            }
        )
    out.sort(key=lambda item: (-float(item.get("quantity", 0.0)), clean_text(item.get("style"))))
    return out


def order_sort_key(order: dict[str, Any]) -> tuple[Any, ...]:
    """Sort key placing orders with an ETA first, then by date and number."""
    eta = clean_text(order.get("etaDate"))
    placed = clean_text(order.get("orderDate"))
    far_future = "9999-12-31"
    return (
        0 if eta else 1,
        eta or placed or far_future,
        placed or far_future,
        clean_text(order.get("orderNumber")),
    )


def save_payload(payload: dict[str, Any]) -> None:
    """Atomically write *payload* as pretty-printed JSON to OUTPUT_PATH."""
    OUTPUT_PATH.parent.mkdir(parents=True, exist_ok=True)
    staging = OUTPUT_PATH.with_suffix(".json.tmp")
    staging.write_text(json.dumps(payload, indent=2), encoding="utf-8")
    # Rename over the target so readers never observe a half-written file.
    staging.replace(OUTPUT_PATH)


def main() -> int:
    """Run the full incoming-stock scrape and write incoming_stock.json.

    Pipeline: log in and load the order-history page, fetch detail pages
    for orders inside the lookback window, scrape UPS tracking for every
    unique tracking number, classify each order, aggregate per-style
    totals, and save the payload. Per-order/per-tracking failures are
    collected into meta.errors rather than aborting. Returns 0 on
    success; the driver is always quit in the finally block.
    """
    helper = load_rw_helper()
    started_at = datetime.now()
    cutoff_date = started_at.date() - timedelta(days=LOOKBACK_DAYS)
    errors: list[str] = []

    print(f"[incoming-stock] Loading order history from {ORDER_HISTORY_LABEL} ({ORDER_HISTORY_URL})")
    ck = helper.load_checkpoint()
    driver = helper.create_driver(ck, headless=RUN_HEADLESS, block_images=True)

    try:
        helper.ensure_logged_in(driver, ORDER_HISTORY_URL)
        helper.safe_get(driver, ORDER_HISTORY_URL)
        history_rows = parse_order_history(driver.page_source)
        if not history_rows:
            raise RuntimeError("No rows were found on the Red Wing order history page.")

        # Keep rows inside the lookback window; undated rows are kept too
        # (ISO strings compare chronologically).
        candidate_rows = [
            row
            for row in history_rows
            if not row.get("orderDate") or row["orderDate"] >= cutoff_date.isoformat()
        ]
        if MAX_ORDERS:
            candidate_rows = candidate_rows[:MAX_ORDERS]

        print(
            f"[incoming-stock] Found {len(history_rows)} history rows; "
            f"fetching {len(candidate_rows)} candidate orders from the last {LOOKBACK_DAYS} days."
        )

        # Fetch each order's detail page; failures are logged, not fatal.
        detailed_orders: list[dict[str, Any]] = []
        for index, row in enumerate(candidate_rows, start=1):
            print(f"[incoming-stock] Order {index}/{len(candidate_rows)}: {row['orderNumber']}")
            try:
                helper.safe_get(driver, row["detailUrl"])
                detailed_orders.append(parse_order_detail(driver.page_source, row))
            except Exception as exc:
                errors.append(f"Order {row['orderNumber']}: {exc}")
                print(f"[incoming-stock] Failed {row['orderNumber']}: {exc}")

        # Deduplicate tracking numbers across all orders before hitting UPS.
        tracking_numbers = sorted(
            {
                clean_text(shipment.get("trackingNumber")).upper()
                for order in detailed_orders
                for shipment in order.get("shipments", [])
                if clean_text(shipment.get("trackingNumber"))
            }
        )
        # None marks a tracking number whose UPS scrape failed.
        tracking_cache: dict[str, dict[str, Any] | None] = {}
        for index, tracking_number in enumerate(tracking_numbers, start=1):
            print(f"[incoming-stock] Tracking {index}/{len(tracking_numbers)}: {tracking_number}")
            try:
                tracking_cache[tracking_number] = parse_ups_tracking_page(driver, helper, tracking_number)
            except Exception as exc:
                errors.append(f"Tracking {tracking_number}: {exc}")
                tracking_cache[tracking_number] = None
                print(f"[incoming-stock] Failed tracking {tracking_number}: {exc}")

        # Attach tracking data to each shipment, then classify every order.
        incoming_orders: list[dict[str, Any]] = []
        delivered_count = 0
        for order in detailed_orders:
            for shipment in order.get("shipments", []):
                tracking_number = clean_text(shipment.get("trackingNumber")).upper()
                if tracking_number:
                    shipment["tracking"] = tracking_cache.get(tracking_number)
            status, incoming, eta_date, eta_label = classify_order(order)
            order["status"] = status
            order["incoming"] = incoming
            order["etaDate"] = eta_date
            order["etaLabel"] = eta_label
            order["orderDateDisplay"] = format_date_for_display(order.get("orderDate"))
            if incoming:
                incoming_orders.append(order)
            else:
                delivered_count += 1

        incoming_orders.sort(key=order_sort_key)
        style_summary = aggregate_styles(incoming_orders)
        incoming_units = sum(parse_quantity(order.get("unitCount")) for order in incoming_orders)
        tracked_shipments = sum(
            1
            for order in incoming_orders
            for shipment in order.get("shipments", [])
            if clean_text(shipment.get("trackingNumber"))
        )
        # Pending = orders with no shipments yet, plus shipments lacking tracking.
        pending_shipments = 0
        for order in incoming_orders:
            shipments = order.get("shipments", [])
            if not shipments:
                pending_shipments += 1
                continue
            pending_shipments += sum(1 for shipment in shipments if not clean_text(shipment.get("trackingNumber")))

        payload = {
            "meta": {
                "generatedAt": datetime.now().isoformat(timespec="seconds"),
                "historyUrl": ORDER_HISTORY_URL,
                "historyLabel": ORDER_HISTORY_LABEL,
                "lookbackDays": LOOKBACK_DAYS,
                "historyRows": len(history_rows),
                "candidateOrders": len(candidate_rows),
                "deliveredRecentOrders": delivered_count,
                "sourceRoot": str(PULLED_INFO_ROOT),
                "errors": errors,
                "counts": {
                    "orders": len(incoming_orders),
                    "styles": len(style_summary),
                    "units": incoming_units,
                    "trackedShipments": tracked_shipments,
                    "pendingShipments": pending_shipments,
                },
            },
            "summary": {
                "styles": style_summary,
            },
            "orders": incoming_orders,
        }
        save_payload(payload)
        print(
            f"[incoming-stock] Saved {len(incoming_orders)} incoming orders, "
            f"{len(style_summary)} styles, {format_quantity_value(incoming_units)} units "
            f"to {OUTPUT_PATH}"
        )
        return 0
    finally:
        # Best-effort cleanup: never let a quit failure mask the real outcome.
        try:
            driver.quit()
        except Exception:
            pass


if __name__ == "__main__":
    # Propagate main()'s integer result as the process exit code.
    raise SystemExit(main())
