"""Import Data - Leads endpoints."""
import io
from typing import Optional

from fastapi import APIRouter, Request, Query, UploadFile, File, Form, Header
from fastapi.responses import JSONResponse, Response
import sqlalchemy
from pydantic import BaseModel

from api.feature.import_data.db import get_cloudsql
from api.feature.import_data.parsing import parse_leads_file

router = APIRouter()

LEAD_SOURCES = ("erp", "website_newsletter", "facebook_leadgen", "name_card", "event_seminar")

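# Assumed shape of the `leads` table, reconstructed from the SELECT/INSERT
# statements below (the real DDL lives elsewhere; the column types are a guess):
#
#   CREATE TABLE leads (
#       id          UUID PRIMARY KEY DEFAULT gen_random_uuid(),
#       property_id TEXT NOT NULL,
#       lead_source TEXT NOT NULL,
#       full_name   TEXT,
#       email       TEXT,
#       phone       TEXT,
#       company     TEXT,
#       title       TEXT,
#       note        TEXT,
#       created_by  TEXT,
#       created_at  TIMESTAMPTZ NOT NULL DEFAULT now()
#   );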

def _property_id(header: Optional[str], query: Optional[str]) -> str:
    """Resolve the property id; the query parameter wins over the header."""
    return (query or header or "").strip()


class LeadImportBody(BaseModel):
    property_id: str
    lead_source: str
    leads: list[dict]


@router.get("/")
async def list_leads(
    request: Request,
    property_id: Optional[str] = Query(None),
    limit: int = Query(20, ge=1, le=100),
    offset: int = Query(0, ge=0),
    sort_by: str = Query("created_at"),
    order: str = Query("desc"),
    lead_source: Optional[str] = Query(None),
    x_property: Optional[str] = Header(None, alias="x-property"),
):
    """GET /api/v1/import-data/leads - list with pagination, filter, sort."""
    pid = _property_id(x_property, property_id)
    if not pid:
        return JSONResponse(
            status_code=400,
            content={"success": False, "message": "property_id (or x-property) required"},
        )

    csql = get_cloudsql()
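    # sort_by and order are interpolated into the SQL text below, so they are
    # whitelisted against fixed values here instead of being bound as parameters.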
    order_dir = "DESC" if (order or "desc").lower() == "desc" else "ASC"
    order_col = sort_by if sort_by in ("created_at", "full_name", "email", "lead_source") else "created_at"

    where = " property_id = :pid "
    params = {"pid": pid}
    if lead_source and lead_source in LEAD_SOURCES:
        where += " AND lead_source = :ls "
        params["ls"] = lead_source

    count_sql = f"SELECT COUNT(*) FROM leads WHERE {where}"
    with csql.pool.connect() as conn:
        total = conn.execute(sqlalchemy.text(count_sql), params).scalar() or 0
        list_sql = f"""
            SELECT id, full_name, email, phone, company, lead_source, note, created_at
            FROM leads WHERE {where}
            ORDER BY {order_col} {order_dir} LIMIT :lim OFFSET :off
        """
        params["lim"] = limit
        params["off"] = offset
        rows = conn.execute(sqlalchemy.text(list_sql), params).fetchall()

    data = [
        {
            "id": str(r[0]),
            "full_name": r[1],
            "email": r[2],
            "phone": r[3],
            "company": r[4],
            "lead_source": r[5],
            "note": r[6],
            "created_at": r[7].isoformat() if r[7] else None,
        }
        for r in rows
    ]
    return JSONResponse(
        content={"success": True, "total": total, "limit": limit, "offset": offset, "data": data}
    )

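# Example request/response (hypothetical data; the query parameter takes
# precedence over the x-property header when both are sent):
#
#   GET /api/v1/import-data/leads/?property_id=p1&lead_source=erp&limit=10
#
#   {"success": true, "total": 42, "limit": 10, "offset": 0, "data": [...]}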

@router.post("/import/file")
async def import_leads_file(
    request: Request,
    file: UploadFile = File(...),
    property_id: str = Form(...),
    lead_source: str = Form(...),
):
    """POST /api/v1/import-data/leads/import/file - upload CSV/XLSX."""
    pid = property_id.strip()
    if not pid:
        return JSONResponse(
            status_code=400,
            content={"success": False, "message": "property_id is required"},
        )
    if lead_source not in LEAD_SOURCES:
        return JSONResponse(
            status_code=400,
            content={
                "success": False,
                "message": "lead_source must be one of: " + ",".join(LEAD_SOURCES),
            },
        )
    content = await file.read()
    try:
        rows = parse_leads_file(content, file.filename or "")
    except Exception as e:
        return JSONResponse(status_code=400, content={"success": False, "message": f"Parse error: {e}"})

    user_id = getattr(request.state, "user_id", None)
    imported = 0
    csql = get_cloudsql()
    with csql.pool.connect() as conn:
        for r in rows:
            fn = (r.get("full_name") or r.get("name") or "").strip()
            em = (r.get("email") or "").strip()
            if not fn and not em:
                continue
            conn.execute(
                sqlalchemy.text("""
                    INSERT INTO leads (property_id, lead_source, full_name, email, phone, company, title, note, created_by)
                    VALUES (:pid, :ls, :fn, :em, :ph, :co, :ti, :no, :uid)
                """),
                {
                    "pid": property_id.strip(),
                    "ls": lead_source,
                    "fn": fn or None,
                    "em": em or None,
                    "ph": (r.get("phone") or "").strip() or None,
                    "co": (r.get("company") or "").strip() or None,
                    "ti": (r.get("title") or "").strip() or None,
                    "no": (r.get("note") or r.get("notes") or "").strip() or None,
                    "uid": user_id,
                },
            )
            imported += 1
        conn.commit()

    return JSONResponse(
        content={"success": True, "message": f"Imported {imported} lead(s)", "imported": imported}
    )

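# Example upload (hypothetical values; parsing is delegated to
# parse_leads_file, and rows missing both full_name and email are skipped):
#
#   curl -X POST .../api/v1/import-data/leads/import/file \
#     -F "file=@leads.csv" -F "property_id=p1" -F "lead_source=erp"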

@router.post("/import")
async def import_leads_json(request: Request, body: LeadImportBody):
    """POST /api/v1/import-data/leads/import - import JSON body."""
    pid = body.property_id.strip()
    if not pid:
        return JSONResponse(
            status_code=400,
            content={"success": False, "message": "property_id is required"},
        )
    if body.lead_source not in LEAD_SOURCES:
        return JSONResponse(
            status_code=400,
            content={"success": False, "message": "lead_source must be one of: " + ",".join(LEAD_SOURCES)},
        )
    user_id = getattr(request.state, "user_id", None)
    imported = 0
    csql = get_cloudsql()
    with csql.pool.connect() as conn:
        for r in body.leads:
            fn = (r.get("full_name") or r.get("name") or "").strip()
            em = (r.get("email") or "").strip()
            if not fn and not em:
                continue
            conn.execute(
                sqlalchemy.text("""
                    INSERT INTO leads (property_id, lead_source, full_name, email, phone, company, title, note, created_by)
                    VALUES (:pid, :ls, :fn, :em, :ph, :co, :ti, :no, :uid)
                """),
                {
                    "pid": body.property_id,
                    "ls": body.lead_source,
                    "fn": fn or None,
                    "em": em or None,
                    "ph": (r.get("phone") or "").strip() or None,
                    "co": (r.get("company") or "").strip() or None,
                    "ti": (r.get("title") or "").strip() or None,
                    "no": (r.get("note") or r.get("notes") or "").strip() or None,
                    "uid": user_id,
                },
            )
            imported += 1
        conn.commit()
    return JSONResponse(
        content={"success": True, "message": f"Imported {imported} lead(s)", "imported": imported}
    )

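# Example body (hypothetical values; rows missing both full_name and email
# are skipped, mirroring the file import above):
#
#   POST /api/v1/import-data/leads/import
#   {
#     "property_id": "p1",
#     "lead_source": "erp",
#     "leads": [{"full_name": "Jane Doe", "email": "jane@example.com"}]
#   }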

_LEAD_CSV_HEADER = "full_name,email,phone,company,lead_source,note\n"


@router.get("/import/example/csv")
async def example_leads_csv():
    """GET /api/v1/import-data/leads/import/example/csv."""
    return Response(
        content=_LEAD_CSV_HEADER.encode("utf-8"),
        media_type="text/csv; charset=utf-8",
        headers={"Content-Disposition": 'attachment; filename="leads-example.csv"'},
    )


@router.get("/import/example/xlsx")
async def example_leads_xlsx():
    """GET /api/v1/import-data/leads/import/example/xlsx."""
    try:
        from openpyxl import Workbook
    except ImportError:
        return JSONResponse(
            status_code=501,
            content={"success": False, "message": "openpyxl not installed; use example/csv"},
        )
    wb = Workbook()
    ws = wb.active
    ws.title = "Leads"
    for i, h in enumerate(["full_name", "email", "phone", "company", "lead_source", "note"], 1):
        ws.cell(row=1, column=i, value=h)
    buf = io.BytesIO()
    wb.save(buf)
    buf.seek(0)
    return Response(
        content=buf.read(),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": 'attachment; filename="leads-example.xlsx"'},
    )
