"""Import Data - Contacts endpoints."""
import io
from typing import Optional

from fastapi import APIRouter, Request, Query, UploadFile, File, Form, Header
from fastapi.responses import JSONResponse, Response
import sqlalchemy
from pydantic import BaseModel

from api.feature.import_data.db import get_cloudsql
from api.feature.import_data.parsing import parse_contacts_file

router = APIRouter()


def _property_id(header: Optional[str], query: Optional[str]) -> str:
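    """Resolve the property id: an explicit value wins, falling back to the x-property header."""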
    return (query or header or "").strip()


def _iso(ts) -> Optional[str]:
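    """Render a timestamp as an ISO 8601 string, passing None through unchanged."""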
    if ts is None:
        return None
    return ts.isoformat() if hasattr(ts, "isoformat") else str(ts)


class ContactImportBody(BaseModel):
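    """JSON import payload: the target property_id plus a list of contact dicts."""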
    property_id: str
    contacts: list[dict]


@router.get("/")
async def list_contacts(
    request: Request,
    property_id: Optional[str] = Query(None),
    limit: int = Query(20, ge=1, le=100),
    offset: int = Query(0, ge=0),
    sort_by: str = Query("created_at"),
    order: str = Query("desc"),
    customer_id: Optional[str] = Query(None),
    x_property: Optional[str] = Header(None, alias="x-property"),
):
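    """List contacts for a property with pagination, sorting, an optional customer filter, and last import/sync timestamps."""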
    pid = _property_id(x_property, property_id)
    if not pid:
        return JSONResponse(status_code=400, content={"success": False, "message": "property_id required"})

    csql = get_cloudsql()
    order_dir = "DESC" if (order or "desc").lower() == "desc" else "ASC"
    order_col = "created_at" if sort_by not in ("created_at", "full_name", "email") else sort_by
    where = " property_id = :pid "
    params = {"pid": pid}
    if customer_id:
        where += " AND customer_id = :cid "
        params["cid"] = customer_id

    with csql.pool.connect() as conn:
        total = conn.execute(sqlalchemy.text(f"SELECT COUNT(*) FROM contacts WHERE {where}"), params).scalar() or 0
        meta = conn.execute(
            sqlalchemy.text(
                "SELECT last_import_at, last_sync_at FROM sync_meta WHERE property_id = :pid AND entity = 'contact'"
            ),
            {"pid": pid},
        ).fetchone()
        last_import = _iso(meta[0]) if meta else None
        last_sync = _iso(meta[1]) if meta and len(meta) > 1 else None
        params["lim"] = limit
        params["off"] = offset
        rows = conn.execute(
            sqlalchemy.text(f"""
                SELECT id, full_name, email, phone, company, title, created_at FROM contacts
                WHERE {where} ORDER BY {order_col} {order_dir} LIMIT :lim OFFSET :off
            """),
            params,
        ).fetchall()

    data = [
        {"id": str(r[0]), "full_name": r[1], "email": r[2], "phone": r[3], "company": r[4], "title": r[5], "created_at": _iso(r[6])}
        for r in rows
    ]
    return JSONResponse(
        content={
            "success": True,
            "total": total,
            "last_import": last_import,
            "last_sync": last_sync,
            "limit": limit,
            "offset": offset,
            "data": data,
        }
    )


@router.post("/import/file")
async def import_contacts_file(
    request: Request,
    file: UploadFile = File(...),
    property_id: str = Form(""),
    x_property: Optional[str] = Header(None, alias="x-property"),
):
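    """Parse an uploaded contacts file and insert its rows for the given property."""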
    pid = _property_id(x_property, property_id)
    if not pid:
        return JSONResponse(status_code=400, content={"success": False, "message": "property_id required"})
    content = await file.read()
    try:
        rows = parse_contacts_file(content, file.filename or "")
    except Exception as e:
        return JSONResponse(status_code=400, content={"success": False, "message": f"Parse error: {e}"})

    user_id = getattr(request.state, "user_id", None)
    imported = 0
    csql = get_cloudsql()
    with csql.pool.connect() as conn:
        for r in rows:
            fn = (r.get("full_name") or r.get("name") or "").strip()
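            # Skip rows that have neither a name nor an email address.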
            if not fn and not (r.get("email") or "").strip():
                continue
            conn.execute(
                sqlalchemy.text("""
                    INSERT INTO contacts (property_id, full_name, email, phone, company, title, created_by)
                    VALUES (:pid, :fn, :em, :ph, :co, :ti, :uid)
                """),
                {
                    "pid": pid,
                    "fn": fn or None,
                    "em": (r.get("email") or "").strip() or None,
                    "ph": (r.get("phone") or "").strip() or None,
                    "co": (r.get("company") or "").strip() or None,
                    "ti": (r.get("title") or "").strip() or None,
                    "uid": user_id,
                },
            )
            imported += 1
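        # Upsert the last-import timestamp for this property into sync_meta.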
        conn.execute(
            sqlalchemy.text("""
                INSERT INTO sync_meta (property_id, entity, last_import_at, updated_at)
                VALUES (:pid, 'contact', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                ON CONFLICT (property_id, entity) DO UPDATE SET last_import_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
            """),
            {"pid": pid},
        )
        conn.commit()
    return JSONResponse(content={"success": True, "message": f"Imported {imported} contact(s)", "imported": imported})


@router.post("/import")
async def import_contacts_json(request: Request, body: ContactImportBody):
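    """Insert contacts supplied as a JSON payload for the given property."""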
    pid = body.property_id.strip()
    if not pid:
        return JSONResponse(status_code=400, content={"success": False, "message": "property_id required"})
    user_id = getattr(request.state, "user_id", None)
    imported = 0
    csql = get_cloudsql()
    with csql.pool.connect() as conn:
        for r in body.contacts:
            fn = (r.get("full_name") or r.get("name") or "").strip()
            if not fn and not (r.get("email") or "").strip():
                continue
            conn.execute(
                sqlalchemy.text("""
                    INSERT INTO contacts (property_id, full_name, email, phone, company, title, created_by)
                    VALUES (:pid, :fn, :em, :ph, :co, :ti, :uid)
                """),
                {
                    "pid": body.property_id,
                    "fn": fn or None,
                    "em": (r.get("email") or "").strip() or None,
                    "ph": (r.get("phone") or "").strip() or None,
                    "co": (r.get("company") or "").strip() or None,
                    "ti": (r.get("title") or "").strip() or None,
                    "uid": user_id,
                },
            )
            imported += 1
        conn.execute(
            sqlalchemy.text("""
                INSERT INTO sync_meta (property_id, entity, last_import_at, updated_at)
                VALUES (:pid, 'contact', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                ON CONFLICT (property_id, entity) DO UPDATE SET last_import_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
            """),
            {"pid": body.property_id},
        )
        conn.commit()
    return JSONResponse(content={"success": True, "message": f"Imported {imported} contact(s)", "imported": imported})


_CONTACT_CSV = "full_name,email,phone,company,title\n"


@router.get("/import/example/csv")
async def example_contacts_csv():
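    """Download an example CSV containing only the expected contact column headers."""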
    return Response(
        content=_CONTACT_CSV.encode("utf-8"),
        media_type="text/csv; charset=utf-8",
        headers={"Content-Disposition": 'attachment; filename="contacts-example.csv"'},
    )


@router.get("/import/example/xlsx")
async def example_contacts_xlsx():
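    """Download an example XLSX with the expected contact column headers (requires openpyxl)."""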
    try:
        from openpyxl import Workbook
    except ImportError:
        return JSONResponse(status_code=501, content={"success": False, "message": "openpyxl not installed"})
    wb = Workbook()
    ws = wb.active
    for i, h in enumerate(["full_name", "email", "phone", "company", "title"], 1):
        ws.cell(row=1, column=i, value=h)
    buf = io.BytesIO()
    wb.save(buf)
    buf.seek(0)
    return Response(
        content=buf.read(),
        media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={"Content-Disposition": 'attachment; filename="contacts-example.xlsx"'},
    )
