"""
Migration API for Import Data schema (PostgreSQL).

Exposes endpoints to apply import_data_schema.sql (creating the tables: leads,
customers, contacts, quotations, quotation_lines, billings, billing_lines,
sync_meta) and to check their status. Before applying, it checks whether all
tables already exist; if they do, the DDL is not re-run (idempotent).
"""
import os
import logging
import sqlalchemy
from fastapi import APIRouter
from fastapi.responses import JSONResponse

from api.feature.import_data.db import get_cloudsql

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

router = APIRouter()
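
# The application mounts `router` elsewhere, so the URL prefix is deployment
# specific; "<prefix>" below is a placeholder. Example calls and abridged
# success responses, matching the handlers defined in this module:
#
#   POST <prefix>/schema/apply
#     -> {"status": "ok", "skipped": true, "tables_exist": [...]}      (already applied)
#     -> {"status": "ok", "skipped": false, "statements_executed": N}  (freshly applied)
#
#   GET <prefix>/schema/status
#     -> {"status": "ok", "ready": true|false,
#         "tables": {"required": [...], "existing": [...], "missing": [...]}}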

# Tables required by import_data_schema.sql (ordered by FK dependencies)
IMPORT_DATA_TABLES = [
    "leads",
    "customers",
    "contacts",
    "quotations",
    "quotation_lines",
    "billings",
    "billing_lines",
    "sync_meta",
]


def _sql_file_path() -> str:
    """Path to import_data_schema.sql จาก system-api root."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, "..", "..", "migration", "scripts", "import_data_schema.sql")


def _strip_leading_comment_lines(text: str) -> str:
    """Remove leading blank/comment-only lines so CREATE TABLE after -- 1. leads is kept."""
    lines = text.split("\n")
    while lines:
        if not lines[0].strip() or lines[0].strip().startswith("--"):
            lines.pop(0)
        else:
            break
    return "\n".join(lines).strip()


def check_import_data_tables_exist(conn) -> tuple[bool, list[str]]:
    """Check whether every table in IMPORT_DATA_TABLES exists in the public schema."""
    q = sqlalchemy.text("""
        SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = 'public'
          AND table_name IN :names
    """).bindparams(sqlalchemy.bindparam("names", expanding=True))
    rows = conn.execute(q, {"names": IMPORT_DATA_TABLES}).fetchall()
    existing = [r[0] for r in rows]
    return set(IMPORT_DATA_TABLES).issubset(existing), existing
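
# Illustrative query as sent to PostgreSQL once the expanding bind parameter is
# rendered (table names come from IMPORT_DATA_TABLES):
#
#   SELECT table_name
#   FROM information_schema.tables
#   WHERE table_schema = 'public'
#     AND table_name IN ('leads', 'customers', 'contacts', 'quotations',
#                        'quotation_lines', 'billings', 'billing_lines', 'sync_meta')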


def apply_import_data_schema(conn) -> tuple[int, int]:
    """Run import_data_schema.sql statement by statement.

    Returns (success_count, failed_count). Execution stops (and re-raises) on
    the first failing statement; statements committed before the failure stay
    applied.
    """
    sql_path = _sql_file_path()
    if not os.path.isfile(sql_path):
        raise FileNotFoundError(f"Schema file not found: {sql_path}")

    with open(sql_path, "r", encoding="utf-8") as f:
        content = f.read()

    # Split naively on ";" (assumes the schema file has no semicolons inside string
    # literals or function bodies) and strip leading comment lines from each chunk
    # so a statement preceded by a header comment (e.g. "-- 1. leads") is kept.
    statements = []
    for s in content.split(";"):
        stmt = _strip_leading_comment_lines(s)
        if stmt:
            statements.append(stmt)

    success_count = 0
    failed_count = 0
    for i, stmt in enumerate(statements):
        if not stmt or stmt.startswith("--"):
            continue
        try:
            conn.execute(sqlalchemy.text(stmt + ";"))
            conn.commit()
            logger.info("Executed statement %d: %s...", i + 1, stmt[:60].replace("\n", " "))
            success_count += 1
        except Exception as e:
            logger.error("Statement %d failed: %s", i + 1, e)
            failed_count += 1
            # Re-raise so the endpoint reports the error; earlier statements stay committed.
            raise
    return success_count, failed_count
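
# How the splitter sees the schema file (illustrative; the column list is elided,
# only the table name, the "-- 1. leads" header style, and the use of
# CREATE TABLE IF NOT EXISTS are taken from this module):
#
#   -- 1. leads
#   CREATE TABLE IF NOT EXISTS leads (
#       ...
#   );
#
# Splitting on ";" and stripping the leading comment yields one executable
# CREATE TABLE statement per chunk.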


@router.post(
    "/schema/apply",
    description="Apply import_data schema (create tables if not exist). Skips if all tables already exist.",
)
async def apply_schema():
    """
    รันสร้างตาราง import_data ตาม import_data_schema.sql
    - ถ้าตารางมีครบแล้ว จะไม่รัน DDL (คืน skipped)
    - ถ้ายังไม่ครบ จะรัน SQL (ใช้ CREATE TABLE IF NOT EXISTS อยู่แล้ว จึงปลอดภัย)
    """
    try:
        csql = get_cloudsql()
        with csql.pool.connect() as conn:
            all_exist, existing = check_import_data_tables_exist(conn)
            if all_exist:
                return JSONResponse(
                    status_code=200,
                    content={
                        "status": "ok",
                        "message": "Schema already applied; all import_data tables exist.",
                        "skipped": True,
                        "tables_exist": existing,
                    },
                )

            success_count, failed_count = apply_import_data_schema(conn)
            return JSONResponse(
                status_code=200,
                content={
                    "status": "ok",
                    "message": "Import data schema applied.",
                    "skipped": False,
                    "statements_executed": success_count,
                    "failed": failed_count,
                },
            )
    except FileNotFoundError as e:
        logger.error("Schema file not found: %s", e)
        return JSONResponse(
            status_code=500,
            content={"status": "error", "message": str(e)},
        )
    except RuntimeError as e:
        logger.error("CloudSQL config missing: %s", e)
        return JSONResponse(
            status_code=500,
            content={"status": "error", "message": "Database configuration incomplete."},
        )
    except Exception as e:
        logger.exception("Failed to apply import_data schema")
        return JSONResponse(
            status_code=500,
            content={"status": "error", "message": str(e)},
        )


@router.get(
    "/schema/status",
    description="Check if import_data tables exist (no create).",
)
async def schema_status():
    """เช็คสถานะว่าตาราง import_data มีครบหรือไม่"""
    try:
        csql = get_cloudsql()
        with csql.pool.connect() as conn:
            all_exist, existing = check_import_data_tables_exist(conn)
            missing = [t for t in IMPORT_DATA_TABLES if t not in existing]
            return JSONResponse(
                status_code=200,
                content={
                    "status": "ok",
                    "ready": all_exist,
                    "tables": {
                        "required": IMPORT_DATA_TABLES,
                        "existing": existing,
                        "missing": missing,
                    },
                },
            )
    except RuntimeError as e:
        logger.error("CloudSQL config missing: %s", e)
        return JSONResponse(
            status_code=500,
            content={"status": "error", "message": "Database configuration incomplete."},
        )
    except Exception as e:
        logger.exception("Failed to check schema status")
        return JSONResponse(
            status_code=500,
            content={"status": "error", "message": str(e)},
        )
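

# Minimal sketch for applying the schema outside FastAPI (e.g. from a deploy
# script). It only reuses the helpers above and assumes get_cloudsql() is
# configured the same way as for the endpoints; illustrative, not part of the API.
if __name__ == "__main__":
    _csql = get_cloudsql()
    with _csql.pool.connect() as _conn:
        _ready, _existing = check_import_data_tables_exist(_conn)
        if _ready:
            logger.info("All import_data tables already exist; nothing to do.")
        else:
            _applied, _ = apply_import_data_schema(_conn)
            logger.info("Applied %d statements.", _applied)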
