#!/usr/bin/env python3
"""
Create import-data tables in Cloud SQL (PostgreSQL).

This script runs import_data_schema.sql to create tables:
leads, customers, contacts, quotations, quotation_lines, billings, billing_lines, sync_meta.

Uses the same Cloud SQL connector and env as the rest of system-api.
Env: POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_DB, INSTANCE_NAME, GCP_PROJECT.
Optional: POSTGRES_ZONE (default asia-southeast1).

Usage:
    python run_import_data_schema.py

    Run from system-api root:
        python migration/scripts/run_import_data_schema.py
"""

import os
import sys
import logging

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# system-api root = parent of migration/ (same pattern as create_audience_members_table.py)
SYSTEM_API_ROOT = os.path.join(SCRIPT_DIR, "..", "..")
# Prepend the project root so connectors.* resolves even when this script is
# executed directly (not from the system-api root).
if SYSTEM_API_ROOT not in sys.path:
    sys.path.insert(0, SYSTEM_API_ROOT)

# NOTE: this import relies on the sys.path mutation above — keep the order.
from connectors.cloudSQL.cloudsql import CloudSQL
import sqlalchemy

# INFO level so per-statement progress from apply_import_data_schema is visible.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def _strip_leading_comment_lines(text: str) -> str:
    """Remove leading blank/comment-only lines so CREATE TABLE after -- 1. leads is kept."""
    lines = text.split("\n")
    while lines:
        if not lines[0].strip() or lines[0].strip().startswith("--"):
            lines.pop(0)
        else:
            break
    return "\n".join(lines).strip()


def apply_import_data_schema(csql: CloudSQL) -> tuple[int, int]:
    """Run import_data_schema.sql and execute each statement.

    Splits the file on ";" (naive split — assumes the schema file contains no
    semicolons inside string literals or function bodies) and strips leading
    comment lines so "-- 1. leads\nCREATE TABLE ..." is still executed.

    A failing statement is logged, rolled back, and counted, but does not
    abort the run — this lets the caller report a complete summary and decide
    the exit code from failed_count. (Previously the except branch re-raised,
    which made a non-zero failed_count, and the caller's failure summary,
    unreachable.)

    Args:
        csql: Initialized CloudSQL connector with an engine pool created.

    Returns:
        (success_count, failed_count)
    """
    sql_path = os.path.join(SCRIPT_DIR, "import_data_schema.sql")
    with open(sql_path, "r", encoding="utf-8") as f:
        content = f.read()

    # Split by semicolon; strip leading comment lines so "-- 1. leads\nCREATE TABLE..." is kept.
    statements = []
    for raw in content.split(";"):
        stmt = _strip_leading_comment_lines(raw)
        if stmt:
            statements.append(stmt)

    success_count = 0
    failed_count = 0
    with csql.pool.connect() as conn:
        for i, stmt in enumerate(statements):
            try:
                conn.execute(sqlalchemy.text(stmt + ";"))
                conn.commit()
                logger.info(
                    "Executed statement %d: %s...",
                    i + 1,
                    stmt[:60].replace("\n", " "),
                )
                success_count += 1
            except Exception as e:
                # Roll back the failed transaction so the connection is
                # usable for the remaining statements.
                conn.rollback()
                logger.error("Statement %d failed: %s", i + 1, e)
                failed_count += 1
    return success_count, failed_count


def main():
    """Entry point: read Cloud SQL config from env, apply schema, report a summary."""
    env = os.environ
    user = env.get("POSTGRES_USER")
    password = env.get("POSTGRES_PASSWORD")
    db_name = env.get("POSTGRES_DB")
    instance = env.get("INSTANCE_NAME")
    # Any missing/empty required variable aborts before touching the network.
    if not (user and password and db_name and instance):
        logger.error(
            "Missing env. Set POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_DB, INSTANCE_NAME (and GCP_PROJECT if needed)."
        )
        sys.exit(1)

    zone = env.get("POSTGRES_ZONE", "asia-southeast1")
    logger.info("Connecting to Cloud SQL instance in %s", zone)

    csql = CloudSQL(zone, instance)
    csql.create_engine(user, password, db_name)

    try:
        executed, failed = apply_import_data_schema(csql)
        separator = "=" * 50
        logger.info("\n%s", separator)
        logger.info("Summary:")
        logger.info("  ✅ Statements executed: %d", executed)
        logger.info("  ❌ Failed: %d", failed)
        logger.info("%s", separator)
        # Non-zero exit signals failure to CI / calling shells.
        if failed > 0:
            sys.exit(1)
        logger.info("Import data schema applied successfully.")
    finally:
        # Always release the Cloud SQL connector, even on error paths.
        csql.close()


if __name__ == "__main__":
    # Direct execution: python migration/scripts/run_import_data_schema.py
    main()
