# Standard library
import logging
import os
import re
import uuid
from datetime import datetime, timedelta

# Third-party
import pytz
from fastapi import Request, Query, Depends, Header, APIRouter, HTTPException
from fastapi.responses import JSONResponse

# Local
import utility.function as func
from connectors.bigquery.bq import BigQuery, Schema
from connectors.firebase.firebase import Firebase
from models.offline_data import *
logging.basicConfig(level=logging.INFO)
# Timezone handles: all stored create/update timestamps below are formatted in
# Bangkok local time via `timezone_bkk`; `timezone_utc` is kept for callers
# elsewhere in the project.
timezone_utc = pytz.utc
timezone_bkk = pytz.timezone('Asia/Bangkok')

# Shared Firebase Realtime Database client for this module; host is taken from
# the environment (None if FIREBASE_HOST is unset).
fb = Firebase(host=os.environ.get("FIREBASE_HOST"))

router = APIRouter()

# Prefix used in every error log line so failures can be traced to this module.
LOGGING_PREFIX = "api_offline_schema"

@router.get("/mapping", response_model=DataResponse, description="Get offline table mapping options")
async def get_offline_table_mapping(default_request: Request):
    """Return the de-duplicated list of mapping options across every group in the offline_mapping template."""
    try:
        template = fb.db.reference(f'template/offline_mapping').get()
        # Flatten each group's option list into one set to drop duplicates.
        options = set()
        for group_options in template.values():
            options.update(group_options)
        return JSONResponse(status_code=200, content={"status":"ok",'message': 'Success', 'data': list(options)})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/list",response_model=DataResponse, description="Get list of offline tables")
async def get_offline_table_list(
    property_id: str = Query(..., description="Property ID"),
    default_request: Request = None
):
    """Summarize every offline table registered under the given property."""
    try:
        offline_schema = fb.db.reference(f'property/{property_id}/offline_mapping').get()
        # One summary row per non-empty table config (empty configs are skipped).
        summaries = [
            {
                "name": table_name,
                "row_count": config.get("row_count", 0),
                "status": config.get("status"),
                "createdate": config.get("createdate"),
                "lastupdate": config.get("lastupdate"),
            }
            for table_name, config in offline_schema.items()
            if config
        ]
        return JSONResponse(status_code=200, content={"status":"ok",'message': 'Success', 'data': summaries})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/get_schema", response_model=DataResponse, description="Get offline table schema")
async def get_offline_table_schema(
    property_id: str = Query(..., description="Property ID"),
    table_name: str = Query(..., description="Offline Table Name"),
    default_request: Request = None
):
    """Fetch a table's schema from BigQuery in BQ JSON format; 404 when nothing is returned."""
    try:
        schema = BigQuery().get_offline_table_schema_as_bqjson(property_id, table_name)
        if not schema:
            # Guard clause: no schema came back for this property/table pair.
            return JSONResponse(status_code=404, content={"status":"error",'message': f"Schema for table {table_name} not found"})
        return JSONResponse(status_code=200, content={"status":"ok",'message': 'Success', 'data': schema})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/get_value", description="Get distinct values of a field in offline table")
async def get_offline_table_value(
    property_id: str = Query(..., description="Property ID"),
    table_name: str = Query(..., description="Offline Table Name"),
    field_name: str = Query(..., description="Field Name"),
    limit: int = Query(1000, description="Limit of distinct values"),
    default_request: Request = None
):
    """Return up to `limit` distinct values of one field from an offline table."""
    try:
        client = BigQuery()
        lookup = client.get_field_values(
            property_id=property_id.strip(),
            table_name=table_name.strip(),   # usually just the table name
            field_path=field_name.strip(),
            distinct_limit=int(limit)
        )
        status_code = lookup.get("status")
        # Non-200 statuses from the helper are surfaced to the caller as-is.
        if status_code == 200:
            return JSONResponse(status_code=200, content=lookup)
        return JSONResponse(status_code=status_code, content={"status":"error",'message': lookup.get("message")})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.post("/manage", description="Manage offline table schema")
async def post_manage_offline_table(request: ManageOfflineSchemaRequest, default_request: Request):
    """Create a new offline table for a property.

    Builds the BigQuery table (plus a ``_temp`` staging twin) from the
    submitted column schema and registers the column-mapping metadata under
    ``property/{id}/offline_mapping`` in Firebase.

    Fixes vs previous revision:
    - the table-existence check was executed twice; it now runs once
    - validation failures returned bare ``({...}, 400)`` tuples, which FastAPI
      serializes as a 200 response whose body is a two-element JSON array;
      they are now proper 400 ``JSONResponse`` objects
    """
    try:
        template_mapping = fb.db.reference(f'template/offline_mapping').get()

        # Reject if a table with this name is already registered for the property.
        table_exist = fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}').get()
        if table_exist:
            return JSONResponse(status_code=400, content={"status":"error",'message': f"Table {request.table_name} already exists"})

        if len(request.schemas) < 2:
            return JSONResponse(status_code=400, content={"status":"error",'message': "Atleast 2 columns is required to create schema"})

        # Translate the requested columns into a BigQuery JSON schema plus the
        # mapping metadata (partition column, key groups, sensitive fields, ...).
        result = Schema.process_schema(request.schemas, template_mapping)
        schema = result.get('schema', None)
        partition = result.get('partition', None)
        unique_col = result.get('unique_col', None)
        contact_key = result.get("contact_key", None)
        demographic_col = result.get("demographic_col", None)
        sensitive_data = result.get("sensitive_data", None)
        other_key = result.get("other_key", None)

        # System columns appended to every offline table.
        schema.append({"name": "version_key", "type": "DATETIME", "mode": "REQUIRED","description":"The version key of the data, used to track changes in the schema or data structure."})
        schema.append({"name": "user_pseudo_id", "type": "STRING", "mode": "NULLABLE", "description":"A unique identifier for each user, used to track user behavior and interactions."})
        schema.append({"name": "ref_user_pseudo_id", "type": "STRING", "mode": "NULLABLE", "description":"(REF) A unique identifier for each user, used to track user behavior and interactions."})

        bq = BigQuery()
        bq.create_table_from_json_schema(f"client_{request.property_id}", f"offline_{request.table_name}", schema, partition)
        # Staging twin with the same schema/partitioning.
        bq.create_table_from_json_schema(f"client_{request.property_id}", f"offline_{request.table_name}_temp", schema, partition)

        # Persist only the mapping groups that are actually present.
        table_mapping = {}
        if contact_key:
            table_mapping['contact_key'] = contact_key
        if other_key:
            table_mapping['other_key'] = other_key
        if partition:
            table_mapping['date_column'] = partition
        if demographic_col:
            table_mapping['demographic_col'] = demographic_col
        if sensitive_data:
            table_mapping['sensitive_data'] = sensitive_data
        if unique_col:
            table_mapping['unique_col'] = unique_col

        now_bkk = datetime.now(timezone_bkk).strftime('%Y-%m-%d %H:%M:%S')
        table_mapping['createdate'] = now_bkk
        table_mapping['lastupdate'] = now_bkk
        table_mapping['row_count'] = 0
        table_mapping['status'] = 'active'

        fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}').set(table_mapping)
        return JSONResponse(status_code=200, content={"status":"ok",'message': f'Table {request.table_name} created successfully'})

    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})
    
@router.put("/manage", description="Update offline table schema")
async def put_manage_offline_table(request: ManageOfflineSchemaRequest, default_request: Request):
    """Replace an offline table's schema: drop and recreate the BigQuery table,
    then overwrite the Firebase mapping (preserving the original createdate).

    Fix vs previous revision: the length-validation failure returned a bare
    ``({...}, 400)`` tuple, which FastAPI serializes as a 200 response whose
    body is a two-element JSON array; it is now a proper 400 ``JSONResponse``.

    NOTE(review): row_count is reset to 0 and the table is dropped, so existing
    data is lost on update — presumably intentional; confirm with the pipeline.
    """
    try:
        template_mapping = fb.db.reference(f'template/offline_mapping').get()

        if len(request.schemas) < 2:
            return JSONResponse(status_code=400, content={"status":"error",'message': "Atleast 2 columns is required to create schema"})

        result = Schema.process_schema(request.schemas, template_mapping)
        schema = result.get('schema', None)
        partition = result.get('partition', None)
        unique_col = result.get('unique_col', None)
        contact_key = result.get("contact_key", None)
        demographic_col = result.get("demographic_col", None)
        sensitive_data = result.get("sensitive_data", None)
        # NOTE(review): the POST handler reads result["other_key"]; this reads
        # "internal_key" — confirm which key Schema.process_schema emits.
        other_key = result.get("internal_key", None)

        # NOTE(review): version_key is STRING here but DATETIME in the POST
        # handler — confirm which type the ingestion pipeline expects.
        schema.append({"name": "version_key", "type": "STRING", "mode": "REQUIRED","description":"The version key of the data, used to track changes in the schema or data structure."})
        bq = BigQuery()
        bq.delete_table("customer-360-profile", f"client_{request.property_id}", f"offline_{request.table_name}")
        bq.create_table_from_json_schema(f"client_{request.property_id}", f"offline_{request.table_name}", schema, partition)

        # Keep the original creation timestamp across schema updates.
        createDate = fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}/createdate').get()

        table_mapping = {}
        if contact_key:
            table_mapping['contact_key'] = contact_key
        if other_key:
            table_mapping['other_key'] = other_key
        if partition:
            table_mapping['date_column'] = partition
        if demographic_col:
            table_mapping['demographic_col'] = demographic_col
        if sensitive_data:
            table_mapping['sensitive_data'] = sensitive_data
        if unique_col:
            table_mapping['unique_col'] = unique_col

        table_mapping['createdate'] = createDate
        table_mapping['lastupdate'] = datetime.now(timezone_bkk).strftime('%Y-%m-%d %H:%M:%S')
        table_mapping['row_count'] = 0
        table_mapping['status'] = 'active'

        fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}').set(table_mapping)
        return JSONResponse(status_code=200, content={"status":"ok",'message': f'Table {request.table_name} updated successfully'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/column", response_model=DataResponse, description="Get offline table columns")
async def get_offline_table_column(default_request: Request,
    property_id: str = Query(..., description="Property ID"),
    table_name: str = Query(..., description="Offline Table Name")
):
    """List the columns of one offline table from BigQuery; 404 when nothing is returned."""
    try:
        columns = BigQuery().get_bq_columns(property_id, table_name)
        if not columns:
            # Guard clause: no columns came back for this property/table pair.
            return JSONResponse(status_code=404, content={"status":"error",'message': f"Columns for table {table_name} not found"})
        return JSONResponse(status_code=200, content={"status":"ok",'message': 'Success', 'data': columns})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/export", response_model=DataResponse, description="Export offline table schema to GCS")
async def export_offline_table_to_gcs(default_request: Request,
    property_id: str = Query(..., description="Property ID"),
    table_name: str = Query(..., description="Offline Table Name")
):
    """Export the table's dataset schema via BigQuery and return it to the caller."""
    try:
        client = BigQuery()
        exported_schema = client.get_schema_dataset(property_id, table_name)
        payload = {"status":"ok",'message': 'Success', 'data': exported_schema}
        return JSONResponse(status_code=200, content=payload)
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.put("/edit/status", description="Edit offline table status")
async def put_edit_offline_table_status(default_request: Request, request: ManageOfflineEditStatusRequest):
    """Set an offline table's status flag in Firebase; rejects no-op updates."""
    try:
        status_ref = fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}/status')

        # No-op guard: reject when the stored status already matches.
        if status_ref.get() == request.status:
            return JSONResponse(status_code=400, content={"status":"error",'message': f"Status already set to {request.status}"})

        status_ref.set(request.status)
        fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}/lastupdate').set(datetime.now(timezone_bkk).strftime('%Y-%m-%d %H:%M:%S'))
        return JSONResponse(status_code=200, content={"status":"ok",'message': f"Schema {request.table_name}'s status updated to {request.status}"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.post("/rename_column", description="Rename a column in offline table")
async def post_rename_offline_table_column(default_request: Request, request: ManageOfflineRenameColumnRequest):
    """Rename a column in BigQuery and keep the Firebase key mapping in sync.

    Security fix: old/new column names were interpolated raw into ALTER TABLE
    DDL (identifiers cannot be bound as query parameters), allowing SQL
    injection. Names are now validated against the BigQuery identifier
    pattern before the statement is built.
    """
    try:
        # Reject anything that is not a plain identifier before building DDL.
        identifier = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')
        for column in (request.old_name, request.new_name):
            if not identifier.match(column):
                return JSONResponse(status_code=400, content={"status":"error","message":f"Invalid column name: {column}"})
        # NOTE(review): property_id/table_name are also interpolated into the
        # table reference — presumably validated upstream; confirm the model.

        query = f"""
        ALTER TABLE `client_{request.property_id}.offline_{request.table_name}`
        RENAME COLUMN {request.old_name} TO {request.new_name}
        """
        bq = BigQuery()
        query_job = bq.client.query(query)
        query_job.result()

        ref = fb.db.reference(f'property/{request.property_id}/offline_mapping/offline_{request.table_name}')
        mapping_data = ref.get()

        # Update whichever key group maps to the old column name.
        for group in ["contact_key", "other_key"]:
            group_map = mapping_data.get(group, {})
            for schema_key, mapped_value in group_map.items():
                if mapped_value == request.old_name:
                    # Perform the update: use the original schema_key
                    ref.child(group).child(schema_key).set(request.new_name)
                    break

        ref.update({"lastupdate": datetime.now(timezone_bkk).strftime("%Y-%m-%d %H:%M:%S")})
        return JSONResponse(status_code=200, content={"status":"ok","message":"Edit schema successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})
    
@router.post("/add_column", description="Add a column to offline table")
async def post_add_offline_table_column(default_request: Request, request: ManageOfflineSchemaRequest):
    """Append new columns to an existing offline table's BigQuery schema.

    Bug fix: the handler read ``request.schema``, but the model field is
    ``schemas`` (as consumed by both /manage handlers); in pydantic,
    ``request.schema`` resolves to the model's ``schema()`` classmethod,
    not the submitted column list.
    """
    from google.cloud import bigquery
    try:
        table_id = f"customer-360-profile.client_{request.property_id}.offline_{request.table_name}"
        # Assumes each entry in request.schemas is a dict of SchemaField
        # kwargs (name/type/mode/...) — TODO confirm against the model.
        new_schema_fields = [bigquery.SchemaField(**field) for field in request.schemas]

        bq = BigQuery()
        table = bq.client.get_table(table_id)
        # Schema updates must be additive: append to the existing field list.
        table.schema = table.schema + new_schema_fields
        bq.client.update_table(table, ["schema"])
        return JSONResponse(status_code=200, content={"status":"ok","message":"Add column successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.delete("/delete", description="Delete columns from offline table")
async def delete_offline_table(default_request: Request, request: ManageOfflineDeleteColumnRequest):
    """Drop one or more columns from an offline table in BigQuery.

    Security fix: column names were interpolated raw into DROP COLUMN DDL
    (identifiers cannot be bound as query parameters), allowing SQL
    injection. Names are now validated against the BigQuery identifier
    pattern before any statement runs.
    """
    try:
        if len(request.column_names) == 0:
            return JSONResponse(status_code=400, content={"status":"error","message":"No columns specified for deletion"})

        # Validate every name up front so we never run a partial batch with a bad identifier.
        identifier = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')
        for col in request.column_names:
            if not identifier.match(col):
                return JSONResponse(status_code=400, content={"status":"error","message":f"Invalid column name: {col}"})

        table_id = f"customer-360-profile.client_{request.property_id}.offline_{request.table_name}"
        bq = BigQuery()
        for col in request.column_names:
            query = f"""
            ALTER TABLE `{table_id}`
            DROP COLUMN {col}
            """
            bq.client.query(query).result()
        return JSONResponse(status_code=200, content={"status":"ok","message":"Delete columns successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})